colored_blob_perspective_camera_default_sensor.cpp
#include "colored_blob_perspective_camera_default_sensor.h"
#include <argos3/core/simulator/simulator.h>
#include <argos3/core/simulator/space/positional_indices/positional_index.h>
#include <argos3/core/simulator/entity/composable_entity.h>
#include <argos3/core/simulator/entity/embodied_entity.h>
#include <argos3/plugins/simulator/entities/led_entity.h>
#include <argos3/plugins/simulator/entities/perspective_camera_equipped_entity.h>
#include <argos3/plugins/simulator/media/led_medium.h>

namespace argos {

   /****************************************/
   /****************************************/

   class CPerspectiveCameraLEDCheckOperation : public CPositionalIndex<CLEDEntity>::COperation {

   public:

      CPerspectiveCameraLEDCheckOperation(
         CCI_ColoredBlobPerspectiveCameraSensor::TBlobList& t_blobs,
         CPerspectiveCameraEquippedEntity& c_cam_entity,
         CEmbodiedEntity& c_embodied_entity,
         CControllableEntity& c_controllable_entity,
         bool b_show_rays,
         Real f_noise_std_dev) :
         m_tBlobs(t_blobs),
         m_cCamEntity(c_cam_entity),
         m_cEmbodiedEntity(c_embodied_entity),
         m_cControllableEntity(c_controllable_entity),
         m_bShowRays(b_show_rays),
         m_fNoiseStdDev(f_noise_std_dev),
         m_pcRNG(NULL) {
         m_pcRootSensingEntity = &m_cEmbodiedEntity.GetRootEntity();
         if(m_fNoiseStdDev > 0.0f) {
            m_pcRNG = CRandom::CreateRNG("argos");
         }
      }

      virtual ~CPerspectiveCameraLEDCheckOperation() {
         while(! m_tBlobs.empty()) {
            delete m_tBlobs.back();
            m_tBlobs.pop_back();
         }
      }

      virtual bool operator()(CLEDEntity& c_led) {
         /* Process this LED only if it's lit */
         if(c_led.GetColor() != CColor::BLACK) {
            /* Filter out the LEDs belonging to the sensing entity by checking if they share the same root entity */
            if(m_pcRootSensingEntity == &c_led.GetRootEntity()) return true;
            /* If we are here, it's because the LED must be processed */
            /* Set the end of the ray for occlusion checking */
            m_cOcclusionCheckRay.SetEnd(c_led.GetPosition());
            /* Calculate the vector to the LED in the camera-anchor frame of reference */
            m_cLEDRelative = c_led.GetPosition();
            m_cLEDRelative -= m_cCamEntity.GetAnchor().Position;
            m_cLEDRelative.Rotate(m_cInvCameraOrient);
            /* Calculate the projection of the LED vector onto the camera direction */
            Real fDotProd = m_cLEDRelative.GetX();
            /* The blob is visible if
             * 1. it is within the distance range, AND
             * 2. it is within the aperture range, AND
             * 3. there are no occlusions
             */
            if(fDotProd < m_cCamEntity.GetRange() &&
               ACos(fDotProd / m_cLEDRelative.Length()) < m_cCamEntity.GetAperture() &&
               !GetClosestEmbodiedEntityIntersectedByRay(m_sIntersectionItem,
                                                         m_cOcclusionCheckRay,
                                                         m_cEmbodiedEntity)) {
               /* The LED is visible */
               /* Calculate the intersection point between the LED ray and the image plane */
               m_cLEDRelative.Normalize();
               m_cLEDRelative *= m_cCamEntity.GetFocalLength() / m_cLEDRelative.GetX();
               /*
                * The image plane is perpendicular to the local X axis.
                * Y points to the left, Z up, and the origin is in the image center.
                * To find the pixel (i,j), we flip both Y and Z and translate the origin
                * so that it is up-left, the i axis goes right, and the j axis goes down.
                */
               SInt32 unI =
                  - m_cCamEntity.GetImagePxWidth() /
                  m_cCamEntity.GetImageMtWidth() *
                  (m_cLEDRelative.GetY() -
                   m_cCamEntity.GetImageMtWidth() * 0.5f);
               SInt32 unJ =
                  - m_cCamEntity.GetImagePxHeight() /
                  m_cCamEntity.GetImageMtHeight() *
                  (m_cLEDRelative.GetZ() -
                   m_cCamEntity.GetImageMtHeight() * 0.5f);
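               /*
                * Worked example (illustrative numbers, not taken from this file):
                * with an image 640 px and 0.1 m wide, a visible LED whose projected
                * Y coordinate on the image plane is -0.02 m gives
                *   i = -(640 / 0.1) * (-0.02 - 0.05) = 448,
                * i.e. to the right of the image center (i = 320), since positive Y
                * points to the left in the camera frame. The j coordinate follows
                * the same pattern using the image height and the Z coordinate.
                */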
               /* Make sure (i,j) is within the limits */
               if((unI >= m_cCamEntity.GetImagePxWidth() || unI < 0) ||
                  (unJ >= m_cCamEntity.GetImagePxHeight() || unJ < 0))
                  return true;
               /* Add new blob */
               m_tBlobs.push_back(
                  new CCI_ColoredBlobPerspectiveCameraSensor::SBlob(
                     c_led.GetColor(), unI, unJ));
               /* Draw ray */
               if(m_bShowRays) {
                  m_cControllableEntity.AddCheckedRay(
                     false,
                     CRay3(m_cCamEntity.GetAnchor().Position,
                           c_led.GetPosition()));
               }
            }
         }
         return true;
      }

      void Setup() {
         /* Erase blobs */
         while(! m_tBlobs.empty()) {
            delete m_tBlobs.back();
            m_tBlobs.pop_back();
         }
         /* Reset ray start */
         m_cOcclusionCheckRay.SetStart(m_cCamEntity.GetAnchor().Position);
         /* Calculate inverse of camera orientation */
         m_cInvCameraOrient = m_cCamEntity.GetAnchor().Orientation.Inverse();
      }

   private:

      CCI_ColoredBlobPerspectiveCameraSensor::TBlobList& m_tBlobs;
      CPerspectiveCameraEquippedEntity& m_cCamEntity;
      CEmbodiedEntity& m_cEmbodiedEntity;
      CControllableEntity& m_cControllableEntity;
      CQuaternion m_cInvCameraOrient;
      bool m_bShowRays;
      CEntity* m_pcRootSensingEntity;
      CRadians m_cTmp1, m_cTmp2;
      CVector3 m_cLEDRelative;
      SEmbodiedEntityIntersectionItem m_sIntersectionItem;
      CRay3 m_cOcclusionCheckRay;
      Real m_fNoiseStdDev;
      CRandom::CRNG* m_pcRNG;
   };

   /****************************************/
   /****************************************/

   CColoredBlobPerspectiveCameraDefaultSensor::CColoredBlobPerspectiveCameraDefaultSensor() :
      m_bEnabled(false),
      m_pcCamEntity(NULL),
      m_pcControllableEntity(NULL),
      m_pcEmbodiedEntity(NULL),
      m_pcLEDIndex(NULL),
      m_pcEmbodiedIndex(NULL),
      m_bShowRays(false) {
   }

   /****************************************/
   /****************************************/

   CColoredBlobPerspectiveCameraDefaultSensor::~CColoredBlobPerspectiveCameraDefaultSensor() {
   }

   /****************************************/
   /****************************************/

   void CColoredBlobPerspectiveCameraDefaultSensor::SetRobot(CComposableEntity& c_entity) {
      /* Get perspective camera equipped entity */
      m_pcCamEntity = &(c_entity.GetComponent<CPerspectiveCameraEquippedEntity>("perspective_camera"));
      /* Get controllable entity */
      m_pcControllableEntity = &(c_entity.GetComponent<CControllableEntity>("controller"));
      /* Get embodied entity */
      m_pcEmbodiedEntity = &(c_entity.GetComponent<CEmbodiedEntity>("body"));
   }

   /****************************************/
   /****************************************/

   void CColoredBlobPerspectiveCameraDefaultSensor::Init(TConfigurationNode& t_tree) {
      try {
         /* Parent class init */
         CCI_ColoredBlobPerspectiveCameraSensor::Init(t_tree);
         /* Show rays? */
         GetNodeAttributeOrDefault(t_tree, "show_rays", m_bShowRays, m_bShowRays);
         /* Parse noise */
         Real fNoiseStdDev = 0.0f;
         GetNodeAttributeOrDefault(t_tree, "noise_std_dev", fNoiseStdDev, fNoiseStdDev);
         /* Get LED medium from id specified in the XML */
         std::string strMedium;
         GetNodeAttribute(t_tree, "medium", strMedium);
         m_pcLEDIndex = &(CSimulator::GetInstance().GetMedium<CLEDMedium>(strMedium).GetIndex());
         /* Create check operation */
         m_pcOperation = new CPerspectiveCameraLEDCheckOperation(
            m_sReadings.BlobList,
            *m_pcCamEntity,
            *m_pcEmbodiedEntity,
            *m_pcControllableEntity,
            m_bShowRays,
            fNoiseStdDev);
      }
      catch(CARGoSException& ex) {
         THROW_ARGOSEXCEPTION_NESTED("Error initializing the colored blob perspective camera default sensor", ex);
      }
   }

   /****************************************/
   /****************************************/

   void CColoredBlobPerspectiveCameraDefaultSensor::Update() {
      if(m_bEnabled) {
         /* Increase data counter */
         ++m_sReadings.Counter;
         /* Prepare the operation */
         m_pcOperation->Setup();
         /* Calculate the sensing box */
         Real fHalfRange = m_pcCamEntity->GetRange() * 0.5f;
         Real fHalfSide = fHalfRange * Tan(m_pcCamEntity->GetAperture());
         /* Box center */
         CVector3 cCenter(fHalfRange, 0.0f, 0.0f);
         cCenter.Rotate(m_pcCamEntity->GetAnchor().Orientation);
         cCenter += m_pcCamEntity->GetAnchor().Position;
         /* Box half size */
         CVector3 cCorner(fHalfRange, fHalfSide, fHalfSide);
         cCorner.Rotate(m_pcCamEntity->GetAnchor().Orientation);
         CVector3 cHalfSize(
            Abs(cCorner.GetX()),
            Abs(cCorner.GetY()),
            Abs(cCorner.GetZ()));
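         /*
          * Illustrative example (numbers not from this file): for a camera with
          * range 1 m and aperture 30 degrees, fHalfRange = 0.5 m and
          * fHalfSide = 0.5 * tan(30 deg), roughly 0.29 m, so for an unrotated
          * camera the LED index is queried with an axis-aligned box of half size
          * (0.5, 0.29, 0.29) centered halfway along the camera's viewing axis.
          */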
         /* Go through LED entities in box range */
         m_pcLEDIndex->ForEntitiesInBoxRange(
            cCenter, cHalfSize, *m_pcOperation);
      }
   }

   /****************************************/
   /****************************************/

   void CColoredBlobPerspectiveCameraDefaultSensor::Reset() {
      m_sReadings.Counter = 0;
      m_sReadings.BlobList.clear();
   }

   /****************************************/
   /****************************************/

   void CColoredBlobPerspectiveCameraDefaultSensor::Destroy() {
      delete m_pcOperation;
   }

   /****************************************/
   /****************************************/

   void CColoredBlobPerspectiveCameraDefaultSensor::Enable() {
      m_pcCamEntity->Enable();
      m_bEnabled = true;
   }

   /****************************************/
   /****************************************/

   void CColoredBlobPerspectiveCameraDefaultSensor::Disable() {
      m_pcCamEntity->Disable();
      m_bEnabled = false;
   }

   /****************************************/
   /****************************************/

   REGISTER_SENSOR(CColoredBlobPerspectiveCameraDefaultSensor,
                   "colored_blob_perspective_camera", "default",
                   "Carlo Pinciroli [ilpincy@gmail.com]",
                   "1.0",
                   "A generic perspective camera sensor to detect colored blobs.",
                   "This sensor accesses a perspective camera that detects colored blobs. The\n"
                   "sensor returns a list of blobs, each defined by a color and a pixel position\n"
                   "in the camera image. In controllers, you must include the\n"
                   "ci_colored_blob_perspective_camera_sensor.h header.\n\n"
                   "REQUIRED XML CONFIGURATION\n\n"
                   "  <controllers>\n"
                   "    ...\n"
                   "    <my_controller ...>\n"
                   "      ...\n"
                   "      <sensors>\n"
                   "        ...\n"
                   "        <colored_blob_perspective_camera implementation=\"default\"\n"
                   "                                          medium=\"leds\" />\n"
                   "        ...\n"
                   "      </sensors>\n"
                   "      ...\n"
                   "    </my_controller>\n"
                   "    ...\n"
                   "  </controllers>\n\n"
                   "The 'medium' attribute must be set to the id of the leds medium declared in the\n"
                   "<media> section.\n\n"
                   "OPTIONAL XML CONFIGURATION\n\n"
                   "It is possible to draw the rays shot by the camera sensor in the OpenGL\n"
                   "visualization. This can be useful for sensor debugging, and also to understand\n"
                   "what's wrong in your controller. In OpenGL, the rays are drawn in cyan when\n"
                   "they are not obstructed and in purple when they are. In case a ray is\n"
                   "obstructed, a black dot is drawn where the intersection occurred.\n"
                   "To turn this functionality on, add the attribute \"show_rays\" as in this\n"
                   "example:\n\n"
                   "  <controllers>\n"
                   "    ...\n"
                   "    <my_controller ...>\n"
                   "      ...\n"
                   "      <sensors>\n"
                   "        ...\n"
                   "        <colored_blob_perspective_camera implementation=\"default\"\n"
                   "                                          medium=\"leds\"\n"
                   "                                          show_rays=\"true\" />\n"
                   "        ...\n"
                   "      </sensors>\n"
                   "      ...\n"
                   "    </my_controller>\n"
                   "    ...\n"
                   "  </controllers>\n\n"
                   "It is possible to add noise to the blob readings, thus matching the\n"
                   "characteristics of a real robot better. This can be done with the attribute\n"
                   "\"noise_std_dev\".\n\n"
                   "  <controllers>\n"
                   "    ...\n"
                   "    <my_controller ...>\n"
                   "      ...\n"
                   "      <sensors>\n"
                   "        ...\n"
                   "        <colored_blob_perspective_camera implementation=\"default\"\n"
                   "                                          medium=\"leds\"\n"
                   "                                          noise_std_dev=\"0.1\" />\n"
                   "        ...\n"
                   "      </sensors>\n"
                   "      ...\n"
                   "    </my_controller>\n"
                   "    ...\n"
                   "  </controllers>\n",
                   "Usable"
   );

}
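
For context, here is a minimal usage sketch showing how a controller might read the blobs this sensor fills in. It is illustrative only and not part of the file above: it assumes the control interface CCI_ColoredBlobPerspectiveCameraSensor exposes a GetReadings() accessor returning the SReadings structure whose BlobList this sensor populates, that SBlob carries Color, X and Y fields matching the constructor call in the listing, that the include path is the usual location of the header in the generic robot plugin, and that the sensor handle is obtained in the controller with GetSensor<CCI_ColoredBlobPerspectiveCameraSensor>("colored_blob_perspective_camera").

#include <argos3/core/utility/logging/argos_log.h>
#include <argos3/plugins/robots/generic/control_interface/ci_colored_blob_perspective_camera_sensor.h>

using namespace argos;

/* Illustrative helper: print every blob currently reported by the camera sensor.
 * pc_camera is the handle a controller would obtain in Init(), e.g. with
 * GetSensor<CCI_ColoredBlobPerspectiveCameraSensor>("colored_blob_perspective_camera"). */
void PrintBlobs(CCI_ColoredBlobPerspectiveCameraSensor* pc_camera) {
   const CCI_ColoredBlobPerspectiveCameraSensor::SReadings& sReadings = pc_camera->GetReadings();
   for(size_t i = 0; i < sReadings.BlobList.size(); ++i) {
      const CCI_ColoredBlobPerspectiveCameraSensor::SBlob& sBlob = *sReadings.BlobList[i];
      LOG << "Blob " << i
          << ": color=" << sBlob.Color
          << " pixel=(" << sBlob.X << "," << sBlob.Y << ")"
          << std::endl;
   }
}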