/**
 * @file <argos3/plugins/robots/generic/simulator/camera_default_sensor.cpp>
 *
 * @author Michael Allwright - <allsey87@gmail.com>
 */
#include "camera_default_sensor.h"

#include <argos3/core/simulator/simulator.h>
#include <argos3/core/simulator/entity/composable_entity.h>
#include <argos3/plugins/robots/generic/simulator/camera_sensor_algorithm.h>

namespace argos {
13 
14  /****************************************/
15  /****************************************/
16 
18  m_bShowFrustum(false),
19  m_pcEmbodiedEntity(nullptr),
20  m_pcControllableEntity(nullptr) {}
21 
22  /****************************************/
23  /****************************************/
24 
26  /* Get the embodied and controllable entities */
27  m_pcEmbodiedEntity = &(c_entity.GetComponent<CEmbodiedEntity>("body"));
28  m_pcControllableEntity = &(c_entity.GetComponent<CControllableEntity>("controller"));
29  }
30 
31  /****************************************/
32  /****************************************/
33 
35  try {
36  /* Parent class init */
37  CCI_CameraSensor::Init(t_tree);
38  /* Show the frustums */
40  /* For each camera */
41  TConfigurationNodeIterator itCamera("camera");
42  for(itCamera = itCamera.begin(&t_tree);
43  itCamera != itCamera.end();
44  ++itCamera) {
45  /* Get camera indentifier */
46  std::string strId;
47  GetNodeAttribute(*itCamera, "id", strId);
48  /* Parse and look up the anchor */
49  std::string strAnchorId;
50  GetNodeAttribute(*itCamera, "anchor", strAnchorId);
51  SAnchor& sAnchor = m_pcEmbodiedEntity->GetAnchor(strAnchorId);
52  /* parse the offset */
53  CVector3 cOffsetPosition;
54  CQuaternion cOffsetOrientation;
55  GetNodeAttribute(*itCamera, "position", cOffsetPosition);
56  GetNodeAttribute(*itCamera, "orientation", cOffsetOrientation);
57  CTransformationMatrix3 cOffset(cOffsetOrientation, cOffsetPosition);
58  /* parse the range */
59  CRange<Real> cRange;
60  GetNodeAttribute(*itCamera, "range", cRange);
61  /* create the projection matrix */
62  CSquareMatrix<3> cProjectionMatrix;
63  cProjectionMatrix.SetIdentityMatrix();
64  /* set the focal length */
65  CVector2 cFocalLength;
66  GetNodeAttribute(*itCamera, "focal_length", cFocalLength);
67  cProjectionMatrix(0,0) = cFocalLength.GetX(); // Fx
68  cProjectionMatrix(1,1) = cFocalLength.GetY(); // Fy
69  /* set the principle point */
70  CVector2 cPrinciplePoint;
71  GetNodeAttribute(*itCamera, "principle_point", cPrinciplePoint);
72  cProjectionMatrix(0,2) = cPrinciplePoint.GetX(); // Px
73  cProjectionMatrix(1,2) = cPrinciplePoint.GetY(); // Py
74  /* set the distortion parameters */
75  /*
76  CMatrix<1,5> cDistortionParameters;
77  std::string strDistortionParameters;
78  Real pfDistortionParameters[3];
79  GetNodeAttribute(*itCamera, "distortion_parameters", strDistortionParameters);
80  ParseValues<Real>(strDistortionParameters, 3, pfDistortionParameters, ',');
81  cDistortionParameters(0,0) = pfDistortionParameters[0]; // K1
82  cDistortionParameters(0,1) = pfDistortionParameters[1]; // K2
83  cDistortionParameters(0,4) = pfDistortionParameters[2]; // K3
84  */
85  /* parse the resolution */
86  CVector2 cResolution;
87  GetNodeAttribute(*itCamera, "resolution", cResolution);
88  /* create and initialise the algorithms */
89  std::vector<CCameraSensorSimulatedAlgorithm*> vecSimulatedAlgorithms;
90  std::vector<CCI_CameraSensorAlgorithm*> vecAlgorithms;
91  TConfigurationNodeIterator itAlgorithm;
92  for(itAlgorithm = itAlgorithm.begin(&(*itCamera));
93  itAlgorithm != itAlgorithm.end();
94  ++itAlgorithm) {
95  /* create the algorithm */
96  CCameraSensorSimulatedAlgorithm* pcAlgorithm =
98  /* check that algorithm inherits from a control interface */
99  CCI_CameraSensorAlgorithm* pcCIAlgorithm =
100  dynamic_cast<CCI_CameraSensorAlgorithm*>(pcAlgorithm);
101  if(pcCIAlgorithm == nullptr) {
102  THROW_ARGOSEXCEPTION("Algorithm \"" << itAlgorithm->Value() <<
103  "\" does not inherit from CCI_CameraSensorAlgorithm");
104  }
105  /* initialize the algorithm's control interface */
106  pcCIAlgorithm->Init(*itAlgorithm);
107  /* store pointers to the algorithms */
108  vecSimulatedAlgorithms.push_back(pcAlgorithm);
109  vecAlgorithms.push_back(pcCIAlgorithm);
110  }
111  /* create the simulated sensor */
112  m_vecSensors.emplace_back(sAnchor, cOffset, cRange, cProjectionMatrix,
113  cResolution, vecSimulatedAlgorithms);
114  /* create the sensor's control interface */
115  m_vecInterfaces.emplace_back(strId, vecAlgorithms);
116  }
117  }
118  catch(CARGoSException& ex) {
119  THROW_ARGOSEXCEPTION_NESTED("Error initializing camera sensor", ex);
120  }
121  Update();
122  }
123 
124  /****************************************/
125  /****************************************/
126 
128  /* vector of controller rays */
129  std::vector<std::pair<bool, CRay3> >& vecCheckedRays =
131  /* sensor parameters */
132  CTransformationMatrix3 cWorldToAnchorTransform;
133  CTransformationMatrix3 cWorldToCameraTransform;
134  CTransformationMatrix3 cCameraToWorldTransform;
135  //CTransformationMatrix3 cCameraToWorldTransform;
136  CVector3 cCameraLocation, cLookAt, cUp;
137  CVector3 cX, cY, cZ;
138  CVector3 cNearCenter, cNearTopLeft, cNearTopRight, cNearBottomLeft, cNearBottomRight;
139  CVector3 cFarCenter, cFarTopLeft, cFarTopRight, cFarBottomLeft, cFarBottomRight;
140  std::array<CPlane, 6> arrFrustumPlanes;
141  CVector3 cBoundingBoxMinCorner, cBoundingBoxMaxCorner;
142  CVector3 cBoundingBoxPosition, cBoundingBoxHalfExtents;
143  /* for each camera sensor */
144  for(SSensor& s_sensor : m_vecSensors) {
145  /* calculate transform matrices */
146  cWorldToAnchorTransform.SetFromComponents(s_sensor.Anchor.Orientation, s_sensor.Anchor.Position);
147  cWorldToCameraTransform = cWorldToAnchorTransform * s_sensor.Offset;
148  cCameraToWorldTransform = cWorldToCameraTransform.GetInverse();
149  /* calculate camera direction vectors */
150  cCameraLocation = cWorldToCameraTransform.GetTranslationVector();
151  cLookAt = cWorldToCameraTransform * CVector3::Z;
152  cUp = CVector3(0,-1,0); // -Y
153  cUp.Rotate(cWorldToCameraTransform.GetRotationMatrix());
154  /* calculate direction vectors */
155  cZ = cCameraLocation - cLookAt;
156  cZ.Normalize();
157  cX = cUp;
158  cX.CrossProduct(cZ);
159  cX.Normalize();
160  cY = cZ;
161  cY.CrossProduct(cX);
162  /* calculate frustum coordinates */
163  cNearCenter = cCameraLocation - cZ * s_sensor.Range.GetMin();
164  cFarCenter = cCameraLocation - cZ * s_sensor.Range.GetMax();
165  cNearTopLeft = cNearCenter + (cY * s_sensor.NearPlaneHeight) - (cX * s_sensor.NearPlaneWidth);
166  cNearTopRight = cNearCenter + (cY * s_sensor.NearPlaneHeight) + (cX * s_sensor.NearPlaneWidth);
167  cNearBottomLeft = cNearCenter - (cY * s_sensor.NearPlaneHeight) - (cX * s_sensor.NearPlaneWidth);
168  cNearBottomRight = cNearCenter - (cY * s_sensor.NearPlaneHeight) + (cX * s_sensor.NearPlaneWidth);
169  cFarTopLeft = cFarCenter + (cY * s_sensor.FarPlaneHeight) - (cX * s_sensor.FarPlaneWidth);
170  cFarTopRight = cFarCenter + (cY * s_sensor.FarPlaneHeight) + (cX * s_sensor.FarPlaneWidth);
171  cFarBottomLeft = cFarCenter - (cY * s_sensor.FarPlaneHeight) - (cX * s_sensor.FarPlaneWidth);
172  cFarBottomRight = cFarCenter - (cY * s_sensor.FarPlaneHeight) + (cX * s_sensor.FarPlaneWidth);
173  /* show frustum if enabled by adding outline to the checked rays vector */
174  if(m_bShowFrustum) {
175  vecCheckedRays.emplace_back(false, CRay3(cNearTopLeft, cNearTopRight));
176  vecCheckedRays.emplace_back(false, CRay3(cNearTopRight, cNearBottomRight));
177  vecCheckedRays.emplace_back(false, CRay3(cNearBottomRight, cNearBottomLeft));
178  vecCheckedRays.emplace_back(false, CRay3(cNearBottomLeft, cNearTopLeft));
179  vecCheckedRays.emplace_back(false, CRay3(cFarTopLeft, cFarTopRight));
180  vecCheckedRays.emplace_back(false, CRay3(cFarTopRight, cFarBottomRight));
181  vecCheckedRays.emplace_back(false, CRay3(cFarBottomRight, cFarBottomLeft));
182  vecCheckedRays.emplace_back(false, CRay3(cFarBottomLeft, cFarTopLeft));
183  vecCheckedRays.emplace_back(false, CRay3(cNearTopLeft, cFarTopLeft));
184  vecCheckedRays.emplace_back(false, CRay3(cNearTopRight, cFarTopRight));
185  vecCheckedRays.emplace_back(false, CRay3(cNearBottomRight, cFarBottomRight));
186  vecCheckedRays.emplace_back(false, CRay3(cNearBottomLeft, cFarBottomLeft));
187  }
188  //std::cerr << cFarBottomRight.GetZ() << "\t" << cFarBottomLeft.GetZ() << std::endl; TODO
189  /* generate a bounding box for the frustum */
190  cBoundingBoxMinCorner = cNearCenter;
191  cBoundingBoxMaxCorner = cNearCenter;
192  for(const CVector3& c_point : {
193  cNearTopLeft, cNearTopRight, cNearBottomLeft, cNearBottomRight,
194  cFarTopLeft, cFarTopRight, cFarBottomLeft, cFarBottomRight
195  }) {
196  if(c_point.GetX() > cBoundingBoxMaxCorner.GetX()) {
197  cBoundingBoxMaxCorner.SetX(c_point.GetX());
198  }
199  if(c_point.GetX() < cBoundingBoxMinCorner.GetX()) {
200  cBoundingBoxMinCorner.SetX(c_point.GetX());
201  }
202  if(c_point.GetY() > cBoundingBoxMaxCorner.GetY()) {
203  cBoundingBoxMaxCorner.SetY(c_point.GetY());
204  }
205  if(c_point.GetY() < cBoundingBoxMinCorner.GetY()) {
206  cBoundingBoxMinCorner.SetY(c_point.GetY());
207  }
208  if(c_point.GetZ() > cBoundingBoxMaxCorner.GetZ()) {
209  cBoundingBoxMaxCorner.SetZ(c_point.GetZ());
210  }
211  if(c_point.GetZ() < cBoundingBoxMinCorner.GetZ()) {
212  cBoundingBoxMinCorner.SetZ(c_point.GetZ());
213  }
214  }
215  cBoundingBoxMaxCorner *= 0.5;
216  cBoundingBoxMinCorner *= 0.5;
217  cBoundingBoxPosition = (cBoundingBoxMaxCorner + cBoundingBoxMinCorner);
218  cBoundingBoxHalfExtents = (cBoundingBoxMaxCorner - cBoundingBoxMinCorner);
219  /* generate frustum planes */
220  arrFrustumPlanes[0].SetFromThreePoints(cNearTopRight, cNearTopLeft, cFarTopLeft);
221  arrFrustumPlanes[1].SetFromThreePoints(cNearBottomLeft, cNearBottomRight, cFarBottomRight);
222  arrFrustumPlanes[2].SetFromThreePoints(cNearTopLeft, cNearBottomLeft, cFarBottomLeft);
223  arrFrustumPlanes[3].SetFromThreePoints(cNearBottomRight, cNearTopRight, cFarBottomRight);
224  arrFrustumPlanes[4].SetFromThreePoints(cNearTopLeft, cNearTopRight, cNearBottomRight);
225  arrFrustumPlanes[5].SetFromThreePoints(cFarTopRight, cFarTopLeft, cFarBottomLeft);
226  /* execute each algorithm */
227  for(CCameraSensorSimulatedAlgorithm* pc_algorithm : s_sensor.Algorithms) {
228  pc_algorithm->Update(s_sensor.ProjectionMatrix,
229  arrFrustumPlanes,
230  cCameraToWorldTransform,
231  cCameraLocation,
232  cBoundingBoxPosition,
233  cBoundingBoxHalfExtents);
234  /* transfer any rays to the controllable entity for rendering */
235  vecCheckedRays.insert(std::end(vecCheckedRays),
236  std::begin(pc_algorithm->GetCheckedRays()),
237  std::end(pc_algorithm->GetCheckedRays()));
238  }
239  }
240  }
241 
242  /****************************************/
243  /****************************************/
244 
246  "cameras", "default",
247  "Michael Allwright [allsey87@gmail.com]",
248  "1.0",
249  "A generic multi-camera sensor capable of running various algorithms",
250  "The generic multi-camera sensor can be attached to any composable entity in\n"
251  "ARGoS that contains an embodied entity with at least one anchor. The sensor\n"
252  "can be initialized with a number of cameras each running different algorithms\n"
253  "for detecting different objects in the simulation. The sensor is designed so\n"
254  "that algorithms can project a feature in the simulation on to the virtual\n"
255  "sensor and store its 2D pixel coordinates as a reading. The implementation\n"
256  "of algorithms that behave differently, however, is also possible.\n\n"
257  "REQUIRED XML CONFIGURATION\n\n"
258  " <controllers>\n"
259  " ...\n"
260  " <my_controller ...>\n"
261  " ...\n"
262  " <sensors>\n"
263  " ...\n"
264  " <cameras implementation=\"default\"/>\n"
265  " ...\n"
266  " </sensors>\n"
267  " ...\n"
268  " </my_controller>\n"
269  " ...\n"
270  " </controllers>\n\n"
271  "OPTIONAL XML CONFIGURATION\n\n"
272  "It is possible to draw the frustum of each camera sensor in the OpenGL\n"
273  "visualization. This can be useful for sensor debugging but also to understand\n"
274  "what's wrong in your controller. To turn this functionality on, add the\n"
275  "attribute \"show_frustum\" as follows:\n\n"
276  " <controllers>\n"
277  " ...\n"
278  " <my_controller ...>\n"
279  " ...\n"
280  " <sensors>\n"
281  " ...\n"
282  " <cameras implementation=\"default\" show_frustum=\"true\"/>\n"
283  " ...\n"
284  " </sensors>\n"
285  " ...\n"
286  " </my_controller>\n"
287  " ...\n"
288  " </controllers>\n\n"
289  "To add a camera to the plugin, create a camera node as shown in the following\n"
290  "example. A camera is defined by its range (how close and how far the camera\n"
291  "can see), its anchor and its position and orientation offsets from that\n"
292  "that anchor, its focal length and principle point (which define the\n"
293  "projection matrix), and its resolution.\n\n"
294  " <controllers>\n"
295  " ...\n"
296  " <my_controller ...>\n"
297  " ...\n"
298  " <sensors>\n"
299  " ...\n"
300  " <cameras implementation=\"default\" show_frustum=\"true\">\n"
301  " <camera id=\"camera0\" range=\"0.025:0.25\" anchor=\"origin\"\n"
302  " position=\"0.1,0,0.1\" orientation=\"90,-90,0\"\n"
303  " focal_length=\"800,800\" principle_point=\"320,240\"\n"
304  " resolution=\"640,480\"/>\n"
305  " </cameras>\n"
306  " ...\n"
307  " </sensors>\n"
308  " ...\n"
309  " </my_controller>\n"
310  " ...\n"
311  " </controllers>\n\n"
312  "To run an algorithm on the camera sensor, simply add the algorithm as a node\n"
313  "under the camera node. At the time of writing, three algorithms are available\n"
314  "by default: led_detector, directional_led_detector, and tag_detector. Each of\n"
315  "algorithms requires a medium attribute that specifies the medium where the\n"
316  "target entities are indexed. By setting the show_rays attribute to true, you\n"
317  "can see whether or not a target was partially occluded by another object in\n"
318  "the simulation. For example:\n\n"
319  " <controllers>\n"
320  " ...\n"
321  " <my_controller ...>\n"
322  " ...\n"
323  " <sensors>\n"
324  " ...\n"
325  " <cameras implementation=\"default\" show_frustum=\"true\">\n"
326  " <camera id=\"camera0\" range=\"0.025:0.25\" anchor=\"origin\"\n"
327  " position=\"0.1,0,0.1\" orientation=\"90,-90,0\"\n"
328  " focal_length=\"800,800\" principle_point=\"320,240\"\n"
329  " resolution=\"640,480\">\n"
330  " <led_detector medium=\"leds\" show_rays=\"true\"/>\n"
331  " </camera>\n"
332  " </cameras>\n"
333  " ...\n"
334  " </sensors>\n"
335  " ...\n"
336  " </my_controller>\n"
337  " ...\n"
338  " </controllers>\n",
339  "Usable");
}
An entity that contains a pointer to the user-defined controller.
CSquareMatrix< DIM > GetInverse() const
Definition: squarematrix.h:120
A 3D vector class.
Definition: vector3.h:29
std::vector< std::pair< bool, CRay3 > > & GetCheckedRays()
Returns the list of checked rays.
void GetNodeAttributeOrDefault(TConfigurationNode &t_node, const std::string &str_attribute, T &t_buffer, const T &t_default)
Returns the value of a node's attribute, or the passed default value.
#define THROW_ARGOSEXCEPTION(message)
This macro throws an ARGoS exception with the passed message.
Real GetX() const
Returns the x coordinate of this vector.
Definition: vector2.h:78
virtual void Init(TConfigurationNode &t_tree)
Initializes the sensor from the XML configuration tree.
Real GetX() const
Returns the x coordinate of this vector.
Definition: vector3.h:93
CVector3 & Rotate(const CQuaternion &c_quaternion)
Rotates this vector by the given quaternion.
Definition: vector3.cpp:25
Real GetY() const
Returns the y coordinate of this vector.
Definition: vector3.h:109
CRotationMatrix3 GetRotationMatrix() const
virtual void SetRobot(CComposableEntity &c_entity)
Sets the entity associated to this sensor.
ticpp::Element TConfigurationNode
The ARGoS configuration XML node.
Real GetY() const
Returns the y coordinate of this vector.
Definition: vector2.h:94
This entity is a link to a body in the physics engine.
#define THROW_ARGOSEXCEPTION_NESTED(message, nested)
This macro throws an ARGoS exception with the passed message and nesting the passed exception...
CControllableEntity * m_pcControllableEntity
SInterface::TVector m_vecInterfaces
void SetFromComponents(const CRotationMatrix3 &c_rotation, const CVector3 &c_translation)
Basic class for an entity that contains other entities.
const std::vector< std::pair< bool, CRay3 > > & GetCheckedRays() const
static TYPE * New(const std::string &str_label)
Creates a new object of type TYPE
Definition: factory_impl.h:48
A 2D vector class.
Definition: vector2.h:25
void SetX(const Real f_x)
Sets the x coordinate of this vector.
Definition: vector3.h:101
An anchor related to the body of an entity.
Definition: physics_model.h:38
std::vector< SSensor > m_vecSensors
virtual void Init(TConfigurationNode &t_node)
Initializes the sensor from the XML configuration tree.
Definition: ci_sensor.h:54
ticpp::Iterator< ticpp::Element > TConfigurationNodeIterator
The iterator for the ARGoS configuration XML node.
void GetNodeAttribute(TConfigurationNode &t_node, const std::string &str_attribute, T &t_buffer)
Returns the value of a node's attribute.
The exception that wraps all errors in ARGoS.
static const CVector3 Z
The z axis.
Definition: vector3.h:40
void SetY(const Real f_y)
Sets the y coordinate of this vector.
Definition: vector3.h:117
CVector3 & CrossProduct(const CVector3 &c_vector3)
Calculates the cross product between this vector and the passed one.
Definition: vector3.h:361
const SAnchor & GetAnchor(const std::string &str_id) const
Returns the wanted anchor as a const reference.
virtual void Update()
Updates the state of the entity associated to this sensor.
The namespace containing all the ARGoS related code.
Definition: ci_actuator.h:12
Real GetZ() const
Returns the z coordinate of this vector.
Definition: vector3.h:125
void SetZ(const Real f_z)
Sets the z coordinate of this vector.
Definition: vector3.h:133
CVector3 & Normalize()
Normalizes this vector.
Definition: vector3.h:215
virtual void Update(const CSquareMatrix< 3 > &c_projection_matrix, const std::array< CPlane, 6 > &arr_frustum_planes, const CTransformationMatrix3 &c_world_to_camera_transform, const CVector3 &c_camera_location, const CVector3 &c_bounding_box_position, const CVector3 &c_bounding_box_half_extents)=0
CEntity & GetComponent(const std::string &str_component)
Returns the component with the passed string label.
REGISTER_SENSOR(CEyeBotLightRotZOnlySensor,"eyebot_light","rot_z_only","Carlo Pinciroli [ilpincy@gmail.com]","1.0","The eye-bot light sensor (optimized for 2D).","This sensor accesses a set of light sensors. The sensors all return a value\n""between 0 and 1, where 0 means nothing within range and 1 means the perceived\n""light saturates the sensor. Values between 0 and 1 depend on the distance of\n""the perceived light. Each reading R is calculated with R=(I/x)^2, where x is the\n""distance between a sensor and the light, and I is the reference intensity of the\n""perceived light. The reference intensity corresponds to the minimum distance at\n""which the light saturates a sensor. The reference intensity depends on the\n""individual light, and it is set with the \"intensity\" attribute of the light\n""entity. In case multiple lights are present in the environment, each sensor\n""reading is calculated as the sum of the individual readings due to each light.\n""In other words, light wave interference is not taken into account. In\n""controllers, you must include the ci_light_sensor.h header.\n\n""REQUIRED XML CONFIGURATION\n\n"" <controllers>\n"" ...\n"" <my_controller ...>\n"" ...\n"" <sensors>\n"" ...\n"" <eyebot_light implementation=\"rot_z_only\" />\n"" ...\n"" </sensors>\n"" ...\n"" </my_controller>\n"" ...\n"" </controllers>\n\n""OPTIONAL XML CONFIGURATION\n\n""It is possible to draw the rays shot by the light sensor in the OpenGL\n""visualization. This can be useful for sensor debugging but also to understand\n""what's wrong in your controller. In OpenGL, the rays are drawn in cyan when\n""they are not obstructed and in purple when they are. 
In case a ray is\n""obstructed, a black dot is drawn where the intersection occurred.\n""To turn this functionality on, add the attribute \"show_rays\" as in this\n""example:\n\n"" <controllers>\n"" ...\n"" <my_controller ...>\n"" ...\n"" <sensors>\n"" ...\n"" <eyebot_light implementation=\"rot_z_only\"\n"" show_rays=\"true\" />\n"" ...\n"" </sensors>\n"" ...\n"" </my_controller>\n"" ...\n"" </controllers>\n\n""It is possible to add uniform noise to the sensors, thus matching the\n""characteristics of a real robot better. This can be done with the attribute\n""\"noise_level\", whose allowed range is in [-1,1] and is added to the calculated\n""reading. The final sensor reading is always normalized in the [0-1] range.\n\n"" <controllers>\n"" ...\n"" <my_controller ...>\n"" ...\n"" <sensors>\n"" ...\n"" <eyebot_light implementation=\"rot_z_only\"\n"" noise_level=\"0.1\" />\n"" ...\n"" </sensors>\n"" ...\n"" </my_controller>\n"" ...\n"" </controllers>\n\n""OPTIONAL XML CONFIGURATION\n\n""None.\n","Usable")