syntax = "proto2";
option optimize_for = SPEED;
import "osi_version.proto";
import "osi_common.proto";
import "osi_groundtruth.proto";
import "osi_sensorviewconfiguration.proto";
import "osi_hostvehicledata.proto";
package osi3;
//
// \brief The sensor view is derived from \c GroundTruth and used as
// input to sensor models.
//
// The sensor view provides the input to sensor models for the simulation
// of real sensors.
// All information regarding the environment is given with respect to
// the virtual sensor coordinate system specified in
// \c SensorView::mounting_position, except for the individual physical
// technology-specific data, which is given with respect to the physical
// sensor coordinate system specified in the corresponding physical sensor's
// \c #mounting_position, and the \c #global_ground_truth, which is given in
// global coordinates.
//
// When simulating multiple distinct sensors, each sensor can consume an
// individual copy of the \c SensorView interface. This allows an independent
// treatment of the sensors.
//
// Alternatively, combined sensor models can also consume one combined
// \c SensorView, with either combined or separate \c SensorData outputs,
// depending on model architecture.
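//
// As an illustration only, a minimal \c SensorView could look as follows
// in protobuf text format (all values are hypothetical):
//
//   version { version_major: 3 version_minor: 0 version_patch: 0 }
//   timestamp { seconds: 12 nanos: 500000000 }
//   sensor_id { value: 2000 }
//   mounting_position {
//     position { x: 1.5 y: 0.0 z: 1.3 }
//     orientation { roll: 0.0 pitch: 0.0 yaw: 0.0 }
//   }
//   host_vehicle_id { value: 114 }
//   global_ground_truth { ... }
//
// The content of \c #global_ground_truth is omitted here for brevity.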
//
message SensorView
{
// The interface version used by the sender (simulation environment).
//
// \rules
// is_set
// \endrules
//
optional InterfaceVersion version = 1;
// The data timestamp of the simulation environment. The zero time point
// is arbitrary but must be identical for all messages; it does not need
// to coincide with the UNIX epoch. The recommended zero time point is
// the start time of the simulation.
//
// \note For sensor view data this timestamp coincides both with the
// notional simulation time the data applies to and the time it was sent
// (there is no inherent latency for sensor view data, as opposed to
// sensor data).
//
// \rules
// is_set
// \endrules
//
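// For example (hypothetical value), a data time of 0.04 s after the zero
// time point is encoded in protobuf text format as
//
//   timestamp { seconds: 0 nanos: 40000000 }
//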
optional Timestamp timestamp = 2;
// The ID of the sensor at the host vehicle's \c #mounting_position.
//
// This is the ID of the virtual sensor, to be used in its detected
// object output; it is distinct from the IDs of its physical detectors,
// which are used in the detected features.
//
// \rules
// is_set
// \endrules
//
optional Identifier sensor_id = 3;
// The virtual mounting position of the sensor (origin and orientation of
// the sensor frame). Both origin and orientation are given in and with
// respect to the host vehicle coordinate system [1].
//
// The virtual position pertains to the sensor as a whole, regardless of the
// actual position of individual physical detectors, and governs the
// sensor-relative coordinates in detected objects of the sensor as a whole.
// Individual features detected by individual physical detectors are
// governed by the actual physical mounting positions of the detectors, as
// indicated in the technology-specific sub-views and sub-view
// configurations.
//
// \arg \b x-direction of sensor coordinate system: sensor viewing direction
// \arg \b z-direction of sensor coordinate system: sensor (up)
// \arg \b y-direction of sensor coordinate system: perpendicular to x and z,
// completing a right-hand system
//
// \par Reference:
// [1] DIN Deutsches Institut fuer Normung e. V. (2013). <em>DIN ISO 8855 Strassenfahrzeuge - Fahrzeugdynamik und Fahrverhalten - Begriffe</em>. (DIN ISO 8855:2013-11). Berlin, Germany.
//
// \note This field is usually static during the simulation.
// \note The origin of the vehicle's coordinate system in the world frame is
// ( \c MovingObject::base . \c BaseMoving::position +
// Inverse_Rotation_yaw_pitch_roll( \c MovingObject::base . \c
// BaseMoving::orientation) * \c
// MovingObject::VehicleAttributes::bbcenter_to_rear) . The orientation of
// the vehicle's coordinate system is equal to the orientation of the
// vehicle's bounding box \c MovingObject::base . \c
// BaseMoving::orientation.
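//
// As a worked example with hypothetical values: for
// \c BaseMoving::position = (10.0, 5.0, 0.75), a zero
// \c BaseMoving::orientation (so the inverse rotation is the identity),
// and \c bbcenter_to_rear = (-1.4, 0.0, -0.45), the origin of the
// vehicle's coordinate system in the world frame is
// (10.0 - 1.4, 5.0 + 0.0, 0.75 - 0.45) = (8.6, 5.0, 0.3).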
//
// \rules
// is_set
// \endrules
//
optional MountingPosition mounting_position = 4;
// The root mean squared error of the mounting position.
//
optional MountingPosition mounting_position_rmse = 5;
// Host vehicle data.
//
// Host vehicle data is data that the host vehicle knows about itself,
// e.g. from location sensors, internal sensors, and ECU bus data, that is
// made available to sensors as input.
//
optional HostVehicleData host_vehicle_data = 6;
// Ground truth w.r.t. global coordinate system.
//
// This is the ground truth that is provided to the sensor model by the
// simulation environment. It may be filtered as per the requirements of
// the sensor model as expressed by the \c SensorViewConfiguration
// message(s) that were exchanged during the simulation initialization
// phase.
//
// \note The host vehicle is always contained in the ground truth provided,
// regardless of any filtering. The ground truth MUST contain at least as
// much ground truth data as is requested by the sensor model, but MAY
// always contain more data, since the filtering is intended only as an
// optimization mechanism, not as a replacement for proper sensor
// field-of-view modeling.
//
optional GroundTruth global_ground_truth = 7;
// The ID of the host vehicle in the \c #global_ground_truth data.
//
// \rules
// refers_to: 'MovingObject'
// is_set
// \endrules
//
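// For example (hypothetical ID), the entry
//
//   host_vehicle_id { value: 114 }
//
// refers to the \c MovingObject in \c #global_ground_truth whose
// \c MovingObject::id has the same value 114.
//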
optional Identifier host_vehicle_id = 8;
// Generic SensorView(s).
//
repeated GenericSensorView generic_sensor_view = 1000;
// Radar-specific SensorView(s).
//
repeated RadarSensorView radar_sensor_view = 1001;
// Lidar-specific SensorView(s).
//
repeated LidarSensorView lidar_sensor_view = 1002;
// Camera-specific SensorView(s).
//
repeated CameraSensorView camera_sensor_view = 1003;
// Ultrasonic-specific SensorView(s).
//
repeated UltrasonicSensorView ultrasonic_sensor_view = 1004;
}
//
// \brief Definition of the generic sensor view.
//
// Generic sensor view data.
//
message GenericSensorView
{
// Generic view configuration valid at the time the data was created.
//
optional GenericSensorViewConfiguration view_configuration = 1;
}
//
// \brief Definition of the radar sensor view.
//
// Radar specific sensor view data.
//
message RadarSensorView
{
// Radar view configuration valid at the time the data was created.
//
optional RadarSensorViewConfiguration view_configuration = 1;
// Ray tracing data.
//
// This field includes one entry for each ray, in left-to-right,
// top-to-bottom order (think of scan lines in a TV).
//
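// Assuming a dense grid of
// \c RadarSensorViewConfiguration::number_of_rays_horizontal by
// \c RadarSensorViewConfiguration::number_of_rays_vertical rays, this
// ordering implies that the reflection of the ray in (zero-based) row r
// and column c is found at index
// r * number_of_rays_horizontal + c. This is an illustrative reading of
// the ordering, not an additional constraint.
//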
repeated Reflection reflection = 2;
//
// \brief Definition of the radar reflection.
//
message Reflection
{
// Relative signal level of the reflection.
//
// This takes the combined antenna diagram (losses in TX and RX)
// as well as the signal losses due to scattering and absorption
// into account, and will, when multiplied by the TX power, yield the
// actual RX power.
//
// Unit: dB
//
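// As a numeric illustration (hypothetical values): a signal strength of
// -110 dB combined with a TX power of 30 dBm yields an RX power of
// 30 dBm - 110 dB = -80 dBm, since multiplying linear powers corresponds
// to adding their dB values.
//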
optional double signal_strength = 1;
// Time of flight.
//
// This is the time of flight of the reflection, which is directly
// proportional to the distance traveled.
//
// Unit: s
//
optional double time_of_flight = 2;
// Doppler shift.
//
// Shift in frequency relative to the specified TX frequency.
//
// Unit: Hz
//
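// As an illustrative physics note (not a constraint of this interface):
// for a monostatic radar and a target approaching with radial speed v,
// the expected two-way shift is approximately 2 * v * f_tx / c, where
// f_tx is the TX frequency and c is the speed of light.
//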
optional double doppler_shift = 3;
// TX horizontal angle (azimuth).
//
// Horizontal angle of incidence of the source of the reflection
// at the TX antenna.
//
// Unit: rad
//
optional double source_horizontal_angle = 4;
// TX vertical angle (elevation).
//
// Vertical angle of incidence of the source of the reflection
// at the TX antenna.
//
// Unit: rad
//
optional double source_vertical_angle = 5;
}
}
//
// \brief Definition of the lidar sensor view.
//
// Lidar specific sensor view data.
//
message LidarSensorView
{
// Lidar view configuration valid at the time the data was created.
//
optional LidarSensorViewConfiguration view_configuration = 1;
// Ray tracing data.
//
// This field includes one entry for each ray, in left-to-right,
// top-to-bottom order (think of scan lines in a TV).
//
repeated Reflection reflection = 2;
//
// \brief Definition of the lidar reflection.
//
message Reflection
{
// Relative signal level of the reflection.
//
// This takes the signal losses due to scattering and absorption
// into account, and will, when multiplied by the TX power, yield the
// potential RX power (disregarding any other RX/TX losses).
//
// Unit: dB
//
optional double signal_strength = 1;
// Time of flight.
//
// This is the time of flight of the reflection, which is directly
// proportional to the distance traveled.
//
// Unit: s
//
optional double time_of_flight = 2;
// Doppler shift.
//
// Shift in frequency relative to the specified TX frequency.
//
// Unit: Hz
//
optional double doppler_shift = 3;
// Normal to the surface at the point of reflection.
//
// The normal of the surface (object, road marking, etc.) that the
// transmitted beam encounters.
//
// \note The data is given in the lidar coordinate system.
//
// Unit: unit vector
//
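// For example (hypothetical values), a beam hitting a level road surface
// from above would yield
//
//   normal_to_surface { x: 0.0 y: 0.0 z: 1.0 }
//
// assuming the z-axis of the lidar coordinate system points upwards.
//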
optional Vector3d normal_to_surface = 5;
// The ID of the detected object that this reflection is associated with.
// It can be used for ray-tracing debugging.
//
// \note ID = MAX(uint64) indicates no reference to an object.
//
optional Identifier object_id = 6;
}
}
//
// \brief Definition of the camera sensor view.
//
// Camera specific sensor view data.
//
message CameraSensorView
{
// Camera view configuration valid at the time the data was created.
//
optional CameraSensorViewConfiguration view_configuration = 1;
// Raw image data.
//
// The raw image data in the memory layout specified by the camera
// sensor input configuration. The pixel order is specified in
// CameraSensorViewConfiguration.pixel_order with the
// default value PIXEL_ORDER_DEFAULT (i.e. left-to-right, top-to-bottom).
//
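// As an illustration (hypothetical layout): assuming a packed layout with
// PIXEL_ORDER_DEFAULT,
// \c CameraSensorViewConfiguration::number_of_pixels_horizontal columns,
// and b bytes per pixel, the data for the pixel in (zero-based) row v and
// column u starts at byte offset
// (v * number_of_pixels_horizontal + u) * b.
//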
optional bytes image_data = 2;
}
//
// \brief Definition of the ultrasonic sensor view.
//
// Ultrasonic specific sensor view data.
//
message UltrasonicSensorView
{
// Ultrasonic view configuration valid at the time the data was created.
//
optional UltrasonicSensorViewConfiguration view_configuration = 1;
}