<!DOCTYPE html>
<html lang="en">
<!-- Head -->
<head> <!-- Metadata, OpenGraph and Schema.org -->
<!-- Standard metadata -->
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>The ECMD Datasets</title>
<meta name="author" content="The ECMD Datasets" />
<meta name="description" content="An Event-Centric Multisensory Driving Dataset for SLAM." />
<meta name="keywords" content="Event-based Vision, Multi-sensor, SLAM, Autonomous Driving" />
<!-- OpenGraph -->
<meta property="og:site_name" content="The ECMD Datasets " />
<meta property="og:type" content="website" />
<meta property="og:title" content="The ECMD Datasets | Home" />
<meta property="og:url" content="http://localhost:4000/ecmd/" />
<meta property="og:description" content="An Event-Centric Multisensory Driving Dataset for SLAM.
" />
<meta property="og:locale" content="en" />
<!-- Twitter card -->
<meta name="twitter:card" content="summary" />
<meta name="twitter:title" content="Home" />
<meta name="twitter:description" content="An Event-Centric Multisensory Driving Dataset for SLAM.
" />
<!-- Schema.org -->
<script type="application/ld+json">
{
"author": {
"@type": "Person",
"name": "The ECMD Datasets"
},
"url": "https://arclab-hku.github.io/ecmd/",
"@type": "WebSite",
"description": "An Event-Centric Multisensory Driving Dataset for SLAM.",
"headline": "Home",
"sameAs": ["https://github.com/arclab-hku/Event_based_VO-VIO-SLAM", "https://arclab.hku.hk/"],
"name": "The ECMD Datasets",
"@context": "https://schema.org"
}
</script>
<!-- Bootstrap & MDB -->
<link href="https://cdn.jsdelivr.net/npm/[email protected]/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha256-DF7Zhf293AJxJNTmh5zhoYYIMs2oXitRfBjY+9L//AY=" crossorigin="anonymous">
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/[email protected]/css/mdb.min.css" integrity="sha256-jpjYvU3G3N6nrrBwXJoVEYI/0zw8htfFnhT9ljN3JJw=" crossorigin="anonymous" />
<!-- Fonts & Icons -->
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@fortawesome/[email protected]/css/all.min.css" integrity="sha256-mUZM63G8m73Mcidfrv5E+Y61y7a12O5mW4ezU3bxqW4=" crossorigin="anonymous">
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/[email protected]/css/academicons.min.css" integrity="sha256-i1+4qU2G2860dGGIOJscdC30s9beBXjFfzjWLjBRsBg=" crossorigin="anonymous">
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link rel="stylesheet" type="text/css" href="https://fonts.googleapis.com/css2?family=Barriecito&family=Poppins:ital,wght@0,400;0,500;0,600;0,700;1,400;1,500;1,600;1,700">
<!-- Code Syntax Highlighting -->
<link rel="stylesheet" href="https://cdn.jsdelivr.net/gh/jwarby/jekyll-pygments-themes@master/PASTIE.css" media="none" id="highlight_theme_light" />
<!-- Styles -->
<link rel="shortcut icon" href="https://arclab-hku.github.io/ecmd/assets/img/hku_logo.png"/>
<link rel="stylesheet" href="https://arclab-hku.github.io/ecmd/assets/css/main.css">
<link rel="canonical" href="http://localhost:4000/ecmd/">
<link rel="stylesheet" href="https://arclab-hku.github.io/ecmd/assets/css/fonts.css">
<link rel="stylesheet" href="/ecmd/assets/css/fonts.css">
<!-- Dark Mode -->
</head>
<!-- Body -->
<body class="fixed-top-nav ">
<!-- Header -->
<header>
<!-- Nav Bar -->
<nav id="navbar" class="navbar navbar-light navbar-expand-sm fixed-top">
<div class="container">
<!-- Navbar Toggle -->
<button class="navbar-toggler collapsed ml-auto" type="button" data-toggle="collapse" data-target="#navbarNav" aria-controls="navbarNav" aria-expanded="false" aria-label="Toggle navigation">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar top-bar"></span>
<span class="icon-bar middle-bar"></span>
<span class="icon-bar bottom-bar"></span>
</button>
<div class="collapse navbar-collapse text-right" id="navbarNav">
<ul class="navbar-nav ml-auto flex-nowrap">
<!-- Home -->
<li class="nav-item active">
<a class="nav-link" href="/ecmd/">Home<span class="sr-only">(current)</span></a>
</li>
<!-- Other pages -->
<!-- <li class="nav-item dropdown ">
<a class="nav-link dropdown-toggle" href="#" id="navbarDropdown" role="button" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">Sensors</a>
<div class="dropdown-menu dropdown-menu-right" aria-labelledby="navbarDropdown">
<a class="dropdown-item" href="/ECMD/about/sensor/">Sensor Suite</a>
<div class="dropdown-divider"></div>
<a class="dropdown-item" href="/ECMD/about/synchronization/">Synchronization</a>
<div class="dropdown-divider"></div>
<a class="dropdown-item" href="/ECMD/about/ground_truth/">Ground Truth</a>
</div>
</li> -->
<li class="nav-item ">
<a class="nav-link" href="/ecmd/sensors/">Sensors</a>
</li>
<li class="nav-item ">
<a class="nav-link" href="/ecmd/calibration/">Calibration</a>
</li>
<li class="nav-item ">
<a class="nav-link" href="/ecmd/download/">Download</a>
</li>
<!-- <li class="nav-item dropdown ">
<a class="nav-link dropdown-toggle" href="#" id="navbarDropdown" role="button" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">Contact Us</a>
<div class="dropdown-menu dropdown-menu-right" aria-labelledby="navbarDropdown">
<a class="dropdown-item" href="https://github.com/mgaoling/mpl_calibration_toolbox/issues" target="_blank" rel="noopener noreferrer">Calibration Issue</a>
<div class="dropdown-divider"></div>
<a class="dropdown-item" href="https://github.com/mgaoling/mpl_dataset_toolbox/issues" target="_blank" rel="noopener noreferrer">Dataset Issue</a>
</div>
</li> -->
</ul>
</div>
</div>
</nav>
</header>
<!-- Content -->
<div class="container mt-5">
<!-- home.html -->
<div class="post">
<header class="post-header">
<h1 class="post-title">
<span style="font-weight: 600;">The ECMD Datasets</span>
</h1>
<p class="desc">
An Event-Centric Multisensory Driving Dataset for SLAM.
</p>
</header>
<article>
<div class="clearfix">
<p><br></p>
<!-- <picture style="text-align: center;">
<img class="img-fluid rounded z-depth-1" src="https://arclab-hku.github.io/ecmd/assets/gif/total_homepage.gif" title="total_homepage">
</picture> -->
<div style="text-align: center;">
<picture>
<img style="width: 90%;" class="img-fluid rounded z-depth-1" src="https://arclab-hku.github.io/ecmd/assets/gif/sensor_video_new.gif" title="sensor_video">
</picture>
</div>
<p><br></p>
<div style="text-align: center;">
<picture>
<img style="width: 90%;" class="img-fluid rounded z-depth-1" src="https://arclab-hku.github.io/ecmd/assets/gif/homepage_vis.gif" title="homepage_vis">
</picture>
</div>
<p><br></p>
<p style="text-align: justify;">
This is first event-based SLAM datasets specifically focus on urbanized autonomou driving.
We explore the inquiry: Are event cameras ready for autonomous driving?
At the same time, we also investigates the perceptual capabilities of various sensors, including LiDAR, standard cameras, infrared cameras, and GNSS-RTK/INS.
<!-- After the paper is accepted, we will release all the datasets and keep the dataset maintenance. -->
<br>
The contributions of our work can be summarized as follows:
</p>
<ul>
<li style="text-align: justify;"> Our <a href="/ecmd/sensors">sensor platform</a> consists of various novel sensors,
including two sets of stereo event cameras with distinct resolutions (640×480, 346×260),
an infrared camera,
stereo industrial cameras,
three mechanical LiDARs (including two slanted LiDARs),
an onboard inertial measurement unit (IMU),
and two global navigation satellite system (GNSS) receivers.
For ground truth, we adopt a centimeter-level positioning system that combines GNSS real-time kinematic (RTK)
with a fiber-optic gyroscope integrated inertial system, referred to as GNSS-RTK/INS.
</li>
<!-- <li style="text-align: justify;">
ALl sensors are <a href="/ecmd/calibration">well-calibrated</a> and temporally synchronized at the hardware level, with recording data simultaneously.
</li> -->
<li style="text-align: justify;"> ECMD collects 81 sequences covering over 200 kilometers of trajectories in various driving scenarios,
including dense streets, urban, tunnels, highways, bridges, and suburbs.
These <a href="/ecmd/download">sequences</a> are recorded under daylight and nighttime, providing challenging situations for Visual and LiDAR SLAM, e.g.,
dynamic objects, high-speed motion, repetitive scenarios, and HDR scenes.
Meanwhile, we evaluate existing state-of-the-art visual and LiDAR SLAM algorithms with various sensor modalities on our datasets.
Moreover, our dataset and benchmark results are released publicly available on our website.
</li>
<!-- <li style="text-align: justify;"> We present a comprehensive <a href="/ecmd/benchmark">benchmark</a> that evaluates existing state-of-the-art SLAM algorithms of various
sensor modalities and analyzes their limitations.
</li> -->
</ul>
<p style="text-align: justify;">
We hope that we can make some contributions for the development of event-based vision, especially event-based multi-sensor fusioin for autonomous driving.
<br>
The visualization of each sequence is available in <a href="/ecmd/download">Download section</a> and <a href="https://www.bilibili.com/video/BV1Km4y157KC/?spm_id_from=333.999.0.0&vd_source=a88e426798937812a8ffc1a9be5a3cb7">Bilibili</a>.
<br>
If you have any suggestions or questions, do not hesitate to propose an issue to our <a href="https://github.com/arclab-hku/ecmd">Github Repository</a>.
</p>
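<p style="text-align: justify;">
As a quick-start reference, below is a minimal Python sketch for inspecting a downloaded sequence.
It is only a sketch: it assumes the sequence comes as a ROS1 bag and that events are published as dvs_msgs/EventArray-style messages; the topic name used here is illustrative, so list each bag's topics first.
</p>
<pre><code class="language-python">
import rosbag  # ROS1 Python API for reading bag files

with rosbag.Bag("sequence.bag") as bag:
    # Print every topic with its message type and count.
    info = bag.get_type_and_topic_info()
    for topic, topic_info in info.topics.items():
        print(topic, topic_info.msg_type, topic_info.message_count)

    # Iterate over event packets; "/davis/left/events" is an
    # illustrative topic name, not necessarily the one used in ECMD.
    for topic, msg, t in bag.read_messages(topics=["/davis/left/events"]):
        for e in msg.events:  # each event carries x, y, timestamp, polarity
            x, y, ts, polarity = e.x, e.y, e.ts, e.polarity
        break  # first packet only, as a smoke test
</code></pre>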
<p><br></p>
</div>
<!-- News -->
<div class="news">
<h2>News</h2>
<div class="table-responsive">
<table class="table table-sm table-borderless">
<tr>
<th scope="row">December 2, 2023</th>
<td>
We release our GNSS-RTK/INS ground truth and M8T/F9P GNSS data in the <a href="/ecmd/download">Download section</a>.
</td>
</tr>
<tr>
<th scope="row">November 28, 2023</th>
<td>
Our work has been accepted by IEEE Transactions on Intelligent Vehicles!
</td>
</tr>
<tr>
<th scope="row">November 21, 2023</th>
<td>
Calibration results and rosbags are available on the <a href="/ecmd/calibration">Calibration page</a>.
</td>
</tr>
<tr>
<th scope="row">November 19, 2023</th>
<td>
We release our sequences in the <a href="/ecmd/download">Download section</a>.
</td>
</tr>
<tr>
<th scope="row">November 07, 2023</th>
<td>
The preprint version is available at <a href="https://arxiv.org/abs/2311.02327">arXiv</a>.
</td>
</tr>
<tr>
<th scope="row">October 31, 2023</th>
<td>
Watch our video presentation on <a href="https://www.bilibili.com/video/BV1pN411s79g/?spm_id_from=333.999.list.card_archive.click&vd_source=a88e426798937812a8ffc1a9be5a3cb7">Bilibili</a> or <a href="https://youtu.be/Q1F9M_DZLws">YouTube</a>.
</td>
</tr>
<tr>
<th scope="row">August 28, 2023</th>
<td>
We finished the evaluation of ECMD using various LiDAR SLAM methods (<a href="https://space.bilibili.com/499377825/channel/collectiondetail?sid=1835333">recorded videos</a>).
</td>
</tr>
<tr>
<th scope="row">August 22, 2023</th>
<td>
We completed the collection of all sequences (<a href="https://www.bilibili.com/video/BV1Km4y157KC/?share_source=copy_web&vd_source=f5ac2a23210d1b4b7b4aa5e374feae10">Bilibili visualization</a>).
</td>
</tr>
<tr>
<th scope="row">June 2, 2023</th>
<td>
Driver code and time synchronization of event cameras are now available (<a href="https://github.com/arclab-hku/Event_based_VO-VIO-SLAM/tree/main/driver_code/dv-ros-master/script">Code</a>, <a href="https://www.bilibili.com/video/BV168411o7BJ/?spm_id_from=333.999.0.0&vd_source=a88e426798937812a8ffc1a9be5a3cb7">Bilibili</a>).
</td>
</tr>
<tr>
<th scope="row">June 1, 2023</th>
<td>
The ECMD Datasets website goes live!
</td>
</tr>
</table>
</div>
</div>
<!-- Selected papers -->
<div class="publications">
<h2>BibTeX</h2>
Please cite the following publication when using this benchmark in an academic context:
<h2></h2>
<ol class="bibliography"><li>
<!-- _layouts/bib.html -->
<div class="row">
<!-- <div class="col-sm-2 abbr"><abbr class="badge">RA-L</abbr></div> -->
<!-- The RA-L badge above seems to have a display issue -->
<!-- Entry bib key -->
<div id="ecmd2023" class="col-sm-8">
<!-- Author -->
<div class="paper" style="text-align: justify;">P. Chen, W. Guan, F. Huang, Y. Zhong, W. Wen, L. Hsu, and P. Lu.
ECMD: An Event-Centric Multisensory Driving Dataset for SLAM.
IEEE Transactions on Intelligent Vehicles, vol. 9, no. 1, pp. 407-416, 2023.
</div>
<!-- Title -->
<!-- <div class="title" style="text-align: justify;">ECMD: An Event-Centric Multisensory Driving Dataset for SLAM.</div> -->
<!-- Journal/Book title and date -->
<!-- <div class="periodical" style="text-align: justify;">
IEEE Transactions on Intelligent Vehicles, vol. 9, no. 1, pp. 407-416, 2023.
</div> -->
<!-- Links/Buttons -->
<div class="links">
<a class="abstract btn btn-sm z-depth-0" role="button">Abs</a>
<a href="https://arxiv.org/abs/2311.02327" class="btn btn-sm z-depth-0" role="button" target="_blank" rel="noopener noreferrer">arXiv</a>
<a href="https://arclab-hku.github.io/ecmd/assets/pdf/2311.02327.pdf" class="btn btn-sm z-depth-0" role="button">PDF</a>
<!-- <a href="/ecmd/assets/pdf/supplementary_material.pdf" class="btn btn-sm z-depth-0" role="button">Supp</a> -->
<!-- The three links above go, in order: the arXiv page, our paper's PDF, and the supplementary material if there is one (omit otherwise) -->
</div>
<!-- Hidden abstract block -->
<div class="abstract hidden">
<p style="text-align: justify;">
Leveraging multiple sensors enhances complex environmental perception and increases resilience to varying luminance conditions and high-speed motion patterns, achieving precise localization and mapping.
This paper proposes ECMD, an event-centric multisensory dataset containing 81 sequences and covering over 200 km of various challenging driving scenarios including high-speed motion, repetitive scenarios, dynamic objects, etc.
ECMD provides data from two sets of stereo event cameras with different resolutions (640×480, 346×260), stereo industrial cameras, an infrared camera, a top-installed mechanical LiDAR with two slanted LiDARs,
two consumer-level GNSS receivers, and an onboard IMU.
Meanwhile, the ground-truth of the vehicle was obtained using a centimeter-level high-accuracy GNSS-RTK/INS navigation system.
All sensors are well-calibrated and temporally synchronized at the hardware level, with data recorded simultaneously.
We additionally evaluate several state-of-the-art SLAM algorithms for benchmarking visual and LiDAR SLAM and identifying their limitations.
<!-- The full dataset can be found at \url{https://arclab-hku.github.io/ecmd/}. -->
</p>
</div>
</div>
</div>
</li></ol>
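<p style="text-align: justify;">
For convenience, here is a BibTeX entry consistent with the reference above. The entry key follows this page's anchor, and author names are kept as initials exactly as listed; please verify the final fields against IEEE Xplore.
</p>
<pre><code class="language-bibtex">
@article{ecmd2023,
  author  = {Chen, P. and Guan, W. and Huang, F. and Zhong, Y. and Wen, W. and Hsu, L. and Lu, P.},
  title   = {{ECMD}: An Event-Centric Multisensory Driving Dataset for {SLAM}},
  journal = {IEEE Transactions on Intelligent Vehicles},
  volume  = {9},
  number  = {1},
  pages   = {407--416},
  year    = {2023}
}
</code></pre>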
</div>
<br>
<div class="Other resources">
<h2>Other resources</h2>
<p style="text-align: justify;">
Some tools for pre-processing the dataset
and HKU event-based handheld & drone dataset are available at <a href="https://github.com/arclab-hku/Event_based_VO-VIO-SLAM" target="_blank" rel="noopener noreferrer">here</a>.
</p>
</div>
<!-- License -->
<div class="License">
<h2>License</h2>
<p>
This work is released under the <a href="https://www.gnu.org/licenses/" target="_blank" rel="noopener noreferrer">GPLv3</a> license.
For commercial inquiries, please contact Dr. Peng Lu ([email protected]).
</p>
</div>
<br>
<br>
<!-- Acknowledgements -->
<div class="Acknowledgement">
<h2>Acknowledgement</h2>
<p style="text-align: justify;">
This work was supported in part by the General Research Fund under Grant 17204222, and in part by the Seed Fund for Collaborative Research and the General Funding Scheme of the HKU-TCL Joint Research Center for Artificial Intelligence.
</p>
</div>
<br>
<br>
</article>
</div>
</div>
<!-- Footer -->
<footer class="nofixed-bottom">
<div class="container mt-0" style="width:100%;text-align:center;">
© 2023 Adaptive Robotic Controls Lab (ArcLab), The University of Hong Kong. All rights reserved.
</div>
</footer>
<!-- JavaScripts -->
<!-- jQuery -->
<script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/jquery.min.js" integrity="sha256-/xUj+3OJU5yExlq6GSYGSHk7tPXikynS7ogEvDej/m4=" crossorigin="anonymous"></script>
<!-- Bootstrap & MDB scripts -->
<script src="https://cdn.jsdelivr.net/npm/@popperjs/[email protected]/dist/umd/popper.min.js" integrity="sha256-l/1pMF/+J4TThfgARS6KwWrk/egwuVvhRzfLAMQ6Ds4=" crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/js/bootstrap.min.js" integrity="sha256-SyTu6CwrfOhaznYZPoolVw2rxoY7lKYKQvqbtqN93HI=" crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/[email protected]/js/mdb.min.js" integrity="sha256-NdbiivsvWt7VYCt6hYNT3h/th9vSTL4EDWeGs5SN3DA=" crossorigin="anonymous"></script>
<!-- Masonry & imagesLoaded -->
<script defer src="https://cdn.jsdelivr.net/npm/[email protected]/dist/masonry.pkgd.min.js" integrity="sha256-Nn1q/fx0H7SNLZMQ5Hw5JLaTRZp0yILA/FRexe19VdI=" crossorigin="anonymous"></script>
<script defer src="https://cdn.jsdelivr.net/npm/imagesloaded@4/imagesloaded.pkgd.min.js"></script>
<script defer src="/ecmd/assets/js/masonry.js" type="text/javascript"></script>
<!-- Medium Zoom JS -->
<script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/medium-zoom.min.js" integrity="sha256-EdPgYcPk/IIrw7FYeuJQexva49pVRZNmt3LculEr7zM=" crossorigin="anonymous"></script>
<script src="/ecmd/assets/js/zoom.js"></script><!-- Load Common JS -->
<script src="/ecmd/assets/js/common.js"></script>
<!-- MathJax -->
<script type="text/javascript">
window.MathJax = {
tex: {
tags: 'ams'
}
};
</script>
<script defer type="text/javascript" id="MathJax-script" src="https://cdn.jsdelivr.net/npm/[email protected]/es5/tex-mml-chtml.js"></script>
<script defer src="https://polyfill.io/v3/polyfill.min.js?features=es6"></script>
</body>
</html>