updated accompanying tools
- .DS_Store +0 -0
- LICENSE +400 -0
- data/dataset_metadata.json +0 -0
- data/participants_metadata.csv +289 -0
- download.py +76 -0
- environment.yml +17 -0
- nymeria/__init__.py +5 -0
- nymeria/body_motion_provider.py +227 -0
- nymeria/data_provider.py +257 -0
- nymeria/data_viewer.py +237 -0
- nymeria/definitions.py +182 -0
- nymeria/download_utils.py +289 -0
- nymeria/handeye.py +91 -0
- nymeria/narration_provider.py +12 -0
- nymeria/path_provider.py +57 -0
- nymeria/recording_data_provider.py +251 -0
- nymeria/sequence_attributes.py +62 -0
- nymeria/xsens_constants.py +98 -0
- setup.py +18 -0
- viewer.py +46 -0
.DS_Store
ADDED
Binary file (6.15 kB).
LICENSE
ADDED
@@ -0,0 +1,400 @@
Attribution-NonCommercial 4.0 International

=======================================================================

Creative Commons Corporation ("Creative Commons") is not a law firm and
does not provide legal services or legal advice. Distribution of
Creative Commons public licenses does not create a lawyer-client or
other relationship. Creative Commons makes its licenses and related
information available on an "as-is" basis. Creative Commons gives no
warranties regarding its licenses, any material licensed under their
terms and conditions, or any related information. Creative Commons
disclaims all liability for damages resulting from their use to the
fullest extent possible.

Using Creative Commons Public Licenses

Creative Commons public licenses provide a standard set of terms and
conditions that creators and other rights holders may use to share
original works of authorship and other material subject to copyright
and certain other rights specified in the public license below. The
following considerations are for informational purposes only, are not
exhaustive, and do not form part of our licenses.

     Considerations for licensors: Our public licenses are
     intended for use by those authorized to give the public
     permission to use material in ways otherwise restricted by
     copyright and certain other rights. Our licenses are
     irrevocable. Licensors should read and understand the terms
     and conditions of the license they choose before applying it.
     Licensors should also secure all rights necessary before
     applying our licenses so that the public can reuse the
     material as expected. Licensors should clearly mark any
     material not subject to the license. This includes other CC-
     licensed material, or material used under an exception or
     limitation to copyright. More considerations for licensors:
     wiki.creativecommons.org/Considerations_for_licensors

     Considerations for the public: By using one of our public
     licenses, a licensor grants the public permission to use the
     licensed material under specified terms and conditions. If
     the licensor's permission is not necessary for any reason--for
     example, because of any applicable exception or limitation to
     copyright--then that use is not regulated by the license. Our
     licenses grant only permissions under copyright and certain
     other rights that a licensor has authority to grant. Use of
     the licensed material may still be restricted for other
     reasons, including because others have copyright or other
     rights in the material. A licensor may make special requests,
     such as asking that all changes be marked or described.
     Although not required by our licenses, you are encouraged to
     respect those requests where reasonable. More_considerations
     for the public:
     wiki.creativecommons.org/Considerations_for_licensees

=======================================================================

Creative Commons Attribution-NonCommercial 4.0 International Public
License

By exercising the Licensed Rights (defined below), You accept and agree
to be bound by the terms and conditions of this Creative Commons
Attribution-NonCommercial 4.0 International Public License ("Public
License"). To the extent this Public License may be interpreted as a
contract, You are granted the Licensed Rights in consideration of Your
acceptance of these terms and conditions, and the Licensor grants You
such rights in consideration of benefits the Licensor receives from
making the Licensed Material available under these terms and
conditions.

Section 1 -- Definitions.

  a. Adapted Material means material subject to Copyright and Similar
     Rights that is derived from or based upon the Licensed Material
     and in which the Licensed Material is translated, altered,
     arranged, transformed, or otherwise modified in a manner requiring
     permission under the Copyright and Similar Rights held by the
     Licensor. For purposes of this Public License, where the Licensed
     Material is a musical work, performance, or sound recording,
     Adapted Material is always produced where the Licensed Material is
     synched in timed relation with a moving image.

  b. Adapter's License means the license You apply to Your Copyright
     and Similar Rights in Your contributions to Adapted Material in
     accordance with the terms and conditions of this Public License.

  c. Copyright and Similar Rights means copyright and/or similar rights
     closely related to copyright including, without limitation,
     performance, broadcast, sound recording, and Sui Generis Database
     Rights, without regard to how the rights are labeled or
     categorized. For purposes of this Public License, the rights
     specified in Section 2(b)(1)-(2) are not Copyright and Similar
     Rights.

  d. Effective Technological Measures means those measures that, in the
     absence of proper authority, may not be circumvented under laws
     fulfilling obligations under Article 11 of the WIPO Copyright
     Treaty adopted on December 20, 1996, and/or similar international
     agreements.

  e. Exceptions and Limitations means fair use, fair dealing, and/or
     any other exception or limitation to Copyright and Similar Rights
     that applies to Your use of the Licensed Material.

  f. Licensed Material means the artistic or literary work, database,
     or other material to which the Licensor applied this Public
     License.

  g. Licensed Rights means the rights granted to You subject to the
     terms and conditions of this Public License, which are limited to
     all Copyright and Similar Rights that apply to Your use of the
     Licensed Material and that the Licensor has authority to license.

  h. Licensor means the individual(s) or entity(ies) granting rights
     under this Public License.

  i. NonCommercial means not primarily intended for or directed towards
     commercial advantage or monetary compensation. For purposes of
     this Public License, the exchange of the Licensed Material for
     other material subject to Copyright and Similar Rights by digital
     file-sharing or similar means is NonCommercial provided there is
     no payment of monetary compensation in connection with the
     exchange.

  j. Share means to provide material to the public by any means or
     process that requires permission under the Licensed Rights, such
     as reproduction, public display, public performance, distribution,
     dissemination, communication, or importation, and to make material
     available to the public including in ways that members of the
     public may access the material from a place and at a time
     individually chosen by them.

  k. Sui Generis Database Rights means rights other than copyright
     resulting from Directive 96/9/EC of the European Parliament and of
     the Council of 11 March 1996 on the legal protection of databases,
     as amended and/or succeeded, as well as other essentially
     equivalent rights anywhere in the world.

  l. You means the individual or entity exercising the Licensed Rights
     under this Public License. Your has a corresponding meaning.

Section 2 -- Scope.

  a. License grant.

       1. Subject to the terms and conditions of this Public License,
          the Licensor hereby grants You a worldwide, royalty-free,
          non-sublicensable, non-exclusive, irrevocable license to
          exercise the Licensed Rights in the Licensed Material to:

            a. reproduce and Share the Licensed Material, in whole or
               in part, for NonCommercial purposes only; and

            b. produce, reproduce, and Share Adapted Material for
               NonCommercial purposes only.

       2. Exceptions and Limitations. For the avoidance of doubt, where
          Exceptions and Limitations apply to Your use, this Public
          License does not apply, and You do not need to comply with
          its terms and conditions.

       3. Term. The term of this Public License is specified in Section
          6(a).

       4. Media and formats; technical modifications allowed. The
          Licensor authorizes You to exercise the Licensed Rights in
          all media and formats whether now known or hereafter created,
          and to make technical modifications necessary to do so. The
          Licensor waives and/or agrees not to assert any right or
          authority to forbid You from making technical modifications
          necessary to exercise the Licensed Rights, including
          technical modifications necessary to circumvent Effective
          Technological Measures. For purposes of this Public License,
          simply making modifications authorized by this Section 2(a)
          (4) never produces Adapted Material.

       5. Downstream recipients.

            a. Offer from the Licensor -- Licensed Material. Every
               recipient of the Licensed Material automatically
               receives an offer from the Licensor to exercise the
               Licensed Rights under the terms and conditions of this
               Public License.

            b. No downstream restrictions. You may not offer or impose
               any additional or different terms or conditions on, or
               apply any Effective Technological Measures to, the
               Licensed Material if doing so restricts exercise of the
               Licensed Rights by any recipient of the Licensed
               Material.

       6. No endorsement. Nothing in this Public License constitutes or
          may be construed as permission to assert or imply that You
          are, or that Your use of the Licensed Material is, connected
          with, or sponsored, endorsed, or granted official status by,
          the Licensor or others designated to receive attribution as
          provided in Section 3(a)(1)(A)(i).

  b. Other rights.

       1. Moral rights, such as the right of integrity, are not
          licensed under this Public License, nor are publicity,
          privacy, and/or other similar personality rights; however, to
          the extent possible, the Licensor waives and/or agrees not to
          assert any such rights held by the Licensor to the limited
          extent necessary to allow You to exercise the Licensed
          Rights, but not otherwise.

       2. Patent and trademark rights are not licensed under this
          Public License.

       3. To the extent possible, the Licensor waives any right to
          collect royalties from You for the exercise of the Licensed
          Rights, whether directly or through a collecting society
          under any voluntary or waivable statutory or compulsory
          licensing scheme. In all other cases the Licensor expressly
          reserves any right to collect such royalties, including when
          the Licensed Material is used other than for NonCommercial
          purposes.

Section 3 -- License Conditions.

Your exercise of the Licensed Rights is expressly made subject to the
following conditions.

  a. Attribution.

       1. If You Share the Licensed Material (including in modified
          form), You must:

            a. retain the following if it is supplied by the Licensor
               with the Licensed Material:

                 i. identification of the creator(s) of the Licensed
                    Material and any others designated to receive
                    attribution, in any reasonable manner requested by
                    the Licensor (including by pseudonym if
                    designated);

                ii. a copyright notice;

               iii. a notice that refers to this Public License;

                iv. a notice that refers to the disclaimer of
                    warranties;

                 v. a URI or hyperlink to the Licensed Material to the
                    extent reasonably practicable;

            b. indicate if You modified the Licensed Material and
               retain an indication of any previous modifications; and

            c. indicate the Licensed Material is licensed under this
               Public License, and include the text of, or the URI or
               hyperlink to, this Public License.

       2. You may satisfy the conditions in Section 3(a)(1) in any
          reasonable manner based on the medium, means, and context in
          which You Share the Licensed Material. For example, it may be
          reasonable to satisfy the conditions by providing a URI or
          hyperlink to a resource that includes the required
          information.

       3. If requested by the Licensor, You must remove any of the
          information required by Section 3(a)(1)(A) to the extent
          reasonably practicable.

       4. If You Share Adapted Material You produce, the Adapter's
          License You apply must not prevent recipients of the Adapted
          Material from complying with this Public License.

Section 4 -- Sui Generis Database Rights.

Where the Licensed Rights include Sui Generis Database Rights that
apply to Your use of the Licensed Material:

  a. for the avoidance of doubt, Section 2(a)(1) grants You the right
     to extract, reuse, reproduce, and Share all or a substantial
     portion of the contents of the database for NonCommercial purposes
     only;

  b. if You include all or a substantial portion of the database
     contents in a database in which You have Sui Generis Database
     Rights, then the database in which You have Sui Generis Database
     Rights (but not its individual contents) is Adapted Material; and

  c. You must comply with the conditions in Section 3(a) if You Share
     all or a substantial portion of the contents of the database.

For the avoidance of doubt, this Section 4 supplements and does not
replace Your obligations under this Public License where the Licensed
Rights include other Copyright and Similar Rights.

Section 5 -- Disclaimer of Warranties and Limitation of Liability.

  a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE
     EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS
     AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF
     ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS,
     IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION,
     WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR
     PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS,
     ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT
     KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT
     ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU.

  b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE
     TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION,
     NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT,
     INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES,
     COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR
     USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN
     ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR
     DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR
     IN PART, THIS LIMITATION MAY NOT APPLY TO YOU.

  c. The disclaimer of warranties and limitation of liability provided
     above shall be interpreted in a manner that, to the extent
     possible, most closely approximates an absolute disclaimer and
     waiver of all liability.

Section 6 -- Term and Termination.

  a. This Public License applies for the term of the Copyright and
     Similar Rights licensed here. However, if You fail to comply with
     this Public License, then Your rights under this Public License
     terminate automatically.

  b. Where Your right to use the Licensed Material has terminated under
     Section 6(a), it reinstates:

       1. automatically as of the date the violation is cured, provided
          it is cured within 30 days of Your discovery of the
          violation; or

       2. upon express reinstatement by the Licensor.

     For the avoidance of doubt, this Section 6(b) does not affect any
     right the Licensor may have to seek remedies for Your violations
     of this Public License.

  c. For the avoidance of doubt, the Licensor may also offer the
     Licensed Material under separate terms or conditions or stop
     distributing the Licensed Material at any time; however, doing so
     will not terminate this Public License.

  d. Sections 1, 5, 6, 7, and 8 survive termination of this Public
     License.

Section 7 -- Other Terms and Conditions.

  a. The Licensor shall not be bound by any additional or different
     terms or conditions communicated by You unless expressly agreed.

  b. Any arrangements, understandings, or agreements regarding the
     Licensed Material not stated herein are separate from and
     independent of the terms and conditions of this Public License.

Section 8 -- Interpretation.

  a. For the avoidance of doubt, this Public License does not, and
     shall not be interpreted to, reduce, limit, restrict, or impose
     conditions on any use of the Licensed Material that could lawfully
     be made without permission under this Public License.

  b. To the extent possible, if any provision of this Public License is
     deemed unenforceable, it shall be automatically reformed to the
     minimum extent necessary to make it enforceable. If the provision
     cannot be reformed, it shall be severed from this Public License
     without affecting the enforceability of the remaining terms and
     conditions.

  c. No term or condition of this Public License will be waived and no
     failure to comply consented to unless expressly agreed to by the
     Licensor.

  d. Nothing in this Public License constitutes or may be interpreted
     as a limitation upon, or waiver of, any privileges and immunities
     that apply to the Licensor or You, including from the legal
     processes of any jurisdiction or authority.

=======================================================================

Creative Commons is not a party to its public
licenses. Notwithstanding, Creative Commons may elect to apply one of
its public licenses to material it publishes and in those instances
will be considered the "Licensor." The text of the Creative Commons
public licenses is dedicated to the public domain under the CC0 Public
Domain Dedication. Except for the limited purpose of indicating that
material is shared under a Creative Commons public license or as
otherwise permitted by the Creative Commons policies published at
creativecommons.org/policies, Creative Commons does not authorize the
use of the trademark "Creative Commons" or any other trademark or logo
of Creative Commons without its prior written consent including,
without limitation, in connection with any unauthorized modifications
to any of its public licenses or any other arrangements,
understandings, or agreements concerning use of licensed material. For
the avoidance of doubt, this paragraph does not form part of the
public licenses.

Creative Commons may be contacted at creativecommons.org.
data/dataset_metadata.json
ADDED
The diff for this file is too large to render here; see the raw file in the repository for its contents.
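Once the file itself has been downloaded, it can still be inspected locally. A minimal sketch — it assumes only that data/dataset_metadata.json parses as a single JSON object; the actual schema is not visible in this commit view:

import json
from pathlib import Path

# Peek at the top-level structure of the metadata file without assuming its schema.
metadata = json.loads(Path("data/dataset_metadata.json").read_text())
print(f"{len(metadata)} top-level entries")
for key in list(metadata)[:5]:  # preview a few keys instead of dumping the whole file
    print(key, "->", type(metadata[key]).__name__)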
data/participants_metadata.csv
ADDED
@@ -0,0 +1,289 @@
# All names are fake. They are not related to participants. They cannot be used to infer gender.
date,session_id,name,underlayer_size,xsens_suit_size,height_cm,foot_length_cm,shoulder_height_cm,shoulder_width_cm,elbow_span_cm,wrist_span_cm,arm_span_cm,hip_height_cm,hip_widtd_cm,knee_height_cm,ankle_height_cm,extra_shoe_thickness_cm,gender,ethnicity,bmi,weight_kg,weight_bucket,age_group,note
20230601,s0,paul_weaver,S,M,160,26,129,39,78,128,161,88,32,42,10,5,Female,caucasian,18.8,48,45-50,36-40,
20230601,s1,jennifer_kennedy,L,L,158,27,130,38,74,124,163,85,35,42,9,4,Female,south asian,23.6,59,50-60,41-45,
20230602,s0,gregory_herman,M,L,154,26,126,38,73,125,157,86,33,48,10,5,Female,south asian,23.6,56,60-70,41-45,
20230602,s1,timothy_english,L,XL,165,26,137,38,81,129,163,91,29,48,10,4,Female,caucasian,22.4,61,60-70,36-40,
20230605,s0,amber_cruz,XL,XXL,196,31,165,43,94,156,198,111,29,61,10,5,Male,caucasian,23.7,91,90-100,41-45,
20230605,s1,kayla_dillon,XL,XXL,168,26,140,33,84,130,171,88,33,49,10,3,Male,african american,28,79,70-80,36-40,
20230606,s0,marcus_jordan,L,XL,181,31,148,44,84,137,178,103,34,56,11,3,Male,caucasian,25,82,80-90,18-24,
20230606,s1,taylor_walton,M,L,170,29,139,34,88,138,178,93,34,53,11,4,Male,east asian,24.9,72,70-80,36-40,
20230607,s0,james_johnson,S,M,158,25,129,34,69,106,152,87,31,45,9,1,Female,east asian,20.1,49,45-50,25-30,
20230607,s1,barbara_wheeler,XL,XXL,195,34,158,36,102,154,194,100,36,56,10,3,Male,african american,25.2,96,90-100,18-24,
20230608,s0,shelby_arroyo,S,XL,153,24,129,34,74,121,147,89,32,43,9,1,Female,caucasian,19.2,45,45-50,46-50,
20230608,s1,donna_gonzales,M,L,166,25,139,39,84,134,170,101,34,46,13,3,Female,south asian,18.5,49,45-50,18-24,
20230609,s0,angela_harrell,L,XL,162,28,137,41,84,132,169,95,33,53,15,4,Female,african american,21,55,50-60,31-35,
20230609,s1,heather_becker,L,XL,178,30,148,38,90,144,184,94,33,55,10,3,Male,south asian,24,76,70-80,25-30,
20230612,s0,jason_smith,L,XL,186,31,159,34,86,139,179,106,30,59,10,3,Male,caucasian,26.1,81,80-90,31-35,
20230612,s1,christina_jones,L,XXL,169,27,142,35,85,135,175,95,34,54,11,5,Male,south asian,27.7,79,70-80,46-50,
20230613,s0,virginia_rivera,L,XL,166,27,138,43,91,144,183,93,36,49,12,1,Female,african american,22.5,62,60-70,46-50,
20230613,s1,kristopher_mata,M,L,165,30,135,39,83,128,163,90,30,50,10,3,Male,caucasian,24.2,66,60-70,18-24,skipped
20230614,s0,elizabeth_sandoval,XL,XXL,181,29,148,37,93,137,183,100,35,56,10,5,Male,caucasian,22.9,75,70-80,18-24,
20230614,s1,matthew_harper,M,L,165,30,135,39,83,128,163,90,30,50,10,3,Male,caucasian,24.2,66,60-70,18-24,remap 0613/s0
20230615,s0,dawn_heath,L,XL,169,30,140,46,93,136,173,94,33,46,10,4,Male,hispanic,28.4,81,80-90,25-30,
20230615,s1,vincent_bell,L,XL,167,29,139,49,84,141,181,95,33,50,11,5,Male,african american,29.4,82,80-90,46-50,
20230616,s0,kristen_thomas,M,L,175,29,142,46,88,144,182,97,32,52,10,4,Male,caucasian,27.8,85,80-90,31-35,
20230616,s1,michael_griffin,XL,XXL,167,28,141,44,82,137,174,92,36,49,9,1,Female,african american,34.8,97,95-100,31-35,
20230619,s0,jeremiah_odonnell,S,M,156,27,126,36,72,121,153,90,35,45,9,5,Female,south asian,21.4,52,50-60,18-24,
20230619,s1,eric_martin,S,M,163,25,133,37,80,129,163,88,31,51,13,1,Female,east asian,18.4,49,45-50,31-35,
20230620,s0,marie_vasquez,M,L,178,29,145,46,86,137,173,92,33,50,11,3,Male,african american,18.6,59,50-60,36-40,
20230620,s1,alison_carter,M,L,174,29,146,44,77,136,170,98,30,52,11,4,Male,south asian,22.5,68,60-70,36-40,
20230621,s0,sergio_howard,L,XL,185,29,152,45,87,143,185,104,36,56,10,3,Male,caucasian,24.8,85,80-90,36-40,
20230621,s1,kevin_davis,S,M,171,27,141,44,87,136,171,95,35,49,10,4,Female,east asian,21.5,63,60-70,25-30,
20230622,s0,john_solomon,M,L,170,30,141,41,89,140,175,89,33,49,9,4,Male,south asian,22.1,64,60-70,31-35,
20230622,s1,sherri_scott,S,M,161,27,136,37,93,128,163,93,37,47,12,5,Female,african american,27.4,71,70-80,31-35,
20230623,s0,erin_mccormick,XL,4XL,189,30,159,47,95,151,192,112,36,57,11,5,Male,south asian,23.8,85,80-90,36-40,
20230623,s1,joshua_thompson,M,L,172,30,147,44,82,140,176,101,31,52,12,4,Male,south asian,24.3,72,70-80,36-40,
20230626,s0,gilbert_simmons,M,L,186,29,156,39,87,149,187,104,34,54,10,5,Female,other/mixed,24,83,80-90,31-35,
20230626,s1,albert_chapman,S,M,173,27,147,36,84,140,176,100,29,53,11,3,Female,african american,21.4,64,60-70,25-30,
20230627,s0,kenneth_hicks,S,M,159,28,132,35,74,121,152,85,30,49,10,5,Female,east asian,21,53,50-60,18-24,
20230627,s1,robert_reynolds,M,L,165,25,134,40,81,124,162,96,33,45,7,3,Female,other/mixed,26.4,72,70-80,18-24,
20230628,s0,adriana_gonzalez,L,XL,177,25,143,36,92,143,180,102,33,56,7,4,Male,caucasian,25.9,81,80-90,25-30,
20230628,s1,hayley_little,L,XL,172,30,144,44,85,136,170,96,33,49,8,2,Male,caucasian,28.4,84,80-90,31-35,
20230629,s0,nancy_mahoney,S,M,155,24,129,36,74,117,151,88,28,42,8,1,Female,south asian,22.1,53,50-60,31-35,
20230629,s1,laurie_webb,L,XL,178,32,147,40,84,143,194,98,33,55,11,4,Male,east asian,23.7,75,70-80,36-40,
20230630,s0,gloria_carr,L,XL,180,31,148,33,79,142,181,94,33,55,11,3,Male,east asian,26.5,86,80-90,41-45,
20230630,s1,linda_coleman,M,L,155,27,128,42,77,123,157,88,38,43,9,6,Female,south asian,30.8,74,70-80,25-30,
,,,,,,,,,,,,,,,,,,,,,,,
,,,,,,,,,,,,,,,,,,,,,,,
20230703,s0,tommy_ayala,L,XL,160,26,135,42,77,130,166,94,39,44,10,3,Female,south asian,28.5,73,70-80,18-24,
20230703,s1,joshua_burke,S,M,158,25,137,37,80,134,169,94,36,48,9,3,Female,african american,19.1,64,60-70,31-35,
20230705,s0,william_davis,L,XL,165,30,136,39,85,136,170,95,33,44,8,4,Male,east asian,26.8,73,70-80,31-35,
20230705,s1,april_harris,L,XL,190,31,157,32,89,149,187,93,34,56,11,4,Male,south asian,26,94,90-100,18-24,
20230706,s0,breanna_reese,L,XL,188,31,149,32,89,148,186,107,32,58,11,4,Male,south asian,20.7,73,70-80,25-30,
20230706,s1,morgan_terrell,S,M,154,25,130,36,77,123,150,88,34,43,8,3,Female,caucasian,23.2,55,50-60,36-40,
20230707,s0,anthony_perez,XXL,4XL,155,29,136,43,82,128,165,89,44,41,7,3,Female,caucasian,37.9,91,90-100,36-40,
20230707,s1,elizabeth_tucker,L,XL,182,31,155,39,92,148,188,102,30,51,10,4,Male,caucasian,23.2,77,70-80,25-30,
20230710,s0,barbara_norman,XL,XXL,185,31,156,45,91,149,188,104,33,52,11,4,Male,east asian,28,96,90-100,46-50,
20230710,s1,jennifer_torres,S,M,176,30,145,39,80,140,178,98,27,51,11,4,Female,caucasian,20.7,64,60-70,25-30,
20230711,s0,frederick_young,M,M,153,26,126,37,72,120,153,89,27,44,9,3,Female,african american,28.2,66,60-70,31-35,
20230711,s1,maria_duran,M,L,183,30,152,33,94,149,188,98,33,55,10,3,Male,caucasian,20.3,68,60-70,31-35,
20230712,s0,laura_wilson,M,M,170,28,138,30,71,129,166,89,30,45,9,3,Female,caucasian,22.5,65,60-70,25-30,
20230712,s1,stacy_cook,S,M,170,31,139,33,92,142,182,98,32,49,9,3,Male,south asian,23.5,68,60-70,36-40,
20230713,s0,brian_wheeler,M,M,175,28,147,30,84,146,182,106,29,45,9,2,Female,african american,20.9,64,60-70,25-30,
20230713,s1,amy_crawford,L,XL,176,31,149,35,86,141,179,98,33,49,11,4,Female,caucasian,24.5,76,70-80,36-40,
20230714,s0,christopher_rush,S,M,151,26,125,34,76,128,162,90,37,40,7,2,Female,south asian,25.4,58,50-60,25-30,
20230714,s1,jacob_webb,M,L,165,30,132,36,85,135,170,89,31,44,7,1,Male,southeast asian,23.5,64,60-70,25-30,
20230717,s0,danielle_ortiz,M,L,174,30,143,35,80,141,178,95,33,48,11,3,Male,southeast asian,21.8,66,60-70,25-30,
20230717,s1,janice_lopez,M,L,179,30,144,35,94,150,190,94,31,51,10,3,Male,caucasian,21.2,68,60-70,31-35,
20230718,s0,timothy_velasquez,M,L,180,26,148,35,89,144,181,97,33,49,8,1,Male,caucasian,20.4,66,60-70,25-30,
20230718,s1,john_williams,S,M,153,25,127,29,77,124,160,91,34,42,9,2,Female,caucasian,28.6,67,60-70,36-40,
20230719,s0,shane_smith,S,M,163,26,135,29,79,125,158,91,29,43,11,4,Female,east asian,18.1,48,40-50,25-30,
20230719,s1,patrick_diaz,L,L,154,26,124,32,73,118,151,87,34,40,10,3,Female,southeast asian,24.5,58,50-60,46-50,
20230720,s0,robert_ballard,L,L,180,29,147,36,88,142,181,102,32,50,9,3,Male,other/mixed,21,68,60-70,31-35,
20230721,s0,mitchell_mcdonald,XL,XXL,199,35,159,36,97,153,199,103,29,55,11,4,Male,caucasian,18.5,65,60-70,25-30,
20230721,s1,david_ramirez,S,M,164,29,135,34,85,133,173,89,31,47,9,1,Male,south asian,21.6,58,50-60,25-30,
20230724,s0,tyler_ayers,S,M,163,28,137,29,79,129,162,91,31,45,11,3,Female,caucasian,25.2,67,60-70,41-45,
20230724,s1,justin_heath,S,M,164,27,136,29,79,132,164,91,30,43,8,3,Female,east asian,18.2,49,40-50,36-40,
20230725,s0,stephanie_moses,S,M,164,25,134,29,86,135,165,93,29,41,9,3,Female,caucasian,21.2,57,50-60,36-40,
20230725,s1,julie_taylor,S,M,161,25,132,26,76,127,165,89,28,45,9,1,Female,african american,18.5,48,40-50,36-40,
20230726,s0,mark_richardson,S,M,155,24,129,27,73,123,157,91,27,40,8,1,Female,east asian,18.3,44,40-50,36-40,
20230726,s1,thomas_nixon,M,L,168,26,140,31,79,129,167,98,31,43,11,3,Female,caucasian,20.9,59,50-60,36-40,
20230727,s0,joanne_white,M,L,166,26,139,30,81,131,167,89,29,43,10,3,Female,other/mixed,19.2,53,50-60,41-45,
20230727,s1,jeremy_gill,S,M,156,24,129,30,76,118,156,86,30,40,9,2,Female,east asian,19.7,48,40-50,41-45,
20230728,s0,lauren_mayer,S,M,158,24,131,27,82,127,165,94,27,44,9,3,Female,southeast asian,19.2,48,40-50,18-24,
20230728,s1,bradley_herman,S,M,160,24,132,29,78,130,158,92,30,43,9,3,Female,east asian,18.8,48,40-50,18-24,
20230731,s0,tammy_campos,S,M,165,27,136,32,78,130,166,95,30,45,8,3,Male,caucasian,18,49,40-50,46-50,
20230731,s1,angela_mclean,S,M,158,25,132,29,72,121,156,89,29,40,10,3,Female,south asian,18.4,46,40-50,46-50,
,,,,,,,,,,,,,,,,,,,,,,,
,,,,,,,,,,,,,,,,,,,,,,,
20230801,s0,aimee_davis,S,M,155,26,127,28,75,119,155,89,30,41,10,2,Female,caucasian,22.5,54,50-60,46-50,
20230801,s1,alexis_hernandez,S,M,165,26,138,30,80,127,163,92,29,44,10,3,Female,other/mixed,21.3,58,50-60,18-24,
20230802,s0,kelly_golden,M,L,178,30,147,43,80,141,187,99,32,57,10,3,Male,south asian,20.2,64,60-70,18-24,
20230802,s1,scott_mcbride,L,XL,167,29,140,34,86,139,179,95,34,47,10,3,Male,south asian,25.8,72,70-80,46-50,
20230803,s0,robert_howard,M,L,173,30,145,30,80,133,171,98,30,47,12,4,Female,caucasian,18.7,56,50-60,31-35,
20230803,s1,jennifer_sexton,S,M,155,25,126,30,79,120,157,85,28,39,8,1,Female,southeast asian,20.4,49,45-50,18-24,
20230807,s0,dominique_frye,XL,XXL,183,30,154,49,90,150,187,100,33,49,10,3,Male,hispanic,24.5,82,80-90,18-24,
20230807,s1,vanessa_chavez,S,M,158,27,128,33,73,127,163,86,30,45,9,3,Female,east asian,19.6,49,45-50,25-30,
20230808,s0,timothy_taylor,S,M,154,26,131,31,74,120,154,94,31,42,11,4,Female,south asian,18.6,44,45-50,18-24,
20230808,s1,erin_jones,S,M,166,27,142,32,85,142,182,103,28,49,12,3,Female,south asian,17.1,47,45-50,18-24,
20230809,s1,laura_smith,S,M,158,25,130,27,72,108,147,95,30,45,10,3,Female,caucasian,22,55,50-60,36-40,
20230811,s1,paul_phillips,L,XXL,173,31,148,38,84,132,175,93,37,45,9,3,Male,caucasian,32.4,97,90-100,25-30,
20230814,s0,leah_gaines,XL,XXL,190,33,165,40,96,153,196,104,35,54,12,3,Male,other/mixed,26.87,99,90-100,36-40,
20230814,s1,david_hall,XL,XXL,181,30,153,40,91,142,182,94,35,50,11,4,Male,caucasian,30.22,99,90-100,46-50,
20230815,s0,samantha_lester,S,M,167,27,139,27,84,130,165,91,28,41,8,1,Female,south asian,19.4,54,50-60,18-24,
20230815,s1,lisa_colon,S,M,165,25,141,27,77,119,162,96,27,43,8,2,Female,east asian,20.8,56.7,50-60,36-40,
20230816,s0,mary_ortiz,XL,XXL,181,30,155,41,92,141,181,104,34,51,10,3,Male,caucasian,29.91,98,90-100,46-50,
20230816,s1,jeffery_bryant,L,XL,182,29,161,30,90,144,182,102,30,47,11,3,Male,caucasian,24.15,80,80-90,31-35,
20230817,s0,brittney_powell,L,XL,183,28,158,33,97,150,189,100,33,50,10,2,Female,caucasian,23.97,80.3,80-90,36-40,
20230817,s1,rebecca_ward,L,XL,182,30,157,37,90,147,189,106,35,52,9,3,Male,east asian,21.91,72.6,70-80,46-50,
20230818,s0,amy_padilla,L,XL,182,32,152,36,91,143,183,95,32,49,13,5,Male,caucasian,24.65,81.7,80-90,46-50,
20230818,s1,daniel_gregory,M,L,168,27,141,29,85,135,174,94,29,46,9,2,Female,caucasian,20.89,59,50-60,41-45,
20230821,s1,william_wilson,L,XL,170,28,143,36,77,127,165,98,36,47,10,4,Male,southeast asian,32.96,95.3,80-90,25-30,
20230822,s0,kyle_parker,S,M,160,27,133,29,83,127,164,96,27,46,10,3,Female,african american,19.14,49,45-50,18-24,
20230822,s1,joshua_rice,M,L,171,29,144,35,79,133,171,103,30,50,9,3,Male,southeast asian,20.18,59,50-60,18-24,
20230823,s0,evelyn_moody,L,XL,169,29,142,32,87,136,172,100,32,47,10,3,Female,caucasian,22.76,65,60-70,25-30,
20230823,s1,alison_riddle,XL,XXL,183,32,156,33,92,149,188,111,33,55,11,4,Male,caucasian,24.49,82,80-90,36-40,
20230824,s0,kim_wilkins,XL,XXL,177,27,150,32,87,144,185,101,37,48,11,2,Female,caucasian,30.12,94.4,90-100,46-50,
20230824,s1,jenna_farmer,XL,XXL,172,28,143,33,85,132,172,100,34,46,11,3,Female,other/mixed,31.43,93,90-100,18-24,
20230825,s0,carrie_robinson,XL,XXL,170,30,143,37,81,138,174,98,35,51,11,4,Male,caucasian,31.39,90.7,90-100,41-45,
20230825,s1,alejandra_reynolds,XL,XXL,186,33,161,35,85,147,188,104,33,49,11,4,Male,caucasian,24.25,83.9,80-90,31-35,
20230828,s0,kaylee_johnson,XL,XXL,192,32,164,37,96,155,194,113,33,50,10,3,Male,caucasian,19.69,72.6,80-90,18-24,
20230828,s1,omar_patterson,XL,XXL,184,31,157,34,89,148,188,106,34,54,11,4,Male,african american,28.14,95.3,90-100,18-24,
20230829,s0,ray_humphrey,XL,XXL,175,31,150,37,88,138,179,99,37,50,10,4,Male,east asian,32.22,99,90-100,18-24,
20230829,s1,angel_roberts,S,M,158,26,132,28,77,121,156,96,28,43,10,4,Female,east asian,21.8,54.4,50-60,25-30,
20230830,s0,jason_vaughn,L,L,153,24,126,28,76,116,152,94,28,40,9,2,Female,hispanic,31.1,73.0,60-70,46-50,
20230830,s1,christy_ramirez,S,M,154,24,127,28,76,125,161,90,29,42,10,2,Female,southeast asian,22.9,54.4,50-60,25-30,
20230831,s1,ronald_harris,L,XL,162,25,137,29,82,124,158,89,33,47,9,1,Female,caucasian,32,84,80-90,36-40,
,,,,,,,,,,,,,,,,,,,,,,,
,,,,,,,,,,,,,,,,,,,,,,,
20230901,s0,thomas_marks,M,M,164,25,133,31,77,128,161,88,33,45,9,3,Female,caucasian,26,70,70-80,36-40,
20230901,s1,hannah_brown,XL,XXL,172,27,147,34,88,139,173,91,32,50,12,4,Female,caucasian,33.1,98,90-100,46-50,
20230905,s1,elizabeth_morgan,M,L,163,28,134,30,87,132,167,98,30,46,11,3,Female,southeast asian,25.9,69,60-70,36-40,
20230906,s0,pam_nelson,L,XL,184,30,154,35,85,141,180,101,33,49,8,4,Male,east asian,20.9,71,70-80,36-40,
20230906,s1,ashley_owens,M,L,172,27,145,30,85,141,180,102,30,47,11,3,Female,african american,23.3,69,60-70,25-30,
20230907,s0,margaret_mccormick,M,L,170,30,140,33,82,138,174,104,30,51,10,4,Male,south asian,21.8,63,60-70,18-24,
20230907,s1,mary_cook,XL,XXL,196,33,170,34,96,157,201,115,30,58,8,2,Male,african american,20.5,79,70-80,36-40,
20230908,s0,joel_anderson,L,XL,178,30,147,33,86,139,179,102,31,50,10,4,Male,southeast asian,23.6,75,70-80,25-30,
20230908,s1,danielle_pierce,M,L,174,28,143,34,81,128,164,98,30,48,9,2,Male,south asian,22.7,69,60-70,31-35,
20230911,s0,angela_gomez,M,L,168,26,138,30,82,130,164,96,29,49,10,3,Female,south asian,20.9,59,50-60,31-35,
20230911,s1,ethan_jacobson,L,XL,175,30,146,33,88,148,182,92,32,47,8,4,Male,south asian,22.5,69,60-70,25-30,
20230912,s0,jeffrey_barnes,L,XL,181,30,150,34,90,140,180,100,31,51,9,3,Male,caucasian,23.1,76,70-80,31-35,
20230912,s1,peter_henderson,M,L,171,27,144,32,85,138,175,92,32,48,9,1,Female,caucasian,24.6,72,70-80,41-45,
20230913,s0,stacey_lamb,L,XL,183,30,149,38,89,146,185,98,35,48,9,1,Male,caucasian,24.8,83,80-90,41-45,
20230913,s1,austin_lopez,L,XL,166,28,139,31,77,123,161,102,37,42,11,3,Female,caucasian,32.3,89,80-90,36-40,
20230914,s0,tamara_gibbs,M,M,162,29,134,29,79,128,166,96,29,44,10,2,Female,east asian,22.5,59,50-60,18-24,
20230914,s1,stephanie_arnold,M,M,156,25,131,30,74,124,156,86,29,43,9,3,Female,east asian,22.6,55,50-60,36-40,
20230915,s0,richard_gallegos,L,XL,174,29,147,32,85,139,176,102,32,49,12,4,Female,caucasian,24.7,75,70-80,25-30,
20230915,s1,colleen_drake,L,XL,182,32,152,36,90,151,192,102,32,51,9,3,Male,african american,24.8,82,80-90,36-40,
20230918,s0,kevin_shaw,L,XL,176,31,145,35,89,133,177,102,30,51,10,3,Male,caucasian,21.9,68,60-70,36-40,
20230918,s1,bobby_griffith,L,XL,161,25,133,33,77,130,162,97,34,42,9,2,Female,caucasian,38.2,99,90-100,25-30,
20230919,s0,andrew_taylor,XL,XXL,159,26,128,32,77,122,157,92,31,41,9,2,Female,caucasian,35.2,89,80-90,36-40,
20230919,s1,jessica_webster,M,L,167,24,142,32,81,131,166,96,31,44,9,1,Female,caucasian,24.7,69,70-80,25-30,
20230920,s0,jeremy_lewis,M,L,166,28,138,33,82,129,166,95,31,43,8,3,Male,east asian,25,69,60-70,36-40,
20230920,s1,william_hansen,M,M,159,26,130,29,74,126,157,92,29,43,10,3,Female,east asian,21.8,55,50-60,25-30,
20230921,s0,justin_martin,M,L,162,26,134,30,73,122,158,94,30,48,11,4,Female,east asian,22.5,59,50-60,36-40,
20230921,s1,alec_meza,M,L,155,24,127,30,74,123,157,92,29,43,9,3,Female,east asian,24.6,59,50-60,25-30,
20230922,s0,philip_morales,M,L,165,26,139,33,82,134,167,94,29,44,9,2,Female,caucasian,24.6,67,60-70,31-35,
20230922,s1,steve_douglas,M,L,167,29,139,32,79,130,165,97,29,49,12,3,Female,caucasian,22.9,64,60-70,36-40,
20230925,s0,suzanne_romero,L,XL,188,31,155,33,90,149,189,104,33,51,9,3,Male,caucasian,23.5,83,80-90,18-24,
20230925,s1,xavier_norris,M,M,155,24,127,32,76,124,160,92,28,39,9,3,Female,southeast asian,26.6,64,60-70,41-45,
20230926,s0,tasha_lee,M,M,162,25,135,30,78,128,163,98,30,46,10,4,Female,caucasian,19.8,52,50-60,31-35,
20230926,s1,megan_mejia,L,L,180,32,147,35,91,143,183,101,30,54,10,3,Male,hispanic,23.1,75,70-80,18-24,
20230927,s0,zachary_price,XL,XL,185,32,155,30,88,146,186,109,32,51,10,4,Male,southeast asian,19.9,68,60-70,18-24,
20230927,s1,samantha_may,M,L,153,26,127,31,73,115,151,89,35,45,10,3,Female,caucasian,32.5,67,60-70,46-50,
20230928,s0,grace_randolph,L,L,175,27,146,32,90,142,180,107,31,51,7,2,Female,caucasian,18.6,57,50-60,31-35,
20230928,s1,barbara_sandoval,M,M,163,29,134,30,82,132,172,95,30,46,9,3,Male,african american,23.3,62,60-70,31-35,
20230929,s0,samuel_campos,L,L,177,28,149,30,82,127,170,101,30,49,12,4,Female,caucasian,18.8,59,50-60,25-30,
20230929,s1,alan_burns,L,XL,151,26,126,29,72,113,151,92,30,41,11,3,Female,hispanic,35.5,81,80-90,25-30,
,,,,,,,,,,,,,,,,,,,,,,,
,,,,,,,,,,,,,,,,,,,,,,,
20231002,s0,benjamin_bailey,L,L,170,29,137,34,88,138,178,93,34,53,11,4,Male,southeast asian,23.9,69,60-70,36-40,
20231002,s1,larry_thompson,L,L,163,25,133,28,77,125,158,94,26,44,9,3,Female,caucasian,22.2,59,50-60,25-30,
20231003,s0,thomas_tanner,L,L,165,28,136,32,83,135,172,93,32,47,7,2,Male,east asian,24.2,66,60-70,41-45,
20231003,s1,allen_evans,L,L,173,30,143,33,83,137,177,103,34,48,9,3,Male,east asian,24,72,70-80,41-45,
20231004,s0,corey_coleman,M,M,157,26,128,28,75,125,160,91,28,42,10,3,Female,south asian,24.7,61,60-70,25-30,
20231004,s1,ashley_reyes,L,L,169,30,140,33,89,139,181,98,33,49,11,3,Male,south asian,24.2,69,60-70,41-45,
20231005,s0,glenn_richardson,M,L,170,30,137,31,84,132,172,96,31,46,10,3,Male,southeast asian,24.2,70,60-70,25-30,
20231005,s1,sheri_white,M,M,159,25,132,31,76,127,162,94,29,45,9,3,Female,other/mixed,23.7,60,60-70,31-35,
20231006,s0,david_vega,L,XL,181,27,151,33,84,133,172,102,33,52,10,5,Female,caucasian,20.1,66,60-70,36-40,
20231006,s1,kirk_flowers,M,L,168,28,138,31,85,138,177,99,32,46,8,1,Male,hispanic,24.8,70,60-70,18-24,
20231009,s0,clayton_bradley,L,XL,178,28,148,32,92,140,180,104,32,50,10,3,Male,african american,20.1,64,60-70,25-30,
20231009,s1,patrick_hall,M,M,162,24,135,28,79,125,160,91,26,46,9,2,Female,east asian,18.8,49,45-50,25-30,
20231010,s0,albert_hammond,XL,XXL,175,28,148,32,87,142,180,109,31,53,12,3,Female,hispanic,26.7,82,80-90,25-30,
20231010,s1,cheryl_johnson,M,M,149,25,122,29,75,121,153,91,29,43,9,4,Female,south asian,24.7,55,50-60,31-35,
20231011,s0,devon_norris,L,XL,181,31,148,44,84,137,178,103,34,56,11,3,Male,caucasian,21.3,68,60-70,18-24,
20231011,s1,daniel_kim,M,XL,167,26,141,27,80,131,163,99,31,48,10,3,Female,caucasian,24.7,69,60-70,31-35,
20231012,s0,michael_lee,L,XL,183,30,153,49,90,150,187,100,33,49,10,3,Male,hispanic,23.5,79,79-80,18-24,
20231012,s1,johnathan_good,L,L,171,25,142,31,82,133,165,101,29,46,8,1,Female,caucasian,22.2,65,60-70,31-35,
20231013,s0,shelley_jones,L,L,158,25,131,32,81,128,163,91,31,42,9,3,Female,caucasian,24.8,62,60-70,36-40,
20231013,s1,lucas_flores,L,L,170,28,142,28,81,133,170,98,30,51,11,3,Female,caucasian,21.2,67,60-70,31-35,
20231016,s0,holly_keller,L,XL,181,30,146,33,81,143,181,103,33,46,10,1,Male,caucasian,24.7,81,80-90,18-24,
20231016,s1,diane_williams,L,XL,176,28,147,30,76,129,171,108,28,54,9,3,Female,other/mixed,19.1,59,50-60,31-35,
20231017,s0,emily_farmer,L,XL,183,31,150,35,95,152,195,110,34,56,12,3,Male,african american,24.4,82,80-90,18-24,
20231017,s1,shawn_smith,M,M,149,24,124,29,74,114,149,88,27,45,8,2,Female,caucasian,22.5,50,50-60,41-45,
20231018,s0,scott_hutchinson,L,L,174,30,143,35,80,141,178,95,33,48,11,3,Male,southeast asian,24.1,73,70-80,18-24,
20231018,s1,debra_melton,M,M,155,27,129,30,82,135,170,91,28,46,10,3,Male,african american,24.6,59,50-60,31-35,
20231019,s0,douglas_martin,M,L,175,28,144,33,90,137,180,102,30,47,8,3,Male,caucasian,22.2,68,60-70,41-45,
20231019,s1,brady_pearson,M,L,163,25,138,27,74,121,157,99,30,46,9,3,Female,caucasian,24.1,64,60-70,36-40,
20231020,s0,anthony_chen,M,M,163,27,132,30,79,127,161,95,29,49,11,4,Female,east asian,18.8,50,45-50,18-24,
20231020,s1,steven_jackson,L,L,172,27,145,35,85,135,171,102,28,47,11,3,Female,african american,23.3,69,60-70,25-30,
20231023,s0,christopher_green,M,M,149,20,124,33,72,119,149,93,29,42,8,4,Female,south asian,22.5,50,45-50,18-24,
20231023,s1,shannon_burns,XL,XXL,180,30,152,38,88,147,184,100,33,49,10,3,Male,caucasian,24.7,80,80-90,31-35,
20231024,s0,john_barr,M,M,159,25,131,30,73,118,155,94,26,44,9,3,Female,caucasian,22.9,58,50-60,31-35,
20231024,s1,william_mendoza,M,M,159,25,134,27,78,127,160,91,29,42,10,2,Female,caucasian,23.7,60,50-60,36-40,
20231025,s0,logan_santos,XL,XL,170,28,141,36,77,127,165,98,36,47,10,4,Male,southeast asian,32.9,95,90-100,31-35,
20231025,s1,alicia_drake,L,L,180,29,147,38,89,141,177,102,28,52,11,3,Male,african american,18.5,60,50-60,31-35,
20231026,s0,arthur_byrd,XL,XL,164,28,138,35,78,125,165,91,34,47,8,3,Male,southeast asian,32.7,88.2,80-90,46-50,
20231026,s1,amy_rosales,M,L,160,29,135,39,86,134,171,97,30,45,10,2,Male,african american,24.2,62,60-70,31-35,
20231027,s0,george_james,M,M,159,29,129,29,85,132,165,91,28,43,9,3,Male,southeast asian,19.8,50,45-50,18-24,
20231027,s1,stacie_cross,L,XL,159,29,134,33,82,132,168,96,32,48,9,3,Male,caucasian,24.5,62,60-70,41-45,
20231030,s0,angela_garcia,M,L,149,26,124,29,74,121,155,89,28,43,9,3,Female,south asian,20.7,46,45-50,18-24,
20231030,s1,allison_harris,L,XL,173,30,145,30,88,134,174,100,32,51,10,3,Male,east asian,21.3,64,60-70,41-45,
20231031,s0,jason_brown,L,L,165,27,136,32,80,129,166,95,30,50,11,4,Female,east asian,24.6,67,60-70,41-45,
20231031,s1,eric_dickerson,L,XL,172,29,140,38,86,138,177,103,31,47,8,3,Male,southeast asian,23.9,71,70-80,41-45,
,,,,,,,,,,,,,,,,,,,,,,,
,,,,,,,,,,,,,,,,,,,,,,,
20231101,s0,joshua_smith,L,L,156,24,131,36,80,124,158,95,30,46,8,2,Female,southeast asian,24.7,57,50-60,36-40,
20231101,s1,dean_krause,XL,XXL,172,30,143,35,86,134,173,99,32,50,10,2,Male,caucasian,24.7,73,70-80,41-45,
20231102,s0,samuel_rogers,L,XL,173,30,141,35,87,142,179,100,34,47,7,2,Male,southeast asian,23.4,70,70-80,46-50,
20231102,s1,trevor_riley,XL,XXL,183,32,153,40,90,145,188,110,32,53,8,3,Male,caucasian,24.8,83,80-90,18-24,
20231103,s0,allison_house,L,XL,169,27,138,29,78,128,164,99,30,47,10,3,Female,hispanic,24.5,70,70-80,36-40,
20231103,s1,erica_sims,M,L,169,30,143,34,94,137,176,96,31,48,10,3,Male,south asian,24.5,70,70-80,31-35,
20231106,s0,nancy_david,M,M,162,25,133,30,78,127,164,93,29,46,10,3,Female,east asian,22.5,59,50-60,18-24,
20231106,s1,amanda_rodgers,XL,XXL,176,31,145,32,74,134,174,101,31,50,9,3,Male,caucasian,24.8,77,70-80,46-50,
20231107,s0,amy_snow,M,M,157,23,127,36,76,118,153,88,30,44,9,3,Female,east asian,21.5,53,50-60,18-24,
20231107,s1,gregory_townsend,XL,XXL,179,31,148,45,89,140,177,99,30,48,9,3,Male,caucasian,24.7,79,80-90,36-40,
20231108,s0,nicholas_hicks,M,L,170,28,142,40,85,138,174,91,33,44,8,3,Male,hispanic,24.6,71,70-80,31-35,
20231108,s1,kurt_young,M,L,175,29,149,40,84,139,180,100,31,50,8,3,Male,african american,23.8,73,70-80,46-50,
20231109,s0,janet_walsh,L,XL,180,31,151,40,86,141,180,101,32,54,8,3,Male,caucasian,24,78,70-80,36-40,
20231110,s0,thomas_brown,M,XL,180,30,147,42,88,140,182,98,31,48,7,2,Male,caucasian,24,78,70-80,36-40,
20231110,s1,jerry_shaffer,M,L,170,27,140,40,82,135,174,96,30,48,7,2,Male,southeast asian,24.6,69,60-70,41-45,
20231113,s0,patricia_gutierrez,L,XXL,174,31,147,41,88,140,177,100,35,51,9,3,Male,caucasian,32.4,97,90-100,25-30,
20231113,s1,greg_clark,XL,XL,164,28,138,35,78,125,165,91,34,47,8,3,Male,southeast asian,32.7,88,80-90,46-50,
20231114,s0,autumn_garcia,L,XXL,174,31,147,41,88,140,177,100,35,51,9,3,Male,caucasian,32.4,97,90-100,25-30,
20231114,s1,logan_walton,XL,XL,164,28,138,35,78,125,165,91,34,47,8,3,Male,southeast asian,32.7,88,80-90,46-50,
20231115,s0,carly_patterson,XL,XXL,180,30,148,40,91,136,176,101,31,49,9,3,Male,caucasian,20.7,67,60-70,25-30,
20231115,s1,andrew_johnson,XL,XL,180,26,147,38,81,140,178,107,33,51,9,5,Female,caucasian,20.4,66,60-70,18-24,
20231116,s0,kimberly_williams,XL,XL,181,28,147,35,87,144,188,108,32,49,11,3,Male,african american,24.7,81,80-90,25-30,
20231116,s1,justin_ramirez,M,L,157,24,129,32,75,119,152,94,32,45,9,5,Female,southeast asian,23.5,58,50-60,46-50,
20231117,s0,randy_martin,M,L,158,26,133,30,77,124,157,97,31,45,9,3,Female,southeast asian,23.2,58,50-60,31-35,
20231117,s1,brooke_butler,L,XL,181,28,147,40,86,138,173,92,35,49,8,3,Male,caucasian,24.7,81,80-90,36-40,
20231120,s0,sarah_miller,L,L,169,25,140,34,85,136,174,102,32,48,10,3,Female,african american,24.2,69,60-70,46-50,
20231120,s1,anna_chambers,XL,XXL,191,28,154,40,93,153,193,103,31,51,10,3,Male,african american,18.6,68,60-70,31-35,
20231121,s0,alexandria_griffith,L,L,168,27,142,33,80,129,164,104,33,53,11,4,Female,caucasian,21.5,59,50-60,36-40,
20231121,s1,christopher_martinez,M,L,182,27,150,32,89,139,177,109,32,57,10,5,Female,south asian,19.3,64,60-70,18-24,
20231122,s0,frank_hayden,M,M,155,26,124,36,74,119,152,94,31,42,7,2,Female,east asian,24.6,59,50-60,25-30,
20231122,s1,harold_copeland,L,XL,181,31,148,44,84,137,178,103,34,56,11,3,Male,caucasian,21.3,68,60-70,18-24,
20231127,s0,steven_vang,M,M,167,27,139,27,84,130,165,90,27,41,8,1,Female,other/mixed,19.4,49,40-50,18-24,
20231127,s1,robyn_blackburn,M,L,169,29,137,42,85,131,165,93,32,46,8,3,Male,southeast asian,20.8,59.5,50-60,25-30,
20231128,s0,paul_nguyen,M,M,164,30,131,40,81,132,168,94,31,45,8,3,Male,southeast asian,21.6,58,50-60,18-24,
20231128,s1,michael_vargas,M,M,168,26,139,30,83,135,167,101,30,48,9,3,Female,south asian,20.9,59,50-60,36-40,
20231129,s0,brittney_goodwin,M,M,157,26,131,32,77,117,152,92,30,48,8,3,Female,east asian,19.9,49,40-50,25-30,
20231129,s1,dylan_jones,M,L,169,27,139,37,80,132,169,95,30,49,7,1,Male,african american,19.2,55,50-60,31-35,
20231130,s0,danielle_wu,XL,XXL,183,30,153,49,90,150,187,100,33,49,10,3,Male,hispanic,24.5,82,80-90,18-24,
20231130,s1,mary_barker,XL,XXL,186,29,153,38,92,151,190,109,32,51,8,2,Male,southeast asian,18.7,65,60-70,25-30,
,,,,,,,,,,,,,,,,,,,,,,,
,,,,,,,,,,,,,,,,,,,,,,,
20231201,s0,walter_park,M,L,167,27,135,36,78,129,164,93,28,47,8,3,Male,south asian,19.7,55,50-60,31-35,
20231201,s1,daniel_wiley,L,XL,166,25,137,35,88,133,169,95,33,47,10,2,Male,south asian,24.7,68,60-70,31-35,
20231204,s0,jacqueline_brewer,M,M,159,23,128,30,78,121,153,91,28,40,9,2,Female,east asian,19.2,49,45-50,25-30,
20231204,s1,sylvia_joseph,L,L,171,29,137,34,88,138,178,93,34,53,10,4,Male,southeast asian,24.3,71,70-80,36-40,
20231205,s0,sarah_roberts,L,L,174,28,142,37,86,144,182,100,33,51,8,2,Male,south asian,24.1,73,70-80,18-24,
20231205,s1,alexis_wyatt,M,M,159,24,131,31,76,125,158,92,29,45,9,3,Female,south asian,19.4,49,45-50,25-30,
20231206,s0,david_morgan,M,M,158,29,129,29,85,132,165,91,28,43,9,3,Male,southeast asian,19.6,49,45-50,18-24,
20231206,s1,virginia_perez,L,L,174,25,146,31,87,132,169,103,31,52,9,3,Female,caucasian,19.2,58,50-60,25-30,
20231207,s0,jodi_morrison,M,M,167,27,137,33,87,138,174,99,31,50,12,4,Female,african american,20,56,50-60,31-35,
20231207,s1,katie_white,M,L,163,24,133,32,81,127,166,94,31,48,10,3,Female,african american,20.3,54,50-60,25-30,
20231208,s0,ronald_guerra,L,L,164,27,136,35,75,130,165,97,34,47,10,4,Female,south asian,24.5,66,60-70,31-35,
20231208,s1,courtney_ross,L,L,167,30,138,34,80,130,166,98,33,49,10,3,Male,south asian,24.7,69,60-70,25-30,
20231211,s0,rebecca_brennan,M,M,159,25,134,29,76,125,161,101,27,51,9,4,Female,caucasian,19.4,49,40-50,18-24,
20231211,s1,seth_bowman,L,L,175,30,145,38,86,139,177,103,30,51,10,3,Male,caucasian,21.5,66,60-70,18-24,
20231212,s0,paul_arellano,L,L,176,29,144,40,83,142,182,103,31,49,7,2,Male,caucasian,19,59,50-60,25-30,
20231212,s1,justin_jackson,M,M,159,25,134,31,76,123,158,96,29,47,9,5,Female,caucasian,19,48,40-50,25-30,
20231213,s0,shawn_wright,L,L,174,29,143,32,83,139,175,103,32,47,10,4,Female,caucasian,18.8,57,50-60,25-30,
20231213,s1,brenda_jacobs,M,M,164,24,135,30,82,125,158,90,29,46,9,2,Female,hispanic,22.6,61,60-70,31-35,
20231214,s0,jeremy_allen,L,XL,174,32,145,37,87,145,184,102,30,55,10,5,Male,african american,23.1,70,70-80,18-24,
20231214,s1,douglas_hoffman,M,L,168,27,140,29,80,121,156,102,29,51,10,4,Female,caucasian,21.6,61,60-70,36-40,
20231215,s0,jeffrey_king,L,XL,186,29,148,40,94,150,190,104,34,51,10,3,Male,caucasian,21.4,74,70-80,19-25,
20231215,s1,dylan_lambert,M,M,159,25,132,35,79,128,162,95,31,46,10,5,Female,southeast asian,24.5,62,60-70,25-30,
20231218,s0,roger_mcguire,XL,L,182,30,150,40,90,142,183,104,33,51,10,3,Male,caucasian,22.3,74,70-80,18-24,
20231218,s1,kathryn_pruitt,L,XXL,188,30,158,41,98,153,194,117,34,52,10,2,Male,caucasian,20.9,74,70-80,18-24,
20231219,s0,randall_love,L,XL,180,28,149,34,86,144,183,112,34,54,10,5,Female,caucasian,23.1,75,70-80,31-35,
20231219,s1,erica_lee,L,L,172,27,145,30,85,141,180,102,30,47,11,3,Female,african american,23.3,71,70-80,25-30,
20231220,s0,victor_sloan,L,XL,185,30,150,43,96,148,190,104,34,50,8,3,Male,caucasian,24.2,83,80-90,18-24,
20231220,s1,amanda_wong,XL,XXL,191,30,154,43,91,146,187,106,34,51,9,3,Male,caucasian,24.6,90,90-100,31-35,
20231221,s0,sara_wilson,L,L,180,28,145,42,88,144,178,108,33,48,8,3,Male,east asian,24.6,80,70-80,25-30,
20231221,s1,edward_contreras,M,M,169,27,143,31,83,137,173,108,30,57,10,5,Female,south asian,24.9,71,70-80,25-30,
20231222,s0,denise_carter,M,M,163,25,135,35,81,132,167,95,28,45,9,3,Female,east asian,18.4,49,45-50,25-30,
20231222,s1,kenneth_fischer,M,L,172,27,139,37,85,139,173,94,30,47,8,3,Male,caucasian,19.6,58,50-60,31-35,
download.py
ADDED
@@ -0,0 +1,76 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.

import sys
from pathlib import Path

import click
from loguru import logger
from nymeria.definitions import DataGroups
from nymeria.download_utils import DownloadManager


def get_groups(full: bool = False) -> list[DataGroups]:
    """
    By default all data present in nymeria_download_urls.json will be downloaded.
    For selective download, comment out lines to disable certain groups.
    See nymeria/definitions.py get_group_definitions() for the files included by each group.
    """
    return [
        DataGroups.LICENSE,
        DataGroups.metadata_json,
        DataGroups.body,
        DataGroups.recording_head,
        DataGroups.recording_head_data_data_vrs,
        DataGroups.recording_lwrist,
        DataGroups.recording_rwrist,
        DataGroups.recording_observer,
        DataGroups.recording_observer_data_data_vrs,
        DataGroups.narration_motion_narration_csv,
        DataGroups.narration_atomic_action_csv,
        DataGroups.narration_activity_summarization_csv,
        DataGroups.semidense_observations,
    ]


@click.command()
@click.option(
    "-i",
    "url_json",
    type=click.Path(file_okay=True, dir_okay=False, path_type=Path),
    default=None,
    required=True,
    help="The json file containing the download urls. Follow README.md instructions to access this file.",
)
@click.option(
    "-o",
    "rootdir",
    type=click.Path(file_okay=False, dir_okay=True, writable=True, path_type=Path),
    default=None,
    help="The root directory to hold the downloaded dataset",
)
@click.option(
    "-k",
    "match_key",
    default="2023",
    help="Partial key used to filter sequences for downloading. "
    "Default key value = 2023, which includes all available sequences",
)
def main(url_json: Path, rootdir: Path, match_key: str = "2023") -> None:
    logger.remove()
    logger.add(
        sys.stdout,
        colorize=True,
        format="<level>{level: <7}</level> <blue>{name}.py:</blue><green>{function}</green><yellow>:{line}</yellow> {message}",
        level="INFO",
    )

    dl = DownloadManager(url_json, out_rootdir=rootdir)
    dl.download(match_key=match_key, selected_groups=get_groups(), ignore_existing=True)


if __name__ == "__main__":
    main()
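Usage sketch for this script, assuming the url json obtained per the README has been saved as nymeria_download_urls.json and /data/nymeria is a writable output directory (both names are placeholders): python download.py -i nymeria_download_urls.json -o /data/nymeria -k 20231222. The -k value is matched as a substring of sequence names, so a full date such as 20231222 restricts the download to that day's sequences, while the default 2023 matches every available sequence.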
environment.yml
ADDED
@@ -0,0 +1,17 @@
name: pymeria
channels:
  - defaults
  - conda-forge
dependencies:
  - python=3.10
  - numpy
  - pillow
  - click
  - requests
  - pymomentum
  - tqdm
  - pip
  - pip:
    - loguru
    - projectaria-tools==1.5.5
    - -e .
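The environment can be created with the standard conda workflow (conda env create -f environment.yml, then conda activate pymeria). The final pip entry (-e .) installs this repository itself in editable mode, so the nymeria package used by download.py and the viewer resolves against the checked-out sources.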
nymeria/__init__.py
ADDED
@@ -0,0 +1,5 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
nymeria/body_motion_provider.py
ADDED
@@ -0,0 +1,227 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.

from pathlib import Path

import numpy as np
import pymomentum as pym
import torch
from loguru import logger
from nymeria.xsens_constants import XSensConstants
from projectaria_tools.core.sophus import SE3
from pymomentum.geometry import Character, Mesh


class BodyDataProvider:
    _dt_norminal: float = 1.0e6 / 240.0
    _dt_tolerance: int = 1000  # 1ms
    _tcorrect_tolerance: int = 10_1000  # 10ms

    # coordinates transform between momentum and xsens
    _A_Wx_Wm = torch.tensor([0.01, 0, 0, 0, 0, -0.01, 0, 0.01, 0]).reshape([3, 3])

    def __init__(self, npzfile: str, glbfile: str) -> None:
        if not Path(npzfile).is_file():
            logger.error(f"{npzfile=} not found")
            return

        logger.info(f"loading xsens from {npzfile=}")
        self.xsens_data: dict[str, np.ndarray] = dict(np.load(npzfile))
        for k, v in self.xsens_data.items():
            logger.info(f"{k=}, {v.shape=}")

        self.__correct_timestamps()
        self.__correct_quaternion()

        # load glb if it exists
        self.character: Character = None
        self.motion: np.ndarray = None
        if Path(glbfile).is_file():
            self.character, self.motion, _, fps = Character.load_gltf_with_motion(
                glbfile
            )
            assert fps == self.xsens_data[XSensConstants.k_framerate]
            assert self.motion.shape[0] == self.xsens_data[XSensConstants.k_frame_count]
            assert self.character.has_mesh

    @property
    def momentum_template_mesh(self) -> Mesh | None:
        if self.character is not None:
            return self.character.mesh
        else:
            return None

    def __correct_timestamps(self) -> None:
        t_original = self.xsens_data[XSensConstants.k_timestamps_us]
        dt_original = t_original[1:] - t_original[:-1]
        invalid = np.abs(dt_original - self._dt_norminal) > self._dt_tolerance
        num_invalid = invalid.sum()
        percentage = num_invalid / t_original.size * 100.0
        if num_invalid == 0:
            return
        logger.warning(f"number of invalid timestamps {num_invalid}, {percentage=}%")
        dt_corrected = dt_original
        dt_corrected[invalid] = int(self._dt_norminal)
        dt_corrected = np.insert(dt_corrected, 0, 0)
        t_corrected = t_original[0] + np.cumsum(dt_corrected)

        t_diff = np.abs(t_corrected - t_original)
        logger.info(f"after correct {t_diff[-1]= }us")
        if t_diff[-1] > self._tcorrect_tolerance:
            raise RuntimeError(f"corrected timestamps exceed tolerance {t_diff[-1]=}")

        self.xsens_data[XSensConstants.k_timestamps_us] = t_corrected

    def __correct_quaternion(self) -> None:
        qWXYZ = self.xsens_data[XSensConstants.k_part_qWXYZ].reshape(
            -1, XSensConstants.num_parts, 4
        )
        qn = np.linalg.norm(qWXYZ, axis=-1, keepdims=False)
        invalid = qn < 0.1
        if invalid.sum() == 0:
            return
        else:
            logger.error(f"number of invalid quaternions {invalid.sum()}")

        for p in range(XSensConstants.num_parts):
            if qn[0, p] < 0.5:
                qWXYZ[0, p] = np.array([1, 0, 0, 0])
        for f in range(1, qn.shape[0]):
            for p in range(XSensConstants.num_parts):
                if qn[f, p] < 0.5:
                    qWXYZ[f, p] = qWXYZ[f - 1, p]
        self.xsens_data[XSensConstants.k_part_qWXYZ] = qWXYZ.reshape(
            -1, XSensConstants.num_parts * 4
        )

    def get_global_timespan_us(self) -> tuple[int, int]:
        t_us = self.xsens_data[XSensConstants.k_timestamps_us]
        return t_us[0], t_us[-1]

    def get_T_w_h(self, timespan_ns: tuple[int, int] = None) -> tuple[list, list]:
        head_idx = XSensConstants.part_names.index("Head")
        num_parts = XSensConstants.num_parts
        timestamps_ns = self.xsens_data[XSensConstants.k_timestamps_us] * 1e3
        if timespan_ns is not None:
            t_start, t_end = timespan_ns
            i_start = np.searchsorted(timestamps_ns, t_start) + 240
            i_end = np.searchsorted(timestamps_ns, t_end) - 240
            assert i_start < i_end
        else:
            i_start = 0
            i_end = None

        head_q = self.xsens_data[XSensConstants.k_part_qWXYZ].reshape(-1, num_parts, 4)[
            i_start:i_end, head_idx, :
        ]
        head_t = self.xsens_data[XSensConstants.k_part_tXYZ].reshape(-1, num_parts, 3)[
            i_start:i_end, head_idx, :
        ]
        T_w_h: list[SE3] = SE3.from_quat_and_translation(
            head_q[:, 0], head_q[:, 1:], head_t
        )
        t_ns: list[int] = timestamps_ns[i_start:i_end].tolist()
        logger.info(f"get {len(T_w_h)} samples for computing alignment")
        return T_w_h, t_ns

    def __get_closest_timestamp_idx(self, t_us: int) -> int:
        if t_us <= self.get_global_timespan_us()[0]:
            return 0
        if t_us >= self.get_global_timespan_us()[-1]:
            return -1

        timestamps = self.xsens_data[XSensConstants.k_timestamps_us]
        idx_rr = np.searchsorted(timestamps, t_us)
        idx_ll = idx_rr - 1
        if abs(timestamps[idx_ll] - t_us) < abs(timestamps[idx_rr] - t_us):
            return idx_ll
        else:
            return idx_rr

    def get_posed_skeleton_and_skin(
        self, t_us: int, T_W_Hx: SE3 = None
    ) -> tuple[np.ndarray, np.ndarray | None]:
        """
        \brief Given a query timestamp, return the closest body motion.
        \arg t_us: query timestamp in microsecond.
        \arg T_W_Hx: optional SE3 alignment from XSens head to world coordinates,
             computed from XSens to Aria world coordinates alignment.
        \return First element is XSens posed skeleton. Second element is posed vertices
                of momentum mesh if the momentum retargeted results are loaded.
                We only return the posed mesh vertices, since the triangles and normals stay the same.
        """
        # find closest timestamp
        idx: int = self.__get_closest_timestamp_idx(t_us)

        # get XSens posed skeleton
        q = self.xsens_data[XSensConstants.k_part_qWXYZ][idx]
        t = self.xsens_data[XSensConstants.k_part_tXYZ][idx]
        T_Wx_Px = BodyDataProvider.qt_to_se3(q, t)
        T_W_Wx: SE3 = None
        if T_W_Hx is not None:
            head_idx = XSensConstants.part_names.index("Head")
            T_Hx_Wx = T_Wx_Px[head_idx].inverse()
            T_W_Wx = T_W_Hx @ T_Hx_Wx
            T_W_Px = [T_W_Wx @ T_wx_px for T_wx_px in T_Wx_Px]
            skel_xsens = BodyDataProvider.se3_to_skeleton(T_W_Px)
        else:
            skel_xsens = BodyDataProvider.se3_to_skeleton(T_Wx_Px)

        # get Momentum posed mesh vertices
        if self.character is not None:
            motion = torch.tensor(self.motion[idx])
            skel_state: torch.Tensor = pym.geometry.model_parameters_to_skeleton_state(
                self.character, motion
            )
            skin_momentum: torch.Tensor = self.character.skin_points(skel_state)

            if T_W_Wx is not None:
                t_W_Wx = (
                    torch.tensor(T_W_Wx.translation()).to(torch.float32).reshape([3, 1])
                )
                R_W_Wx = torch.tensor(T_W_Wx.rotation().to_matrix()).to(torch.float32)

                R_W_Wm = R_W_Wx @ self._A_Wx_Wm
                skin_momentum = (R_W_Wm @ skin_momentum.T + t_W_Wx).T
            else:
                skin_momentum = (self._A_Wx_Wm @ skin_momentum.T).T

        return skel_xsens, skin_momentum

    @staticmethod
    def qt_to_se3(part_qWXYZ: np.ndarray, part_tXYZ: np.ndarray) -> list[SE3]:
        """
        \brief Helper function to convert a frame of skeleton representation from
               list of quaternion + translation to SE3.
        """
        q_WXYZ = part_qWXYZ.reshape(XSensConstants.num_parts, 4)
        t_XYZ = part_tXYZ.reshape(XSensConstants.num_parts, 3)
        return SE3.from_quat_and_translation(q_WXYZ[:, 0], q_WXYZ[:, 1:], t_XYZ)

    @staticmethod
    def se3_to_skeleton(part_se3: list[SE3]) -> np.ndarray:
        """
        \brief Helper function to convert a frame of skeleton parameters to 3D wireframe
               for visualization purposes.
        """
        assert len(part_se3) == XSensConstants.num_parts
        children = np.concatenate([b.translation() for b in part_se3[1:]], axis=0)
        parents = np.concatenate(
            [part_se3[p].translation() for p in XSensConstants.kintree_parents[1:]],
            axis=0,
        )
        skeleton_cp = np.stack([children, parents], axis=1)
        assert skeleton_cp.shape == (XSensConstants.num_bones, 2, 3)
        return skeleton_cp.astype(np.float32)


def create_body_data_provider(
    xdata_npz: str, xdata_glb: str
) -> BodyDataProvider | None:
    if Path(xdata_npz).is_file():
        return BodyDataProvider(npzfile=xdata_npz, glbfile=xdata_glb)
    else:
        return None
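A minimal usage sketch of this module, assuming a sequence has already been downloaded; the path below is a hypothetical placeholder, and the .glb retargeted motion is optional:

# Sketch: query the body motion provider at the middle of its timespan.
from nymeria.body_motion_provider import create_body_data_provider

body_dp = create_body_data_provider(
    xdata_npz="/data/nymeria/<sequence>/body/xdata.npz",
    xdata_glb="/data/nymeria/<sequence>/body/xdata_blueman.glb",
)
t0_us, t1_us = body_dp.get_global_timespan_us()
skel, skin = body_dp.get_posed_skeleton_and_skin(t_us=(t0_us + t1_us) // 2)
print(skel.shape)  # (num_bones, 2, 3) wireframe segments in XSens world coordinates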
nymeria/data_provider.py
ADDED
@@ -0,0 +1,257 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.

from dataclasses import dataclass
from pathlib import Path

import numpy as np
from loguru import logger
from nymeria.body_motion_provider import create_body_data_provider
from nymeria.handeye import HandEyeSolver
from nymeria.path_provider import SequencePathProvider
from nymeria.recording_data_provider import (
    create_recording_data_provider,
    RecordingDataProvider,
)
from projectaria_tools.core.mps import ClosedLoopTrajectoryPose
from projectaria_tools.core.sensor_data import TimeDomain
from projectaria_tools.core.sophus import SE3


@dataclass(frozen=True)
class NymeriaDataProviderConfig:
    sequence_rootdir: Path
    load_head: bool = True
    load_observer: bool = True
    load_wrist: bool = True
    load_body: bool = True

    # If true, the filtered semidense points are exported into a npz file at the first loading
    view_cached_points: bool = True
    # Parameters for filtering semidense points
    th_invdep: float = 0.0004
    th_dep: float = 0.02
    max_point_count: int = 100_000

    trajectory_sample_fps: float = 1

    # Parameters for solving XSens to Aria world coordinates alignment
    handeye_smooth: bool = False
    handeye_window: int = 240 * 120
    handeye_skip: int = 240 * 5
    handeye_stride: int = 2


class NymeriaDataProvider(NymeriaDataProviderConfig):
    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)

        seq_pd = SequencePathProvider(self.sequence_rootdir)

        # create data provider for Aria recordings and MPS output
        self.recording_head = (
            create_recording_data_provider(seq_pd.recording_head)
            if self.load_head
            else None
        )
        self.recording_lwrist = (
            create_recording_data_provider(seq_pd.recording_lwrist)
            if self.load_wrist
            else None
        )
        self.recording_rwrist = (
            create_recording_data_provider(seq_pd.recording_rwrist)
            if self.load_wrist
            else None
        )
        self.recording_observer = (
            create_recording_data_provider(seq_pd.recording_observer)
            if self.load_observer
            else None
        )

        # create data provider for body motion
        self.body_dp = (
            create_body_data_provider(
                xdata_npz=seq_pd.body_paths.xsens_processed,
                xdata_glb=seq_pd.body_paths.momentum_model,
            )
            if self.load_body
            else None
        )

        if self.body_dp is None and len(self.get_existing_recordings()) == 0:
            raise RuntimeError(
                "data provider is empty. "
                "Make sure there is at least 1 recording or body motion"
            )

        # get overlapping timeline
        self.timespan_ns: tuple[int, int] = self.__get_timespan_ns()

        # compute xsens to aria world alignment
        self.__compute_xsens_to_aria_alignment()

    def get_existing_recordings(self) -> list[RecordingDataProvider]:
        return [
            x
            for x in [
                self.recording_head,
                self.recording_observer,
                self.recording_lwrist,
                self.recording_rwrist,
            ]
            if x is not None
        ]

    def __get_timespan_ns(self, ignore_ns: int = 1e9) -> tuple[int, int]:
        """
        \brief Compute overlapping timeline across all loaded data
        """
        t_start = 0
        t_end = None
        if self.body_dp is not None:
            t0, t1 = self.body_dp.get_global_timespan_us()
            t_start = t0 * 1e3
            t_end = t1 * 1e3

        for rec in self.get_existing_recordings():
            t0, t1 = rec.get_global_timespan_ns()
            t_start = t_start if t_start > t0 else t0
            t_end = t_end if t_end is None or t_end < t1 else t1

        t_start += ignore_ns
        t_end -= ignore_ns
        assert t_start < t_end, f"invalid time span {t_start= }us, {t_end= }us"

        t_start = int(t_start)
        t_end = int(t_end)
        duration = (t_end - t_start) / 1.0e9
        logger.info(f"time span: {t_start= }us {t_end= }us {duration= }s")
        return t_start, t_end

    def get_synced_rgb_videos(self, t_ns_global: int) -> dict[str, any]:
        data = {}
        for rec in [self.recording_head, self.recording_observer]:
            if rec is None or not rec.has_rgb:
                continue

            result = rec.get_rgb_image(t_ns_global, time_domain=TimeDomain.TIME_CODE)
            if abs(result[-1] / 1e6) > 33:  # 33ms
                logger.warning(f"time difference for image query: {result[-1]} ms")
            data[rec.tag] = result
        return data

    def get_all_pointclouds(self) -> dict[str, np.ndarray]:
        data = {}
        for rec in self.get_existing_recordings():
            if not rec.has_pointcloud:
                continue

            if self.view_cached_points:
                data[rec.tag] = rec.get_pointcloud_cached(
                    th_dep=self.th_dep,
                    th_invdep=self.th_invdep,
                    max_point_count=self.max_point_count,
                )
            else:
                data[rec.tag] = rec.get_pointcloud(
                    th_dep=self.th_dep,
                    th_invdep=self.th_invdep,
                    max_point_count=self.max_point_count,
                )
        return data

    def get_all_trajectories(self) -> dict[str, np.ndarray]:
        data = {}
        for rec in self.get_existing_recordings():
            if rec.has_vrs and rec.has_pose:
                data[rec.tag] = rec.sample_trajectory_world_device(
                    sample_fps=self.trajectory_sample_fps
                )
        return data

    def get_synced_poses(self, t_ns_global: int) -> dict[str, any]:
        data = {}
        T_Wd_Hd = None
        for rec in self.get_existing_recordings():
            if rec is None or not rec.has_pose:
                continue

            pose: ClosedLoopTrajectoryPose = None
            tdiff: int = None
            pose, tdiff = rec.get_pose(t_ns_global, time_domain=TimeDomain.TIME_CODE)
            if abs(tdiff / 1e6) > 2:  # 2ms
                logger.warning(f"time difference for pose query {tdiff/1e6} ms")

            data[rec.tag] = pose
            if rec.tag == "recording_head":
                T_Wd_Hd: SE3 = pose.transform_world_device

        if (
            self.body_dp is not None
            and self.recording_head is not None
            and T_Wd_Hd is not None
        ):
            T_Wd_Hx = T_Wd_Hd @ self.T_Hd_Hx(t_ns_global)
            t_us = t_ns_global / 1e3
            skel, skin = self.body_dp.get_posed_skeleton_and_skin(t_us, T_W_Hx=T_Wd_Hx)
            data["xsens"] = skel
            if skin is not None:
                data["momentum"] = skin
        return data

    def __compute_xsens_to_aria_alignment(self) -> None:
        """
        \brief Compute se3 transform from xsens head to aria head
        This function will set self.Ts_Hd_Hx and self.t_ns_align
        """
        if self.recording_head is None or self.body_dp is None:
            self.Ts_Hd_Hx = [SE3.from_matrix(np.eye(4))]
            self.t_ns_align = None
            return
        else:
            logger.info("compute alignment from xsens head to aria headset")
            assert self.body_dp is not None
            assert self.recording_head is not None

        # get synchronized trajectory
        xsens_traj = self.body_dp.get_T_w_h(self.timespan_ns)
        T_Wx_Hx: list[SE3] = xsens_traj[0]
        t_ns: list[int] = xsens_traj[-1]
        T_Wd_Hd: list[SE3] = []
        for t in t_ns:
            pose, _ = self.recording_head.get_pose(t, TimeDomain.TIME_CODE)
            T_Wd_Hd.append(pose.transform_world_device)

        # solve handeye
        handeye = HandEyeSolver(
            stride=self.handeye_stride,
            smooth=self.handeye_smooth,
            skip=self.handeye_skip,
            window=self.handeye_window,
        )
        self.Ts_Hd_Hx: list[SE3] = handeye(
            T_Wa_A=T_Wd_Hd,
            T_Wb_B=T_Wx_Hx,
        )
        if len(self.Ts_Hd_Hx) > 1:
            self.t_ns_align = t_ns[0 :: self.handeye_skip]
        else:
            self.t_ns_align = None

    def T_Hd_Hx(self, t_ns: int) -> SE3:
        if self.t_ns_align is None:
            return self.Ts_Hd_Hx[0]

        if t_ns <= self.t_ns_align[0]:
            return self.Ts_Hd_Hx[0]

        if t_ns >= self.t_ns_align[-1]:
            return self.Ts_Hd_Hx[-1]

        idx = np.searchsorted(self.t_ns_align, t_ns)
        return self.Ts_Hd_Hx[idx]
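A minimal usage sketch of the top-level provider; the sequence path is a hypothetical placeholder and assumes the corresponding data groups were downloaded:

# Sketch: load one sequence and query synchronized poses at mid-sequence.
from pathlib import Path
from nymeria.data_provider import NymeriaDataProvider

nymeria_dp = NymeriaDataProvider(sequence_rootdir=Path("/data/nymeria/<sequence>"))
t_start, t_end = nymeria_dp.timespan_ns
poses = nymeria_dp.get_synced_poses(t_start + (t_end - t_start) // 2)
print(sorted(poses.keys()))  # e.g. recording_head, recording_lwrist, ..., xsens, momentum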
nymeria/data_viewer.py
ADDED
@@ -0,0 +1,237 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.

from collections import deque
from dataclasses import dataclass
from pathlib import Path
from uuid import uuid4

import numpy as np
import rerun as rr
import rerun.blueprint as rrb
from loguru import logger
from nymeria.data_provider import NymeriaDataProvider
from PIL import Image
from projectaria_tools.core.sensor_data import ImageData
from projectaria_tools.core.sophus import SE3
from tqdm import tqdm


@dataclass(frozen=True)
class ViewerConfig:
    output_rrd: Path = None
    sample_fps: float = 10
    rotate_rgb: bool = True
    downsample_rgb: bool = True
    jpeg_quality: int = 90
    traj_tail_length: int = 100

    ep_recording_head: str = "recording_head/2d"
    ep_recording_observer: str = "recording_observer/2d"

    point_radii: float = 0.008
    line_radii: float = 0.008
    skel_radii: float = 0.01


class NymeriaViewer(ViewerConfig):
    palette: dict[str, list] = {
        "recording_head": [255, 0, 0],
        "recording_lwrist": [0, 255, 0],
        "recording_rwrist": [0, 0, 255],
        "recording_observer": [61, 0, 118],
        "pointcloud": [128, 128, 128, 128],
        "momentum": [218, 234, 134],
    }
    color_skeleton = np.array(
        [
            [127, 0, 255],
            [105, 34, 254],
            [81, 71, 252],
            [59, 103, 249],
            [35, 136, 244],
            [11, 167, 238],
            [10, 191, 232],
            [34, 214, 223],
            [58, 232, 214],
            [80, 244, 204],
            [104, 252, 192],
            [128, 254, 179],
            [150, 252, 167],
            [174, 244, 152],
            [196, 232, 138],
            [220, 214, 122],
            [244, 191, 105],
            [255, 167, 89],
            [255, 136, 71],
            [255, 103, 53],
            [255, 71, 36],
            [255, 34, 17],
        ]
    )

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)

        blueprint = rrb.Horizontal(
            rrb.Spatial3DView(name="3d"),
            rrb.Vertical(
                rrb.Spatial2DView(name="2d participant", origin=self.ep_recording_head),
                rrb.Spatial2DView(
                    name="2d observer", origin=self.ep_recording_observer
                ),
            ),
        )

        rr.init(
            "nymeria data viewer",
            spawn=(self.output_rrd is None),
            recording_id=uuid4(),
            default_blueprint=blueprint,
        )
        if self.output_rrd is not None:
            rr.save(self.output_rrd)

        rr.log("world", rr.ViewCoordinates.RIGHT_HAND_Z_UP, static=True)

        self._init_mesh: bool = False
        self._epaths_3d: set[str] = set()
        self._traj_deques: dict[str, deque] = {}

    def __call__(self, nymeria_dp: NymeriaDataProvider):
        # add static scene
        self.__log_pointcloud(nymeria_dp)
        self.__log_trajectory(nymeria_dp)

        # add dynamic scene
        t_ns_start, t_ns_end = nymeria_dp.timespan_ns
        dt: int = int(1e9 / self.sample_fps)
        for idx, t_ns in tqdm(enumerate(range(t_ns_start, t_ns_end, dt))):
            rr.set_time_sequence("frames", idx)
            rr.set_time_nanos("timestamps_ns", t_ns)

            self.__log_synced_video(t_ns, nymeria_dp)
            self.__log_synced_poses(t_ns, nymeria_dp)

            self.__set_viewpoint()

    def __log_pointcloud(self, nymeria_dp: NymeriaDataProvider) -> None:
        pointclouds = nymeria_dp.get_all_pointclouds()
        for tag, pts in pointclouds.items():
            logger.info(f"add point cloud {tag}")
            cc = self.palette.get("pointcloud")
            ep = f"world/semidense_pts/{tag}"
            rr.log(
                entity_path=ep,
                entity=rr.Points3D(pts, colors=cc, radii=self.point_radii),
                static=True,
            )
            self._epaths_3d.add(ep)

    def __log_trajectory(self, nymeria_dp: NymeriaDataProvider) -> None:
        trajs: dict[str, np.ndarray] = nymeria_dp.get_all_trajectories()
        for tag, traj in trajs.items():
            logger.info(f"add trajectory {tag}, {traj.shape=}")
            ep = f"world/traj_full/{tag}"
            rr.log(
                ep,
                rr.LineStrips3D(
                    traj[:, :3, 3], colors=self.palette.get(tag), radii=self.line_radii
                ),
                static=True,
            )
            self._epaths_3d.add(ep)

    def __log_synced_video(self, t_ns: int, nymeria_dp: NymeriaDataProvider) -> None:
        images: dict[str, tuple] = nymeria_dp.get_synced_rgb_videos(t_ns)
        for tag, data in images.items():
            rgb: ImageData = data[0]

            if self.downsample_rgb:
                rgb = rgb.to_numpy_array()[::2, ::2, :]
            rgb = Image.fromarray(rgb.astype(np.uint8))
            if self.rotate_rgb:
                rgb = rgb.rotate(-90)

            if tag in self.ep_recording_head:
                ep = self.ep_recording_head
            elif tag in self.ep_recording_observer:
                ep = self.ep_recording_observer
            rr.log(
                f"{ep}/214-1", rr.Image(rgb).compress(jpeg_quality=self.jpeg_quality)
            )

    def __log_synced_poses(self, t_ns: int, nymeria_dp: NymeriaDataProvider) -> None:
        poses: dict[str, any] = nymeria_dp.get_synced_poses(t_ns)

        self._T_mv: SE3 = None
        for tag, val in poses.items():
            if "recording" in tag and self.traj_tail_length > 0:
                traj = self._traj_deques.setdefault(tag, deque())
                if self.traj_tail_length > 0 and len(traj) == self.traj_tail_length:
                    traj.popleft()
                t = val.transform_world_device.translation()
                traj.append(t.squeeze().tolist())
                ep = f"world/traj_tail/{tag}"
                rr.log(
                    ep,
                    rr.LineStrips3D(
                        traj, colors=self.palette.get(tag), radii=self.line_radii
                    ),
                )
                self._epaths_3d.add(ep)

            if tag == "xsens":
                ep = "world/body/xsens_skel"
                logger.debug(f"xsens skeleton {val.shape = }")
                rr.log(
                    ep,
                    rr.LineStrips3D(
                        val, colors=self.color_skeleton, radii=self.skel_radii
                    ),
                    static=False,
                )
                self._epaths_3d.add(ep)
            if tag == "momentum":
                ep = "world/body/momentum_mesh"
                if self._init_mesh:
                    rr.log_components(ep, [rr.components.Position3DBatch(val)])
                else:
                    faces = nymeria_dp.body_dp.momentum_template_mesh.faces
                    normals = nymeria_dp.body_dp.momentum_template_mesh.normals
                    rr.log(
                        ep,
                        rr.Mesh3D(
                            triangle_indices=faces,
                            vertex_positions=val,
                            vertex_normals=normals,
                            vertex_colors=self.palette.get(tag),
                        ),
                    )
                    self._init_mesh = True
                self._epaths_3d.add(ep)

            if tag == "recording_head":
                self._T_mv = val.transform_world_device

    def __set_viewpoint(self, add_rotation: bool = False):
        if self._T_mv is None:
            return
        t = self._T_mv.translation() * -1.0
        Rz = np.eye(3)
        if add_rotation:
            R = self._T_mv.rotation().to_matrix()
            psi = np.arctan2(R[1, 0], R[0, 0])
            Rz[0:2, 0:2] = np.array(
                [np.cos(psi), -np.sin(psi), np.sin(psi), np.cos(psi)]
            ).reshape(2, 2)

        for ep in self._epaths_3d:
            rr.log(
                ep,
                rr.Transform3D(translation=t, mat3x3=Rz),
                static=False,
            )
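A minimal sketch combining the provider with this rerun-based viewer (the path is a placeholder; the repository's viewer.py presumably wraps a similar flow behind a command line, which is not shown here):

# Sketch: visualize one downloaded sequence in rerun.
from pathlib import Path
from nymeria.data_provider import NymeriaDataProvider
from nymeria.data_viewer import NymeriaViewer

nymeria_dp = NymeriaDataProvider(sequence_rootdir=Path("/data/nymeria/<sequence>"))
viewer = NymeriaViewer(sample_fps=10)
viewer(nymeria_dp)  # logs point clouds, trajectories, RGB video and body motion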
nymeria/definitions.py
ADDED
@@ -0,0 +1,182 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.

import json
from dataclasses import dataclass, fields
from enum import Enum

"""
Each sequence folder follows the following structure.
Files might be missing if not downloaded.
├── LICENSE
├── metadata.json
├── body
│   ├── xdata_blueman.glb
│   ├── xdata.healthcheck
│   ├── xdata.mvnx
│   └── xdata.npz
├── narration
│   ├── activity_summarization.csv
│   ├── atomic_action.csv
│   └── motion_narration.csv
├── recording_head / recording_observer
│   ├── data
│   │   ├── data.vrs
│   │   ├── et.vrs
│   │   └── motion.vrs
│   └── mps
│       ├── eye_gaze
│       │   ├── general_eye_gaze.csv
│       │   └── personalized_eye_gaze.csv
│       └── slam
│           ├── closed_loop_trajectory.csv
│           ├── online_calibration.jsonl
│           ├── open_loop_trajectory.csv
│           ├── semidense_observations.csv.gz
│           ├── semidense_points.csv.gz
│           └── summary.json
└── recording_rwrist / recording_lwrist
    ├── data
    │   ├── data.vrs
    │   └── motion.vrs
    └── mps
        └── slam
            ├── closed_loop_trajectory.csv
            ├── online_calibration.jsonl
            ├── open_loop_trajectory.csv
            ├── semidense_observations.csv.gz
            ├── semidense_points.csv.gz
            └── summary.json
"""

NYMERIA_VERSION: str = "v0.0"


@dataclass(frozen=True)
class MetaFiles:
    license: str = "LICENSE"
    metadata_json: str = "metadata.json"


@dataclass(frozen=True)
class Subpaths(MetaFiles):
    body: str = "body"
    text: str = "narration"

    recording_head: str = "recording_head"
    recording_lwrist: str = "recording_lwrist"
    recording_rwrist: str = "recording_rwrist"
    recording_observer: str = "recording_observer"

    vrs: str = "data"
    mps: str = "mps"
    mps_slam: str = "mps/slam"
    mps_gaze: str = "mps/eye_gaze"


@dataclass(frozen=True)
class BodyFiles:
    xsens_processed: str = f"{Subpaths.body}/xdata.npz"
    xsens_raw: str = f"{Subpaths.body}/xdata.mvnx"
    momentum_model: str = f"{Subpaths.body}/xdata_blueman.glb"


@dataclass(frozen=True)
class TextFiles:
    motion_narration: str = f"{Subpaths.text}/motion_narration.csv"
    atomic_action: str = f"{Subpaths.text}/atomic_action.csv"
    activity_summarization: str = f"{Subpaths.text}/activity_summarization.csv"


@dataclass(frozen=True)
class VrsFiles:
    data: str = f"{Subpaths.vrs}/data.vrs"
    motion: str = f"{Subpaths.vrs}/motion.vrs"
    et: str = f"{Subpaths.vrs}/et.vrs"


@dataclass(frozen=True)
class SlamFiles:
    closed_loop_trajectory: str = f"{Subpaths.mps_slam}/closed_loop_trajectory.csv"
    online_calibration: str = f"{Subpaths.mps_slam}/online_calibration.jsonl"
    open_loop_trajectory: str = f"{Subpaths.mps_slam}/open_loop_trajectory.csv"
    semidense_points: str = f"{Subpaths.mps_slam}/semidense_points.csv.gz"
    semidense_observations: str = f"{Subpaths.mps_slam}/semidense_observations.csv.gz"
    location_summary: str = f"{Subpaths.mps_slam}/summary.json"


@dataclass(frozen=True)
class GazeFiles:
    general_gaze: str = f"{Subpaths.mps_gaze}/general_eye_gaze.csv"
    personalized_gaze: str = f"{Subpaths.mps_gaze}/personalized_eye_gaze.csv"


class DataGroups(Enum):
    """
    \brief Each variable defines one atomic downloadable element
    """

    LICENSE = Subpaths.license
    metadata_json = Subpaths.metadata_json

    body = Subpaths.body

    recording_head = Subpaths.recording_head
    recording_head_data_data_vrs = f"{Subpaths.recording_head}/{VrsFiles.data}"
    recording_lwrist = Subpaths.recording_lwrist
    recording_rwrist = Subpaths.recording_rwrist
    recording_observer = Subpaths.recording_observer
    recording_observer_data_data_vrs = f"{Subpaths.recording_observer}/{VrsFiles.data}"

    narration_motion_narration_csv = TextFiles.motion_narration
    narration_atomic_action_csv = TextFiles.atomic_action
    narration_activity_summarization_csv = TextFiles.activity_summarization

    semidense_observations = "semidense_observations"


def get_group_definitions() -> dict[str, list]:
    """
    \brief Definition of DataGroups
    File paths are relative with respect to each sequence folder.
    Some sequences might be missing certain files/data groups
    due to errors that occurred during data collection or processing.
    There is one url per data group per sequence.
    Data groups with multiple files are packed into zip files.
    """
    AriaFiles = (
        [f.default for f in fields(VrsFiles) if "data" not in f.name]
        + [f.default for f in fields(SlamFiles) if "observations" not in f.name]
        + [f.default for f in fields(GazeFiles)]
    )
    miniAriaFiles = [f.default for f in fields(VrsFiles) if "et" not in f.name] + [
        f.default for f in fields(SlamFiles) if "observerations" not in f.name
    ]

    g_defs = {x.name: [x.value] for x in DataGroups}
    g_defs[DataGroups.body.name] = [x.default for x in fields(BodyFiles)]

    for x in [DataGroups.recording_head, DataGroups.recording_observer]:
        g_defs[x.name] = [f"{x.name}/{f}" for f in AriaFiles]

    for x in [DataGroups.recording_rwrist, DataGroups.recording_lwrist]:
        g_defs[x.name] = [f"{x.name}/{f}" for f in miniAriaFiles]

    g_defs[DataGroups.semidense_observations.name] = []
    for x in fields(Subpaths):
        if "recording" in x.name:
            g_defs[DataGroups.semidense_observations.name].append(
                f"{x.default}/{SlamFiles.semidense_observations}"
            )

    print("=== group definitions (group_name: [group_files]) ===")
    print(json.dumps(g_defs, indent=2))

    return g_defs


# get_group_definitions()
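A small sketch of inspecting these group definitions, useful when deciding which groups to request in download.py; the printed file list shown in the comment is only indicative:

# Sketch: list the files bundled into one downloadable group.
from nymeria.definitions import DataGroups, get_group_definitions

defs = get_group_definitions()  # also prints the full mapping as JSON
print(defs[DataGroups.recording_lwrist.name])
# e.g. ['recording_lwrist/data/data.vrs', 'recording_lwrist/data/motion.vrs', ...]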
nymeria/download_utils.py
ADDED
@@ -0,0 +1,289 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.

import hashlib
import json
import shutil
import tempfile
from dataclasses import dataclass
from enum import Enum
from pathlib import Path
from zipfile import is_zipfile, ZipFile

import requests
from loguru import logger
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
from tqdm import tqdm

from .definitions import DataGroups, NYMERIA_VERSION


class DlConfig(Enum):
    CHUCK_SIZE_BYTE = 8192
    READ_BYTE = 4096
    RETRY = 5
    BACKOFF_FACTOR = 3


class DlStatus(Enum):
    UNKNOWN = None
    SUCCESS = "success"
    IGNORED = "ignored, file already downloaded"
    WARN_NOTFOUND = "warning, missing download link"
    ERR_SHA1SUM = "error, sha1sum mismatch"
    ERR_MEMORY = "error, insufficient disk space"
    ERR_NETWORK = "error, network"


@dataclass
class DlLink:
    filename: str
    sha1sum: str
    file_size_bytes: int
    download_url: str

    data_group: DataGroups
    status: DlStatus = DlStatus.UNKNOWN

    def __post_init__(self) -> None:
        prefix = f"Nymeria_{NYMERIA_VERSION}_"
        if prefix not in self.filename:
            self.status = (
                f"Version mismatch with the release {NYMERIA_VERSION}. "
                f"Please download the latest url json"
            )
            raise ValueError(self.status)
        self.filename = self.filename.replace(prefix, "")

    @property
    def seq_name(self) -> str:
        return "_".join(self.filename.split("_")[0:6])

    @property
    def logdir(self) -> str:
        return "logs"

    def __check_outdir(self, outdir: Path) -> None:
        assert (
            outdir.name == self.seq_name
        ), f"Output directory name ({outdir.name}) mismatch with sequence {self.seq_name}"
        outdir.mkdir(exist_ok=True)

    def get(self, outdir: Path, ignore_existing: bool = True) -> None:
        """This function throws error if not successful"""
        flag = outdir / self.logdir / self.data_group.name
        if flag.is_file() and ignore_existing:
            self.status = DlStatus.IGNORED
            return

        self.__check_outdir(outdir)

        with tempfile.TemporaryDirectory() as tmpdir:
            tmp_filename = Path(tmpdir) / self.filename
            logger.info(f"Download {self.filename} -> {tmp_filename}")

            session = requests.Session()
            """
            Retry will be triggered for the following cases
              (429) Too Many Requests
              (500) Internal Server Error
              (502) Bad Gateway
              (503) Service Unavailable
              (504) Gateway Timeout
            """
            retries = Retry(
                total=DlConfig.RETRY.value,
                backoff_factor=DlConfig.BACKOFF_FACTOR.value,
                status_forcelist=[429, 500, 502, 503, 504],
            )

            session.mount("https://", HTTPAdapter(max_retries=retries))
            with session.get(self.download_url, stream=True) as r:
                free_outdir = shutil.disk_usage(outdir).free
                free_tmpdir = shutil.disk_usage(tmpdir).free
                if (
                    free_outdir < self.file_size_bytes
                    or free_tmpdir < self.file_size_bytes
                ):
                    self.status = DlStatus.ERR_MEMORY
                    raise RuntimeError(
                        "Insufficient disk space. "
                        f"Require {self.file_size_bytes}B, "
                        f"tmpdir available {free_tmpdir}B, outdir available {free_outdir}B"
                    )

                with open(tmp_filename, "wb") as f:
                    sha1 = hashlib.sha1()
                    progress_bar = tqdm(
                        total=self.file_size_bytes, unit="iB", unit_scale=True
                    )
                    for chunk in r.iter_content(
                        chunk_size=DlConfig.CHUCK_SIZE_BYTE.value
                    ):
                        progress_bar.update(len(chunk))
                        f.write(chunk)
                        sha1.update(chunk)
                    computed = sha1.hexdigest()
                    if self.sha1sum != computed:
                        self.status = DlStatus.ERR_SHA1SUM
                        raise RuntimeError(
                            f"sha1sum mismatch, computed {computed}, expected {self.sha1sum}"
                        )
                    progress_bar.close()

                try:
                    r.raise_for_status()
                except Exception as e:
                    self.status = DlStatus.ERR_NETWORK
                    raise RuntimeError(e)

            # move from tmp -> dst
            if is_zipfile(tmp_filename):
                logger.info("unzip")
                with ZipFile(tmp_filename) as zf:
                    zf.extractall(outdir)
            else:
                dst_file = outdir / self.data_group.value
                dst_file.parent.mkdir(exist_ok=True, parents=True)
                shutil.move(src=tmp_filename, dst=dst_file)

        logger.info(f"Download {self.filename} -> {outdir}")
        self.status = DlStatus.SUCCESS

        # create a flag
        flag.parent.mkdir(exist_ok=True)
        flag.touch()


class DownloadManager:
    def __init__(self, url_json: Path, out_rootdir: Path) -> None:
        self.url_json = url_json
        assert self.url_json.is_file(), f"{self.url_json} not found"

        self.out_rootdir = out_rootdir
        self.out_rootdir.mkdir(exist_ok=True)

        with open(self.url_json, "r") as f:
            data = json.load(f)
        self._sequences = data.get("sequences", {})
        assert len(
            self._sequences
        ), "No sequence found. Please check the json file is correct."
        self.__get_data_summary()
        self._logs = {}

    @property
    def sequences(self) -> dict[str, any]:
        return self._sequences

    @property
    def logfile(self) -> Path:
        return self.out_rootdir / "download_summary.json"

    def __get_data_summary(self):
        missing = {x.name: {"count": 0, "sequences": []} for x in DataGroups}
        for seq, dgs in self.sequences.items():
            for dg in DataGroups:
                if dg.name not in dgs:
                    missing[dg.name]["count"] += 1
                    missing[dg.name]["sequences"].append(seq)
        fname = self.logfile.with_name("data_summary.json")
        with open(fname, "w") as f:
            json.dump(
                {
                    "missing_files": missing,
                    "available_sequences": list(self.sequences.keys()),
                },
                f,
                indent=2,
            )
        logger.info(f"save data summary to {fname}")

    def __prepare(
        self,
        match_key: str,
        selected_groups: list["DataGroups"],
    ) -> set["DataGroups"]:
        selected_groups += [DataGroups.LICENSE, DataGroups.metadata_json]
        selected_groups = set(selected_groups)

        num_seqs = 0
        total_gb = 0
        self._logs = {}

        for seq, dgs in self.sequences.items():
            if match_key not in seq:
                continue

            num_seqs += 1
            self._logs[seq] = {}
            for dg in selected_groups:
                if dg.name not in dgs:
                    self._logs[seq][dg.name] = DlStatus.WARN_NOTFOUND.value
                else:
                    self._logs[seq][dg.name] = None
                    dl = DlLink(**{**dgs.get(dg.name, {}), "data_group": dg})
                    total_gb += dl.file_size_bytes / (2**30)

        # populate confirmation msg
        msg = "\t" + "\n\t".join([x.value for x in selected_groups])
        free_disk_gb = shutil.disk_usage(self.out_rootdir).free / (2**30)
        confirm = (
            input(
                f"Download summary\n"
                f"  Output rootdir: {self.out_rootdir}\n"
                f"  Number sequences: {num_seqs}\n"
                f"  Total memory (GB): {total_gb}\n"
                f"  Available free disk space (GB): {free_disk_gb}\n"
                f"  Selected data groups:\n{msg}\n"
                f"Proceed: [y/n] "
            ).lower()
            == "y"
        )
        if not confirm:
            exit(1)
        return selected_groups

    def __logging(self, **kwargs) -> None:
        self._logs.update(**kwargs)

        with open(self.logfile, "w") as f:
            json.dump(self._logs, f, indent=2)

    def download(
        self,
        match_key: str,
        selected_groups: list["DataGroups"],
        ignore_existing: bool = True,
    ) -> None:
        selected_groups = self.__prepare(match_key, selected_groups)

        summary = {x.name: 0 for x in DlStatus}
        for seq_name, dgs in self.sequences.items():
            if match_key not in seq_name:
                continue

            outdir = self.out_rootdir / seq_name
            for dg in selected_groups:
                if dg.name not in dgs:
                    continue

                dl = DlLink(**{**dgs[dg.name], "data_group": dg})
                try:
                    dl.get(outdir, ignore_existing=ignore_existing)
                except Exception as e:
                    logger.error(f"downloading failure: {e}")

                summary[dl.status.name] += 1
                self._logs[dl.seq_name][dl.data_group.name] = dl.status.value
                self.__logging()

        self.__logging(download_summary=summary)
        logger.info(f"Dataset download to {self.out_rootdir}")
        logger.info(f"Brief download summary: {json.dumps(summary, indent=2)}")
        logger.info(f"Detailed summary saved to {self.logfile}")
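A minimal sketch of driving DownloadManager directly, bypassing the download.py CLI; file and directory names are placeholders, and LICENSE plus metadata.json are added to the selection automatically:

# Sketch: fetch one data group for all sequences recorded on a given day.
from pathlib import Path
from nymeria.definitions import DataGroups
from nymeria.download_utils import DownloadManager

dl = DownloadManager(Path("nymeria_download_urls.json"), out_rootdir=Path("/data/nymeria"))
dl.download(
    match_key="20231222",
    selected_groups=[DataGroups.recording_head],
    ignore_existing=True,
)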
nymeria/handeye.py
ADDED
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.

import numpy as np
from loguru import logger
from projectaria_tools.core.sophus import SE3


class HandEyeSolver:
    def __init__(self, smooth: bool, window: int, skip: int = 240, stride: int = 1):
        self.stride = int(stride)
        self.smooth = smooth
        self.skip = int(skip)
        self.window = int(window)
        if self.window < 240:
            self.smooth = False

    def so3xR3(self, T_Wa_A: list[SE3], T_Wb_B: list[SE3]) -> SE3:
        """
        \return T_A_B using so3xR3 SVD decomposition.
        """
        assert len(T_Wa_A) == len(T_Wb_B)

        N = len(T_Wa_A) - self.stride
        se3_A1_A2 = [T_Wa_A[i].inverse() @ T_Wa_A[i + self.stride] for i in range(N)]
        se3_B1_B2 = [T_Wb_B[i].inverse() @ T_Wb_B[i + self.stride] for i in range(N)]

        # solve for R
        log_A1_A2 = [x.rotation().log() for x in se3_A1_A2]
        log_B1_B2 = [x.rotation().log() for x in se3_B1_B2]
        A = np.stack(log_A1_A2, axis=-1).squeeze()
        B = np.stack(log_B1_B2, axis=-1).squeeze()
        logger.debug(f"{A.shape=}, {B.shape=}")

        matrixU, S, matrixVh = np.linalg.svd(
            B @ A.transpose(), full_matrices=True, compute_uv=True
        )
        logger.debug(f"{matrixU.shape=}, {S.shape=}, {matrixVh.shape=}")

        RX = matrixVh @ matrixU.transpose()
        if np.linalg.det(RX) < 0:
            RX[2, :] = RX[2, :] * -1.0

        # solve for t
        jacobian = [x.rotation().to_matrix() - np.eye(3) for x in se3_A1_A2]
        jacobian = np.concatenate(jacobian, axis=0)
        assert jacobian.shape == (N * 3, 3)
        logger.debug(f"{jacobian.shape=}")
        residual = [
            RX @ b.translation().reshape(3, 1) - a.translation().reshape(3, 1)
            for a, b in zip(se3_A1_A2, se3_B1_B2)
        ]
        residual = np.concatenate(residual, axis=0)
        assert residual.shape == (N * 3, 1)
        logger.debug(f"{residual.shape=}")
        JTJ = jacobian.T @ jacobian
        JTr = jacobian.T @ residual
        tX = np.linalg.lstsq(JTJ, JTr, rcond=None)[0]

        T_A_B = np.ndarray([3, 4])
        T_A_B[:3, :3] = RX
        T_A_B[:3, 3] = tX.squeeze()
        logger.debug(f"{T_A_B=}\n")
        T_A_B = SE3.from_matrix3x4(T_A_B)
        return T_A_B

    def __call__(self, T_Wa_A: list[SE3], T_Wb_B: list[SE3]) -> list[SE3]:
        N = len(T_Wa_A)
        assert N == len(T_Wb_B)
        if self.window >= N or not self.smooth:
            T_A_B = self.so3xR3(T_Wa_A, T_Wb_B)
            return [T_A_B]

        Ts_A_B = []
        for i in range(0, N, self.skip):
            istart = int(i - self.window / 2)
            if istart < 0:
                istart = 0
            iend = istart + self.window
            if iend >= N:
                iend = -1
                istart = N - self.window

            t_wa_a = T_Wa_A[istart:iend]
            t_wb_b = T_Wb_B[istart:iend]
            T_A_B = self.so3xR3(t_wa_a, t_wb_b)
            Ts_A_B.append(T_A_B)
        return Ts_A_B
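For illustration, a minimal usage sketch of the solver above; the input trajectories are assumed to be prepared elsewhere (e.g. time-aligned Aria and XSens poses of the same device in their respective world frames), and the parameter values are placeholders:

from nymeria.handeye import HandEyeSolver

# T_Wa_A, T_Wb_B: list[SE3] of equal length, time-aligned poses of the same
# device expressed in two different world frames (assumed to exist already).
solver = HandEyeSolver(smooth=True, window=2400, skip=240, stride=1)
Ts_A_B = solver(T_Wa_A, T_Wb_B)  # one SE3 per sliding window, or a single SE3 when smoothing is off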
nymeria/narration_provider.py
ADDED
@@ -0,0 +1,12 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.

from pathlib import Path


class NarrationProvider:
    def __init__(self, rootdir: Path):
        pass
nymeria/path_provider.py
ADDED
@@ -0,0 +1,57 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.

from dataclasses import fields
from pathlib import Path

from nymeria.definitions import BodyFiles, MetaFiles, Subpaths, TextFiles


class SequencePathProvider:
    """
    \brief Each sequence contains the following subfolders:
        - recording_head
        - recording_lwrist
        - recording_rwrist
        - recording_observer
        - body
        - narration
    """

    def __init__(self, rootdir: Path) -> None:
        assert rootdir.is_dir(), f"{rootdir=} not found"
        self.rootdir = rootdir
        self.license = rootdir / MetaFiles.license
        self.metadata = rootdir / MetaFiles.metadata_json
        self.body_paths = BodyFiles(
            **{
                f.name: str(rootdir / getattr(BodyFiles, f.name))
                for f in fields(BodyFiles)
            }
        )
        self.narration_paths = TextFiles(
            **{
                f.name: str(rootdir / getattr(TextFiles, f.name))
                for f in fields(TextFiles)
            }
        )

        self.recording_head: Path = rootdir / Subpaths.recording_head
        self.recording_lwrist: Path = rootdir / Subpaths.recording_lwrist
        self.recording_rwrist: Path = rootdir / Subpaths.recording_rwrist
        self.recording_observer: Path = rootdir / Subpaths.recording_observer

    def __repr__(self) -> str:
        return (
            f"SequencePaths(\n"
            f"  license={self.license},\n"
            f"  metadata={self.metadata},\n"
            f"  body_paths={self.body_paths},\n"
            f"  recording_head={self.recording_head},\n"
            f"  recording_observer={self.recording_observer},\n"
            f"  recording_lwrist={self.recording_lwrist},\n"
            f"  recording_rwrist={self.recording_rwrist}\n)"
        )
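As a quick sketch of how this provider might be used (the sequence path is a placeholder):

from pathlib import Path
from nymeria.path_provider import SequencePathProvider

paths = SequencePathProvider(Path("/path/to/sequence"))
print(paths.recording_head)  # <sequence>/recording_head
print(paths.body_paths)      # BodyFiles dataclass holding absolute file paths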
nymeria/recording_data_provider.py
ADDED
@@ -0,0 +1,251 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.

from enum import Enum
from pathlib import Path

import numpy as np
from loguru import logger

from nymeria.definitions import Subpaths, VrsFiles
from projectaria_tools.core import data_provider, mps
from projectaria_tools.core.data_provider import VrsDataProvider
from projectaria_tools.core.mps import (
    ClosedLoopTrajectoryPose,
    MpsDataPathsProvider,
    MpsDataProvider,
)
from projectaria_tools.core.sensor_data import (
    ImageData,
    ImageDataRecord,
    TimeDomain,
    TimeQueryOptions,
)
from projectaria_tools.core.stream_id import StreamId


class AriaStream(Enum):
    camera_slam_left = "1201-1"
    camera_slam_right = "1201-2"
    camera_rgb = "214-1"
    imu_right = "1202-1"
    imu_left = "1202-2"


class RecordingPathProvider:
    """
    \brief This class does not check whether the input recording path is valid.
    """

    def __init__(self, recording_path: Path):
        self.recording_path: Path = recording_path
        self.tag: str = recording_path.name

    @property
    def data_vrsfile(self) -> Path:
        return self.recording_path / VrsFiles.data

    @property
    def motion_vrsfile(self) -> Path:
        return self.recording_path / VrsFiles.motion

    @property
    def mps_path(self) -> MpsDataPathsProvider | None:
        mps_path = self.recording_path / Subpaths.mps
        if mps_path.is_dir():
            return MpsDataPathsProvider(str(mps_path))
        else:
            return None

    @property
    def points_npz_cache(self) -> Path:
        return self.recording_path / Subpaths.mps_slam / "semidense_points_cache.npz"


class RecordingDataProvider(RecordingPathProvider):
    def __init__(self, recording_path: Path) -> None:
        super().__init__(recording_path)

        self._vrs_dp = None
        self._mps_dp = None
        if not self.recording_path.is_dir():
            return

        # load vrs
        if self.data_vrsfile.is_file():
            self._vrs_dp = data_provider.create_vrs_data_provider(
                str(self.data_vrsfile)
            )
        elif self.motion_vrsfile.is_file():
            self._vrs_dp = data_provider.create_vrs_data_provider(
                str(self.motion_vrsfile)
            )

        # load mps
        if self.mps_path is not None:
            self._mps_dp = MpsDataProvider(self.mps_path.get_data_paths())

    @property
    def vrs_dp(self) -> VrsDataProvider | None:
        return self._vrs_dp

    @property
    def mps_dp(self) -> MpsDataProvider | None:
        return self._mps_dp

    def get_global_timespan_ns(self) -> tuple[int, int]:
        if self.vrs_dp is None:
            raise RuntimeError(
                f"require {self.data_vrsfile=} or {self.motion_vrsfile=}"
            )

        t_start = self.vrs_dp.get_first_time_ns_all_streams(TimeDomain.TIME_CODE)
        t_end = self.vrs_dp.get_last_time_ns_all_streams(TimeDomain.TIME_CODE)
        return t_start, t_end

    @property
    def has_pointcloud(self) -> bool:
        if self.mps_dp is None or not self.mps_dp.has_semidense_point_cloud():
            return False
        else:
            return True

    def get_pointcloud(
        self,
        th_invdep: float = 0.0004,
        th_dep: float = 0.02,
        max_point_count: int = 50_000,
        cache_to_npz: bool = False,
    ) -> np.ndarray:
        assert self.has_pointcloud, "recording has no point cloud"
        points = self.mps_dp.get_semidense_point_cloud()

        points = mps.utils.filter_points_from_confidence(
            raw_points=points, threshold_dep=th_dep, threshold_invdep=th_invdep
        )
        points = mps.utils.filter_points_from_count(
            raw_points=points, max_point_count=max_point_count
        )

        points = np.array([x.position_world for x in points])

        if cache_to_npz:
            np.savez(
                self.points_npz_cache,
                points=points,
                threshold_dep=th_dep,
                threshold_invdep=th_invdep,
                max_point_count=max_point_count,
            )
        return points

    def get_pointcloud_cached(
        self,
        th_invdep: float = 0.0004,
        th_dep: float = 0.02,
        max_point_count: int = 50_000,
    ) -> np.ndarray:
        assert self.has_pointcloud, "recording has no point cloud"
        if self.points_npz_cache.is_file():
            logger.info(f"load cached point cloud from {self.points_npz_cache}")
            return np.load(self.points_npz_cache)["points"]

        return self.get_pointcloud(cache_to_npz=True)

    @property
    def has_vrs(self) -> bool:
        return self.vrs_dp is not None

    @property
    def has_rgb(self) -> bool:
        return self.has_vrs and self.vrs_dp.check_stream_is_active(StreamId("214-1"))

    def get_rgb_image(
        self, t_ns: int, time_domain: TimeDomain = TimeDomain.TIME_CODE
    ) -> tuple[ImageData, ImageDataRecord, int]:
        assert self.has_rgb, "recording has no rgb video"
        assert time_domain in [
            TimeDomain.DEVICE_TIME,
            TimeDomain.TIME_CODE,
        ], "unsupported time domain"

        if time_domain == TimeDomain.TIME_CODE:
            t_ns_device = self.vrs_dp.convert_from_timecode_to_device_time_ns(
                timecode_time_ns=t_ns
            )
        else:
            t_ns_device = t_ns

        image_data, image_meta = self.vrs_dp.get_image_data_by_time_ns(
            StreamId("214-1"),
            time_ns=t_ns_device,
            time_domain=TimeDomain.DEVICE_TIME,
            time_query_options=TimeQueryOptions.CLOSEST,
        )
        t_diff = t_ns_device - image_meta.capture_timestamp_ns

        return image_data, image_meta, t_diff

    @property
    def has_pose(self) -> bool:
        if self.mps_dp is None or not self.mps_dp.has_closed_loop_poses():
            return False
        else:
            return True

    def get_pose(
        self, t_ns: int, time_domain: TimeDomain
    ) -> tuple[ClosedLoopTrajectoryPose, int]:
        t_ns = int(t_ns)
        assert self.has_pose, "recording has no closed loop trajectory"
        assert time_domain in [
            TimeDomain.DEVICE_TIME,
            TimeDomain.TIME_CODE,
        ], "unsupported time domain"

        if time_domain == TimeDomain.TIME_CODE:
            assert self.vrs_dp, "require vrs for time domain mapping"
            t_ns_device = self.vrs_dp.convert_from_timecode_to_device_time_ns(
                timecode_time_ns=t_ns
            )
        else:
            t_ns_device = t_ns

        pose = self.mps_dp.get_closed_loop_pose(t_ns_device, TimeQueryOptions.CLOSEST)
        t_diff = pose.tracking_timestamp.total_seconds() * 1e9 - t_ns_device
        return pose, t_diff

    def sample_trajectory_world_device(self, sample_fps: float = 1) -> np.ndarray:
        assert self.has_pose, "recording has no closed loop trajectory"
        assert self.has_vrs, "current implementation assumes vrs is loaded"
        t_start, t_end = self.get_global_timespan_ns()
        t_start = self.vrs_dp.convert_from_timecode_to_device_time_ns(t_start)
        t_end = self.vrs_dp.convert_from_timecode_to_device_time_ns(t_end)

        dt = int(1e9 / sample_fps)
        traj_world_device = []
        for t_ns in range(t_start, t_end, dt):
            pose = self.mps_dp.get_closed_loop_pose(t_ns, TimeQueryOptions.CLOSEST)
            traj_world_device.append(
                pose.transform_world_device.to_matrix().astype(np.float32)
            )

        traj_world_device = np.stack(traj_world_device, axis=0)
        return traj_world_device


def create_recording_data_provider(
    recording_path: Path,
) -> RecordingDataProvider | None:
    if not recording_path.is_dir():
        return None

    dp = RecordingDataProvider(recording_path)
    if dp.vrs_dp is None and dp.mps_dp is None:
        return None
    else:
        return dp
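A short usage sketch (the recording path is a placeholder). The factory returns None when neither VRS nor MPS data is found; otherwise timestamps are queried in the TIME_CODE domain by default:

from pathlib import Path
from projectaria_tools.core.sensor_data import TimeDomain
from nymeria.recording_data_provider import create_recording_data_provider

dp = create_recording_data_provider(Path("/path/to/sequence/recording_head"))
if dp is not None and dp.has_rgb and dp.has_pose:
    t_start, t_end = dp.get_global_timespan_ns()  # TIME_CODE timestamps
    image_data, image_meta, dt_img = dp.get_rgb_image(t_start)
    pose, dt_pose = dp.get_pose(t_start, TimeDomain.TIME_CODE)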
nymeria/sequence_attributes.py
ADDED
@@ -0,0 +1,62 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.

from dataclasses import dataclass


@dataclass
class SequenceAttributes:
    date: str
    session_id: str
    fake_name: str
    act_id: str
    uid: str

    location: str
    script: str
    action_duration_sec: float = -1

    has_two_participants: bool = False
    pt2: str = None
    body_motion: bool = False

    head_data: bool = False
    head_slam: bool = False
    head_trajectory_m: float = None
    head_duration_sec: float = None
    head_general_gaze: bool = False
    head_personalized_gaze: bool = False

    left_wrist_data: bool = False
    left_wrist_slam: bool = False
    left_wrist_trajectory_m: float = None
    left_wrist_duration_sec: float = None

    right_wrist_data: bool = False
    right_wrist_slam: bool = False
    right_wrist_trajectory_m: float = None
    right_wrist_duration_sec: float = None

    observer_data: bool = False
    observer_slam: bool = False
    observer_general_gaze: bool = False
    observer_personalized_gaze: bool = False
    observer_trajectory_m: float = None
    observer_duration_sec: float = None

    timesync: bool = False

    motion_narration: bool = False
    atomic_action: bool = False
    activity_summarization: bool = False

    participant_gender: str = None
    participant_height_cm: float = -1
    participant_weight_kg: float = -1
    participant_bmi: float = -1
    participant_age_group: str = None
    participant_ethnicity: str = None
    participant_xsens_suit_size: str = None
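For illustration, a hypothetical record (field values are made up; the real ones come from the dataset metadata) can be turned into attributes directly, with unspecified fields keeping their defaults:

from nymeria.sequence_attributes import SequenceAttributes

record = {
    "date": "20230607",
    "session_id": "s0",
    "fake_name": "james_johnson",
    "act_id": "act0",
    "uid": "20230607_s0_james_johnson_act0",
    "location": "loc1",
    "script": "cooking",
    "head_data": True,
}
attrs = SequenceAttributes(**record)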
nymeria/xsens_constants.py
ADDED
@@ -0,0 +1,98 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.


class XSensConstants:
    """
    \brief Transformations segment_tXYZ and segment_qWXYZ are defined as
    from XSens segment/part coordinates to XSens world coordinates.
    See the XSens manual for details.
    """

    num_parts: int = 23
    num_bones: int = 22
    num_sensors: int = 17
    k_timestamps_us: str = "timestamps_us"
    k_frame_count: str = "frameCount"
    k_framerate: str = "frameRate"
    k_part_tXYZ: str = "segment_tXYZ"
    k_part_qWXYZ: str = "segment_qWXYZ"
    k_ipose_part_tXYZ: str = "identity_segment_tXYZ"
    k_ipose_part_qWXYZ: str = "identity_segment_qWXYZ"
    k_tpose_part_tXYZ: str = "tpose_segment_tXYZ"
    k_tpose_part_qWXYZ: str = "tpose_segment_qWXYZ"
    k_foot_contacts: str = "foot_contacts"
    k_sensor_tXYZ: str = "sensor_tXYZ"
    k_sensor_qWXYZ: str = "sensor_qWXYZ"
    part_names = [
        "Pelvis",
        "L5",
        "L3",
        "T12",
        "T8",
        "Neck",
        "Head",
        "R_Shoulder",
        "R_UpperArm",
        "R_Forearm",
        "R_Hand",
        "L_Shoulder",
        "L_UpperArm",
        "L_Forearm",
        "L_Hand",
        "R_UpperLeg",
        "R_LowerLeg",
        "R_Foot",
        "R_Toe",
        "L_UpperLeg",
        "L_LowerLeg",
        "L_Foot",
        "L_Toe",
    ]  # num = 23
    kintree_parents: list[int] = [
        -1,
        0,
        1,
        2,
        3,
        4,
        5,
        4,
        7,
        8,
        9,
        4,
        11,
        12,
        13,
        0,
        15,
        16,
        17,
        0,
        19,
        20,
        21,
    ]  # num = 23
    sensor_names: list[str] = [
        "Pelvis",
        "T8",
        "Head",
        "RightShoulder",
        "RightUpperArm",
        "RightForeArm",
        "RightHand",
        "LeftShoulder",
        "LeftUpperArm",
        "LeftForeArm",
        "LeftHand",
        "RightUpperLeg",
        "RightLowerLeg",
        "RightFoot",
        "LeftUpperLeg",
        "LeftLowerLeg",
        "LeftFoot",
    ]  # num = 17
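A small sketch of how the kinematic-tree constants fit together: kintree_parents[i] is the parent index of part i (-1 for the pelvis root), so the 22 bones can be enumerated as parent/child name pairs:

from nymeria.xsens_constants import XSensConstants

bones = [
    (XSensConstants.part_names[parent], XSensConstants.part_names[child])
    for child, parent in enumerate(XSensConstants.kintree_parents)
    if parent >= 0
]
assert len(bones) == XSensConstants.num_bones  # 22 parent/child pairs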
setup.py
ADDED
@@ -0,0 +1,18 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.

from setuptools import find_packages, setup

setup(
    name="nymeria",
    version="0.0.1",
    packages=find_packages(),
    author="Lingni Ma",
    author_email="lingni.ma@meta.com",
    description="The official repo to support the Nymeria dataset",
    python_requires=">=3.10",
    install_requires=["click", "requests", "tqdm"],
)
ADDED
@@ -0,0 +1,46 @@
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.

import sys
from pathlib import Path

import click
import rerun as rr
from loguru import logger
from nymeria.data_provider import NymeriaDataProvider
from nymeria.data_viewer import NymeriaViewer


@click.command()
@click.option(
    "-i", "sequence_dir", type=Path, required=True, help="The sequence directory"
)
@click.option(
    "-s", "save_rrd", is_flag=True, default=False, help="Save rerun into logfile"
)
def main(sequence_dir: Path, save_rrd: bool) -> None:
    logger.remove()
    logger.add(
        sys.stdout,
        colorize=True,
        format="<level>{level: <7}</level> <blue>{name}.py:</blue><green>{function}</green><yellow>:{line}</yellow> {message}",
        level="INFO",
    )

    # See NymeriaDataProviderConfig for configuration
    nymeria_dp = NymeriaDataProvider(sequence_rootdir=sequence_dir, load_wrist=True)

    output_rrd: Path = sequence_dir / "nymeria.rrd" if save_rrd else None
    viewer = NymeriaViewer(output_rrd=output_rrd)
    viewer(nymeria_dp)
    if save_rrd:
        logger.info(f"Save visualization to {output_rrd=}")

    rr.disconnect()


if __name__ == "__main__":
    main()
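Given the options above, the viewer is presumably launched as: python viewer.py -i /path/to/sequence (the path is a placeholder); adding -s additionally writes the visualization to <sequence>/nymeria.rrd, as logged in the code.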