fishcharlie
committed on
Commit
•
c5b0eb2
1
Parent(s):
04dc265
Initial commit
Browse files- .gitattributes +2 -0
- .gitignore +1 -0
- LICENSE +7 -0
- README.md +32 -0
- data/faa/faa_xml_data.csv +3 -0
- data_processing_notebook.ipynb +104 -0
.gitattributes
CHANGED
@@ -57,3 +57,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
57 |
# Video files - compressed
|
58 |
*.mp4 filter=lfs diff=lfs merge=lfs -text
|
59 |
*.webm filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
57 |
# Video files - compressed
|
58 |
*.mp4 filter=lfs diff=lfs merge=lfs -text
|
59 |
*.webm filter=lfs diff=lfs merge=lfs -text
|
60 |
+
|
61 |
+
*.csv filter=lfs diff=lfs merge=lfs -text
|
.gitignore
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
s3_data
|
LICENSE
ADDED
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Copyright 2024 Charlie Fish
|
2 |
+
|
3 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
4 |
+
|
5 |
+
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
6 |
+
|
7 |
+
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
README.md
CHANGED
@@ -1,3 +1,35 @@
|
|
1 |
---
|
2 |
license: mit
|
3 |
---
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
---
|
2 |
license: mit
|
3 |
---
|
4 |
+
|
5 |
+
# AirportStatusBot Data
|
6 |
+
|
7 |
+
This Hugging Face Dataset includes data released by my [AirportStatusBot](https://github.com/fishcharlie/AirportStatusBot).
|
8 |
+
|
9 |
+
## Data
|
10 |
+
|
11 |
+
#### `data/faa/faa_xml_data.csv`
|
12 |
+
|
13 |
+
This is a CSV file that contains the [raw data from the FAA](https://nasstatus.faa.gov/api/airport-status-information) that AirportStatusBot has captured. There is no header in this file, and each line/row is a string of XML data directly from the FAA.
|
14 |
+
|
15 |
+
There are no duplicate rows in this file, and the rows are sorted chronologically by the `Update_Time` field in the XML data.
|
16 |
+
|
17 |
+
## Other Files
|
18 |
+
|
19 |
+
#### `data_processing_notebook.ipynb`
|
20 |
+
|
21 |
+
This is a Jupyter Notebook that I used to process the raw XML data into a CSV file. If you have access to the RAW data that AirportStatusBot has captured, you can use this notebook to process the data into the CSV file (`data/faa/faa_xml_data.csv`).
|
22 |
+
|
23 |
+
## Other Notes
|
24 |
+
|
25 |
+
- There is no guarantee that this data is complete. There may be missing data. However I'm fairly confident that the data provided is as accurate as the FAA provides.
|
26 |
+
- The date range of data included is currently from 2023-12-17 to 2024-12-24. If you would like to see data more recent than this, please feel free to reach out to me.
|
27 |
+
- I would **love** to hear about how you use this data. Please reach out to me and let me know how you're using it!
|
28 |
+
|
29 |
+
## Contact
|
30 |
+
|
31 |
+
If you have any questions or concerns, please feel free to [contact me](https://charlie.fish/contact).
|
32 |
+
|
33 |
+
## License
|
34 |
+
|
35 |
+
[MIT](LICENSE)
|
data/faa/faa_xml_data.csv
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:5faa8a23fc1f04a069da5de3b8e9df7c33bc2595973ad865535aed36952946fd
|
3 |
+
size 37414794
|
data_processing_notebook.ipynb
ADDED
@@ -0,0 +1,104 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "markdown",
|
5 |
+
"metadata": {},
|
6 |
+
"source": [
|
7 |
+
"# Data Processing Notebook\n",
|
8 |
+
"\n",
|
9 |
+
"The purpose of this notebook is to help me process the RAW S3 data into the right format to upload to Hugging Face."
|
10 |
+
]
|
11 |
+
},
|
12 |
+
{
|
13 |
+
"cell_type": "code",
|
14 |
+
"execution_count": 1,
|
15 |
+
"metadata": {},
|
16 |
+
"outputs": [],
|
17 |
+
"source": [
|
18 |
+
"# Import dependencies\n",
|
19 |
+
"import os\n",
|
20 |
+
"from datetime import datetime"
|
21 |
+
]
|
22 |
+
},
|
23 |
+
{
|
24 |
+
"cell_type": "code",
|
25 |
+
"execution_count": 2,
|
26 |
+
"metadata": {},
|
27 |
+
"outputs": [],
|
28 |
+
"source": [
|
29 |
+
# Ensure the output directory `data/faa` exists.
# exist_ok=True replaces the check-then-create pair (os.path.exists + os.makedirs),
# which is racy: the directory could appear between the check and the call.
os.makedirs('data/faa', exist_ok=True)
|
32 |
+
]
|
33 |
+
},
|
34 |
+
{
|
35 |
+
"cell_type": "code",
|
36 |
+
"execution_count": 3,
|
37 |
+
"metadata": {},
|
38 |
+
"outputs": [],
|
39 |
+
"source": [
|
40 |
+
def extract_update_time(xml_string):
	"""Return the datetime parsed from the ``<Update_Time>`` element of *xml_string*.

	Expects a fragment like
	``<Update_Time>Wed Jul 10 17:18:22 2024 GMT</Update_Time>`` somewhere in
	the string. The ``%Z`` directive accepts the literal ``GMT``/``UTC``
	suffix; the returned datetime is naive (no tzinfo attached).
	"""
	start = xml_string.find("<Update_Time>") + len("<Update_Time>")
	end = xml_string.find("</Update_Time>")
	date_str = xml_string[start:end].strip()
	return datetime.strptime(date_str, "%a %b %d %H:%M:%S %Y %Z")


def _collect_xml_data_strings(source_dir='s3_data'):
	"""Read every ``.txt`` file in *source_dir* and return the unique XML payloads.

	Each file holds one or more records separated by ``<><><><><>``; each
	record is ``<metadata>---<xml>``, and only the XML half is kept.
	Returns a set, so duplicates are dropped as they are collected instead
	of via a list(set(...)) pass afterwards.
	"""
	xml_data_strings = set()
	for file in os.listdir(source_dir):
		# Skip anything that isn't a raw capture file.
		if not file.endswith('.txt'):
			continue
		with open(f'{source_dir}/{file}', 'r') as f:
			content = f.read()
		# NOTE: a single-record file needs no special case — splitting a
		# string with no separator yields a one-element list, so the loop
		# below covers both the one-record and many-record layouts.
		for part in content.split('<><><><><>'):
			xml_data_strings.add(part.split('---')[1].strip())
	return xml_data_strings


if __name__ == '__main__':
	# In a notebook cell __name__ is "__main__", so this runs exactly as before;
	# the guard only keeps the I/O from firing when the module is imported.
	xml_data_strings = sorted(_collect_xml_data_strings(), key=extract_update_time)

	# Write one XML document per line, sorted chronologically by Update_Time.
	with open('data/faa/faa_xml_data.csv', 'w') as f:
		for xml_data in xml_data_strings:
			f.write(xml_data + '\n')

	# Drop the (large) list to free memory in the long-lived notebook kernel.
	del xml_data_strings
|
80 |
+
]
|
81 |
+
}
|
82 |
+
],
|
83 |
+
"metadata": {
|
84 |
+
"kernelspec": {
|
85 |
+
"display_name": "Python 3",
|
86 |
+
"language": "python",
|
87 |
+
"name": "python3"
|
88 |
+
},
|
89 |
+
"language_info": {
|
90 |
+
"codemirror_mode": {
|
91 |
+
"name": "ipython",
|
92 |
+
"version": 3
|
93 |
+
},
|
94 |
+
"file_extension": ".py",
|
95 |
+
"mimetype": "text/x-python",
|
96 |
+
"name": "python",
|
97 |
+
"nbconvert_exporter": "python",
|
98 |
+
"pygments_lexer": "ipython3",
|
99 |
+
"version": "3.13.0"
|
100 |
+
}
|
101 |
+
},
|
102 |
+
"nbformat": 4,
|
103 |
+
"nbformat_minor": 2
|
104 |
+
}
|