giswqs committed on
Commit 9ad0e2d
1 Parent(s): 15bd62d

Add timelapse app

This view is limited to 50 files because it contains too many changes. See raw diff.
.gitignore ADDED
@@ -0,0 +1,132 @@
+ # Byte-compiled / optimized / DLL files
+ __pycache__/
+ *.py[cod]
+ *$py.class
+ # *.html
+ private/
+ .vscode/
+
+ # C extensions
+ *.so
+
+ # Distribution / packaging
+ .Python
+ build/
+ develop-eggs/
+ dist/
+ downloads/
+ eggs/
+ .eggs/
+ lib/
+ lib64/
+ parts/
+ sdist/
+ var/
+ wheels/
+ pip-wheel-metadata/
+ share/python-wheels/
+ *.egg-info/
+ .installed.cfg
+ *.egg
+ MANIFEST
+
+ # PyInstaller
+ # Usually these files are written by a python script from a template
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
+ *.manifest
+ *.spec
+
+ # Installer logs
+ pip-log.txt
+ pip-delete-this-directory.txt
+
+ # Unit test / coverage reports
+ htmlcov/
+ .tox/
+ .nox/
+ .coverage
+ .coverage.*
+ .cache
+ nosetests.xml
+ coverage.xml
+ *.cover
+ *.py,cover
+ .hypothesis/
+ .pytest_cache/
+
+ # Translations
+ *.mo
+ *.pot
+
+ # Django stuff:
+ *.log
+ local_settings.py
+ db.sqlite3
+ db.sqlite3-journal
+
+ # Flask stuff:
+ instance/
+ .webassets-cache
+
+ # Scrapy stuff:
+ .scrapy
+
+ # Sphinx documentation
+ docs/_build/
+
+ # PyBuilder
+ target/
+
+ # Jupyter Notebook
+ .ipynb_checkpoints
+
+ # IPython
+ profile_default/
+ ipython_config.py
+
+ # pyenv
+ .python-version
+
+ # pipenv
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
+ # install all needed dependencies.
+ #Pipfile.lock
+
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow
+ __pypackages__/
+
+ # Celery stuff
+ celerybeat-schedule
+ celerybeat.pid
+
+ # SageMath parsed files
+ *.sage.py
+
+ # Environments
+ .env
+ .venv
+ env/
+ venv/
+ ENV/
+ env.bak/
+ venv.bak/
+
+ # Spyder project settings
+ .spyderproject
+ .spyproject
+
+ # Rope project settings
+ .ropeproject
+
+ # mkdocs documentation
+ /site
+
+ # mypy
+ .mypy_cache/
+ .dmypy.json
+ dmypy.json
+
+ # Pyre type checker
+ .pyre/
Home.py ADDED
@@ -0,0 +1,59 @@
+ import streamlit as st
+ import leafmap.foliumap as leafmap
+
+ st.set_page_config(layout="wide")
+
+ st.sidebar.title("About")
+ st.sidebar.info(
+     """
+     Web App URL: <https://geospatial.streamlitapp.com>
+     GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
+     """
+ )
+
+ st.sidebar.title("Contact")
+ st.sidebar.info(
+     """
+     Qiusheng Wu: <https://wetlands.io>
+     [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://www.youtube.com/c/QiushengWu) | [LinkedIn](https://www.linkedin.com/in/qiushengwu)
+     """
+ )
+
+ st.sidebar.title("Support")
+ st.sidebar.info(
+     """
+     If you want to reward my work, I'd love a cup of coffee from you. Thanks!
+     [buymeacoffee.com/giswqs](http://buymeacoffee.com/giswqs)
+     """
+ )
+
+
+ st.title("Streamlit for Geospatial Applications")
+
+ st.markdown(
+     """
+     This multi-page web app demonstrates various interactive web apps created using [streamlit](https://streamlit.io) and open-source mapping libraries,
+     such as [leafmap](https://leafmap.org), [geemap](https://geemap.org), [pydeck](https://deckgl.readthedocs.io), and [kepler.gl](https://docs.kepler.gl/docs/keplergl-jupyter).
+     This is an open-source project and you are very welcome to contribute your comments, questions, resources, and apps as [issues](https://github.com/giswqs/streamlit-geospatial/issues) or
+     [pull requests](https://github.com/giswqs/streamlit-geospatial/pulls) to the [GitHub repository](https://github.com/giswqs/streamlit-geospatial).
+
+     """
+ )
+
+ st.info("Click on the left sidebar menu to navigate to the different apps.")
+
+ st.subheader("Timelapse of Satellite Imagery")
+ st.markdown(
+     """
+     The following timelapse animations were created using the Timelapse web app. Click `Timelapse` on the left sidebar menu to create your own timelapse for any location around the globe.
+     """
+ )
+
+ row1_col1, row1_col2 = st.columns(2)
+ with row1_col1:
+     st.image("https://github.com/giswqs/data/raw/main/timelapse/spain.gif")
+     st.image("https://github.com/giswqs/data/raw/main/timelapse/las_vegas.gif")
+
+ with row1_col2:
+     st.image("https://github.com/giswqs/data/raw/main/timelapse/goes.gif")
+     st.image("https://github.com/giswqs/data/raw/main/timelapse/fire.gif")
LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2021 Qiusheng Wu
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
Procfile ADDED
@@ -0,0 +1 @@
+ web: sh setup.sh && streamlit run Home.py
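Note: setup.sh is referenced by the Procfile but is not among the 50 files shown in this view. For context only, a typical Streamlit-on-Heroku setup script just writes the server configuration before `streamlit run` starts; the sketch below is an assumption for illustration, not part of this commit.

    # hypothetical setup.sh — assumed contents, not shown in this diff view
    mkdir -p ~/.streamlit/

    # Heroku provides the port via $PORT; run Streamlit headless on it
    cat > ~/.streamlit/config.toml <<EOF
    [server]
    headless = true
    port = $PORT
    enableCORS = false
    EOF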
app-bk.py ADDED
@@ -0,0 +1,49 @@
+ import streamlit as st
+ from multiapp import MultiApp
+ from apps import (
+     basemaps,
+     census,
+     cesium,
+     deck,
+     device_loc,
+     gee,
+     gee_datasets,
+     heatmap,
+     home,
+     housing,
+     # hurricane,
+     plotly_maps,
+     raster,
+     timelapse,
+     vector,
+     wms,
+     xy,
+ )
+
+ st.set_page_config(layout="wide")
+
+
+ apps = MultiApp()
+
+ # Add all your application here
+
+ apps.add_app("Home", home.app)
+ apps.add_app("Create Timelapse", timelapse.app)
+ # apps.add_app("Hurricane Mapping", hurricane.app)
+ apps.add_app("U.S. Real Estate Data", housing.app)
+ apps.add_app("U.S. Census Data", census.app)
+ apps.add_app("Visualize Raster Data", raster.app)
+ apps.add_app("Visualize Vector Data", vector.app)
+ apps.add_app("Search Basemaps", basemaps.app)
+ apps.add_app("Pydeck Gallery", deck.app)
+ apps.add_app("Heatmaps", heatmap.app)
+ apps.add_app("Add Points from XY", xy.app)
+ apps.add_app("Add Web Map Service (WMS)", wms.app)
+ apps.add_app("Google Earth Engine (GEE)", gee.app)
+ apps.add_app("Awesome GEE Community Datasets", gee_datasets.app)
+ apps.add_app("Geolocation", device_loc.app)
+ apps.add_app("Cesium 3D Map", cesium.app)
+ apps.add_app("Plotly", plotly_maps.app)
+
+ # The main app
+ apps.run()
app.py CHANGED
@@ -1,4 +1,50 @@
  import streamlit as st
+ import leafmap.foliumap as leafmap
 
- x = st.slider('Select a value')
- st.write(x, 'squared is', x * x)
+ st.set_page_config(layout="wide")
+
+ st.sidebar.title("About")
+ st.sidebar.info(
+     """
+     Web App URL: <https://geospatial.streamlitapp.com>
+     GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
+     """
+ )
+
+ st.sidebar.title("Contact")
+ st.sidebar.info(
+     """
+     Qiusheng Wu: <https://wetlands.io>
+     [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://www.youtube.com/c/QiushengWu) | [LinkedIn](https://www.linkedin.com/in/qiushengwu)
+     """
+ )
+
+ st.title("Streamlit for Geospatial Applications")
+
+ st.markdown(
+     """
+     This multi-page web app demonstrates various interactive web apps created using [streamlit](https://streamlit.io) and open-source mapping libraries,
+     such as [leafmap](https://leafmap.org), [geemap](https://geemap.org), [pydeck](https://deckgl.readthedocs.io), and [kepler.gl](https://docs.kepler.gl/docs/keplergl-jupyter).
+     This is an open-source project and you are very welcome to contribute your comments, questions, resources, and apps as [issues](https://github.com/giswqs/streamlit-geospatial/issues) or
+     [pull requests](https://github.com/giswqs/streamlit-geospatial/pulls) to the [GitHub repository](https://github.com/giswqs/streamlit-geospatial).
+
+     """
+ )
+
+ st.info("Click on the left sidebar menu to navigate to the different apps.")
+
+ st.subheader("Timelapse of Satellite Imagery")
+ st.markdown(
+     """
+     The following timelapse animations were created using the Timelapse web app. Click `Timelapse` on the left sidebar menu to create your own timelapse for any location around the globe.
+     """
+ )
+
+ row1_col1, row1_col2 = st.columns(2)
+ with row1_col1:
+     st.image("https://github.com/giswqs/data/raw/main/timelapse/spain.gif")
+     st.image("https://github.com/giswqs/data/raw/main/timelapse/las_vegas.gif")
+
+ with row1_col2:
+     st.image("https://github.com/giswqs/data/raw/main/timelapse/goes.gif")
+     st.image("https://github.com/giswqs/data/raw/main/timelapse/fire.gif")
apps/basemaps.py ADDED
@@ -0,0 +1,44 @@
+ import streamlit as st
+ import leafmap.foliumap as leafmap
+
+
+ def app():
+     st.title("Searching Basemaps")
+     st.markdown(
+         """
+     This app is a demonstration of searching and loading basemaps from [xyzservices](https://github.com/geopandas/xyzservices) and [Quick Map Services (QMS)](https://github.com/nextgis/quickmapservices). Selecting from 1000+ basemaps with a few clicks.
+     """
+     )
+
+     with st.expander("See demo"):
+         st.image("https://i.imgur.com/0SkUhZh.gif")
+
+     row1_col1, row1_col2 = st.columns([3, 1])
+     width = 800
+     height = 600
+     tiles = None
+
+     with row1_col2:
+
+         checkbox = st.checkbox("Search Quick Map Services (QMS)")
+         keyword = st.text_input("Enter a keyword to search and press Enter:")
+         empty = st.empty()
+
+         if keyword:
+             options = leafmap.search_xyz_services(keyword=keyword)
+             if checkbox:
+                 qms = leafmap.search_qms(keyword=keyword)
+                 if qms is not None:
+                     options = options + qms
+
+             tiles = empty.multiselect(
+                 "Select XYZ tiles to add to the map:", options)
+
+     with row1_col1:
+         m = leafmap.Map()
+
+         if tiles is not None:
+             for tile in tiles:
+                 m.add_xyz_service(tile)
+
+         m.to_streamlit(width, height)
apps/census.py ADDED
@@ -0,0 +1,35 @@
+ import streamlit as st
+ import leafmap.foliumap as leafmap
+
+
+ def app():
+     st.title("Using U.S. Census Data")
+     st.markdown(
+         """
+     This app is a demonstration of using the [U.S. Census Bureau](https://www.census.gov/) TIGERweb Web Map Service (WMS). A complete list of WMS layers can be found [here](https://tigerweb.geo.census.gov/tigerwebmain/TIGERweb_wms.html).
+     """
+     )
+
+     if "first_index" not in st.session_state:
+         st.session_state["first_index"] = 60
+     else:
+         st.session_state["first_index"] = 0
+
+     row1_col1, row1_col2 = st.columns([3, 1])
+     width = 800
+     height = 600
+
+     census_dict = leafmap.get_census_dict()
+     with row1_col2:
+
+         wms = st.selectbox("Select a WMS", list(census_dict.keys()), index=11)
+         layer = st.selectbox(
+             "Select a layer",
+             census_dict[wms]["layers"],
+             index=st.session_state["first_index"],
+         )
+
+     with row1_col1:
+         m = leafmap.Map()
+         m.add_census_data(wms, layer)
+         m.to_streamlit(width, height)
apps/cesium.py ADDED
@@ -0,0 +1,8 @@
+ import leafmap
+ import streamlit as st
+
+
+ def app():
+     st.title("Cesium 3D Map")
+     html = "data/html/sfo_buildings.html"
+     leafmap.cesium_to_streamlit(html, height=800)
apps/deck.py ADDED
@@ -0,0 +1,178 @@
+ import os
+ import streamlit as st
+ import pydeck as pdk
+ import pandas as pd
+
+
+ def globe_view():
+
+     """
+     GlobeView
+     =========
+
+     Over 33,000 power plants of the world plotted by their production capacity (given by height)
+     and fuel type (green if renewable) on an experimental deck.gl GlobeView.
+     """
+
+     COUNTRIES = "https://d2ad6b4ur7yvpq.cloudfront.net/naturalearth-3.3.0/ne_50m_admin_0_scale_rank.geojson"
+     POWER_PLANTS = "https://raw.githubusercontent.com/ajduberstein/geo_datasets/master/global_power_plant_database.csv"
+
+     df = pd.read_csv(POWER_PLANTS)
+
+     def is_green(fuel_type):
+         """Return a green RGB value if a facility uses a renewable fuel type"""
+         if fuel_type.lower() in (
+             "nuclear",
+             "water",
+             "wind",
+             "hydro",
+             "biomass",
+             "solar",
+             "geothermal",
+         ):
+             return [10, 230, 120]
+         return [230, 158, 10]
+
+     df["color"] = df["primary_fuel"].apply(is_green)
+
+     view_state = pdk.ViewState(latitude=51.47, longitude=0.45, zoom=2, min_zoom=2)
+
+     # Set height and width variables
+     view = pdk.View(type="_GlobeView", controller=True, width=1000, height=700)
+
+     layers = [
+         pdk.Layer(
+             "GeoJsonLayer",
+             id="base-map",
+             data=COUNTRIES,
+             stroked=False,
+             filled=True,
+             get_fill_color=[200, 200, 200],
+         ),
+         pdk.Layer(
+             "ColumnLayer",
+             id="power-plant",
+             data=df,
+             get_elevation="capacity_mw",
+             get_position=["longitude", "latitude"],
+             elevation_scale=100,
+             pickable=True,
+             auto_highlight=True,
+             radius=20000,
+             get_fill_color="color",
+         ),
+     ]
+
+     r = pdk.Deck(
+         views=[view],
+         initial_view_state=view_state,
+         tooltip={"text": "{name}, {primary_fuel} plant, {country}"},
+         layers=layers,
+         # Note that this must be set for the globe to be opaque
+         parameters={"cull": True},
+     )
+
+     return r
+
+
+ def geojson_layer():
+
+     """
+     GeoJsonLayer
+     ===========
+
+     Property values in Vancouver, Canada, adapted from the deck.gl example pages. Input data is in a GeoJSON format.
+     """
+
+     DATA_URL = "https://raw.githubusercontent.com/visgl/deck.gl-data/master/examples/geojson/vancouver-blocks.json"
+     LAND_COVER = [
+         [[-123.0, 49.196], [-123.0, 49.324], [-123.306, 49.324], [-123.306, 49.196]]
+     ]
+
+     INITIAL_VIEW_STATE = pdk.ViewState(
+         latitude=49.254, longitude=-123.13, zoom=11, max_zoom=16, pitch=45, bearing=0
+     )
+
+     polygon = pdk.Layer(
+         "PolygonLayer",
+         LAND_COVER,
+         stroked=False,
+         # processes the data as a flat longitude-latitude pair
+         get_polygon="-",
+         get_fill_color=[0, 0, 0, 20],
+     )
+
+     geojson = pdk.Layer(
+         "GeoJsonLayer",
+         DATA_URL,
+         opacity=0.8,
+         stroked=False,
+         filled=True,
+         extruded=True,
+         wireframe=True,
+         get_elevation="properties.valuePerSqm / 20",
+         get_fill_color="[255, 255, properties.growth * 255]",
+         get_line_color=[255, 255, 255],
+     )
+
+     r = pdk.Deck(layers=[polygon, geojson], initial_view_state=INITIAL_VIEW_STATE)
+     return r
+
+
+ def terrain():
+
+     """
+     TerrainLayer
+     ===========
+
+     Extruded terrain using AWS Open Data Terrain Tiles and Mapbox Satellite imagery
+     """
+
+     # Import Mapbox API Key from environment
+     MAPBOX_API_KEY = os.environ["MAPBOX_API_KEY"]
+
+     # AWS Open Data Terrain Tiles
+     TERRAIN_IMAGE = (
+         "https://s3.amazonaws.com/elevation-tiles-prod/terrarium/{z}/{x}/{y}.png"
+     )
+
+     # Define how to parse elevation tiles
+     ELEVATION_DECODER = {
+         "rScaler": 256,
+         "gScaler": 1,
+         "bScaler": 1 / 256,
+         "offset": -32768,
+     }
+
+     SURFACE_IMAGE = f"https://api.mapbox.com/v4/mapbox.satellite/{{z}}/{{x}}/{{y}}@2x.png?access_token={MAPBOX_API_KEY}"
+
+     terrain_layer = pdk.Layer(
+         "TerrainLayer",
+         elevation_decoder=ELEVATION_DECODER,
+         texture=SURFACE_IMAGE,
+         elevation_data=TERRAIN_IMAGE,
+     )
+
+     view_state = pdk.ViewState(
+         latitude=46.24, longitude=-122.18, zoom=11.5, bearing=140, pitch=60
+     )
+
+     r = pdk.Deck(terrain_layer, initial_view_state=view_state)
+     return r
+
+
+ def app():
+
+     st.title("Pydeck Gallery")
+
+     options = ["GeoJsonLayer", "GlobeView", "TerrainLayer"]
+
+     option = st.selectbox("Select a pydeck layer type", options)
+
+     if option == "GeoJsonLayer":
+         st.header("Property values in Vancouver, Canada")
+         st.pydeck_chart(geojson_layer())
+     # elif option == "GlobeView":
+     #     st.pydeck_chart(globe_view())
+     elif option == "TerrainLayer":
+         st.pydeck_chart(terrain())
apps/device_loc.py ADDED
@@ -0,0 +1,43 @@
+ import streamlit as st
+ from bokeh.models.widgets import Button
+ from bokeh.models import CustomJS
+ from streamlit_bokeh_events import streamlit_bokeh_events
+ import leafmap.foliumap as leafmap
+
+
+ def app():
+
+     loc_button = Button(label="Get Device Location", max_width=150)
+     loc_button.js_on_event(
+         "button_click",
+         CustomJS(
+             code="""
+             navigator.geolocation.getCurrentPosition(
+                 (loc) => {
+                     document.dispatchEvent(new CustomEvent("GET_LOCATION", {detail: {lat: loc.coords.latitude, lon: loc.coords.longitude}}))
+                 }
+             )
+             """
+         ),
+     )
+     result = streamlit_bokeh_events(
+         loc_button,
+         events="GET_LOCATION",
+         key="get_location",
+         refresh_on_update=False,
+         override_height=75,
+         debounce_time=0,
+     )
+
+     if result:
+         if "GET_LOCATION" in result:
+             loc = result.get("GET_LOCATION")
+             lat = loc.get("lat")
+             lon = loc.get("lon")
+             st.write(f"Lat, Lon: {lat}, {lon}")
+
+             m = leafmap.Map(center=(lat, lon), zoom=16)
+             m.add_basemap("ROADMAP")
+             popup = f"lat, lon: {lat}, {lon}"
+             m.add_marker(location=(lat, lon), popup=popup)
+             m.to_streamlit()
apps/gee.py ADDED
@@ -0,0 +1,123 @@
+ import ee
+ import streamlit as st
+ import geemap.foliumap as geemap
+
+
+ def nlcd():
+
+     st.header("National Land Cover Database (NLCD)")
+
+     row1_col1, row1_col2 = st.columns([3, 1])
+     width = 950
+     height = 600
+
+     Map = geemap.Map()
+
+     # Select the seven NLCD epoches after 2000.
+     years = ["2001", "2004", "2006", "2008", "2011", "2013", "2016"]
+
+     # Get an NLCD image by year.
+     def getNLCD(year):
+         # Import the NLCD collection.
+         dataset = ee.ImageCollection("USGS/NLCD_RELEASES/2016_REL")
+
+         # Filter the collection by year.
+         nlcd = dataset.filter(ee.Filter.eq("system:index", year)).first()
+
+         # Select the land cover band.
+         landcover = nlcd.select("landcover")
+         return landcover
+
+     with row1_col2:
+         selected_year = st.multiselect("Select a year", years)
+         add_legend = st.checkbox("Show legend")
+
+         if selected_year:
+             for year in selected_year:
+                 Map.addLayer(getNLCD(year), {}, "NLCD " + year)
+
+             if add_legend:
+                 Map.add_legend(
+                     legend_title="NLCD Land Cover Classification", builtin_legend="NLCD"
+                 )
+             with row1_col1:
+                 Map.to_streamlit(width=width, height=height)
+
+         else:
+             with row1_col1:
+                 Map.to_streamlit(width=width, height=height)
+
+
+ def search_data():
+
+     st.header("Search Earth Engine Data Catalog")
+
+     Map = geemap.Map()
+
+     if "ee_assets" not in st.session_state:
+         st.session_state["ee_assets"] = None
+     if "asset_titles" not in st.session_state:
+         st.session_state["asset_titles"] = None
+
+     col1, col2 = st.columns([2, 1])
+
+     dataset = None
+     with col2:
+         keyword = st.text_input("Enter a keyword to search (e.g., elevation)", "")
+         if keyword:
+             ee_assets = geemap.search_ee_data(keyword)
+             asset_titles = [x["title"] for x in ee_assets]
+             dataset = st.selectbox("Select a dataset", asset_titles)
+             if len(ee_assets) > 0:
+                 st.session_state["ee_assets"] = ee_assets
+                 st.session_state["asset_titles"] = asset_titles
+
+             if dataset is not None:
+                 with st.expander("Show dataset details", True):
+                     index = asset_titles.index(dataset)
+                     html = geemap.ee_data_html(st.session_state["ee_assets"][index])
+                     st.markdown(html, True)
+
+                 ee_id = ee_assets[index]["ee_id_snippet"]
+                 uid = ee_assets[index]["uid"]
+                 st.markdown(f"""**Earth Engine Snippet:** `{ee_id}`""")
+
+                 vis_params = st.text_input(
+                     "Enter visualization parameters as a dictionary", {}
+                 )
+                 layer_name = st.text_input("Enter a layer name", uid)
+                 button = st.button("Add dataset to map")
+                 if button:
+                     vis = {}
+                     try:
+                         if vis_params.strip() == "":
+                             # st.error("Please enter visualization parameters")
+                             vis_params = "{}"
+                         vis = eval(vis_params)
+                         if not isinstance(vis, dict):
+                             st.error("Visualization parameters must be a dictionary")
+                         try:
+                             Map.addLayer(eval(ee_id), vis, layer_name)
+                         except Exception as e:
+                             st.error(f"Error adding layer: {e}")
+                     except Exception as e:
+                         st.error(f"Invalid visualization parameters: {e}")
+
+             with col1:
+                 Map.to_streamlit()
+         else:
+             with col1:
+                 Map.to_streamlit()
+
+
+ def app():
+     st.title("Google Earth Engine Applications")
+
+     apps = ["National Land Cover Database (NLCD)", "Search Earth Engine Data Catalog"]
+
+     selected_app = st.selectbox("Select an app", apps)
+
+     if selected_app == "National Land Cover Database (NLCD)":
+         nlcd()
+     elif selected_app == "Search Earth Engine Data Catalog":
+         search_data()
apps/gee_datasets.py ADDED
@@ -0,0 +1,186 @@
+ import ee
+ import streamlit as st
+ import geemap.foliumap as geemap
+
+ WIDTH = 1060
+ HEIGHT = 600
+
+
+ def function():
+     st.write("Not implemented yet.")
+     Map = geemap.Map()
+     Map.to_streamlit(WIDTH, HEIGHT)
+
+
+ def lulc_mrb_floodplain():
+
+     Map = geemap.Map()
+
+     State_boundaries = ee.FeatureCollection('users/giswqs/MRB/State_Boundaries')
+     State_style = State_boundaries.style(
+         **{'color': '808080', 'width': 1, 'fillColor': '00000000'}
+     )
+
+     MRB_boundary = ee.FeatureCollection('users/giswqs/MRB/MRB_Boundary')
+     MRB_style = MRB_boundary.style(
+         **{'color': '000000', 'width': 2, 'fillColor': '00000000'}
+     )
+
+     floodplain = ee.Image('users/giswqs/MRB/USGS_Floodplain')
+
+     class_values = [34, 38, 46, 50, 62]
+     class_palette = ['c500ff', '00ffc5', '00a9e6', '73004d', '004d73']
+
+     img_1950 = ee.Image('users/giswqs/MRB/Major_Transitions_1941_1950')
+     img_1950 = img_1950.set('b1_class_values', class_values)
+     img_1950 = img_1950.set('b1_class_palette', class_palette)
+
+     img_1960 = ee.Image('users/giswqs/MRB/Major_Transitions_1941_1960')
+     img_1960 = img_1960.set('b1_class_values', class_values)
+     img_1960 = img_1960.set('b1_class_palette', class_palette)
+
+     img_1970 = ee.Image('users/giswqs/MRB/Major_Transitions_1941_1970')
+     img_1970 = img_1970.set('b1_class_values', class_values)
+     img_1970 = img_1970.set('b1_class_palette', class_palette)
+
+     img_1980 = ee.Image('users/giswqs/MRB/Major_Transitions_1941_1980')
+     img_1980 = img_1980.set('b1_class_values', class_values)
+     img_1980 = img_1980.set('b1_class_palette', class_palette)
+
+     img_1990 = ee.Image('users/giswqs/MRB/Major_Transitions_1941_1990')
+     img_1990 = img_1990.set('b1_class_values', class_values)
+     img_1990 = img_1990.set('b1_class_palette', class_palette)
+
+     img_2000 = ee.Image('users/giswqs/MRB/Major_Transitions_1941_2000')
+     img_2000 = img_2000.set('b1_class_values', class_values)
+     img_2000 = img_2000.set('b1_class_palette', class_palette)
+
+     Map.addLayer(floodplain, {'palette': ['cccccc']}, 'Floodplain', True, 0.5)
+     Map.addLayer(img_2000, {}, 'Major Transitions 1941-2000')
+     Map.addLayer(img_1990, {}, 'Major Transitions 1941-1990')
+     Map.addLayer(img_1980, {}, 'Major Transitions 1941-1980')
+     Map.addLayer(img_1970, {}, 'Major Transitions 1941-1970')
+     Map.addLayer(img_1960, {}, 'Major Transitions 1941-1960')
+     Map.addLayer(img_1950, {}, 'Major Transitions 1941-1950')
+
+     Map.addLayer(State_style, {}, 'State Boundaries')
+     Map.addLayer(MRB_style, {}, 'MRB Boundary')
+
+     Map.to_streamlit(WIDTH, HEIGHT)
+
+
+ def global_mangrove_watch():
+     """https://samapriya.github.io/awesome-gee-community-datasets/projects/mangrove/"""
+     Map = geemap.Map()
+     gmw2007 = ee.FeatureCollection("projects/sat-io/open-datasets/GMW/GMW_2007_v2")
+     gmw2008 = ee.FeatureCollection("projects/sat-io/open-datasets/GMW/GMW_2008_v2")
+     gmw2009 = ee.FeatureCollection("projects/sat-io/open-datasets/GMW/GMW_2009_v2")
+     gmw2010 = ee.FeatureCollection("projects/sat-io/open-datasets/GMW/GMW_2010_v2")
+     gmw2015 = ee.FeatureCollection("projects/sat-io/open-datasets/GMW/GMW_2015_v2")
+     gmw2016 = ee.FeatureCollection("projects/sat-io/open-datasets/GMW/GMW_2016_v2")
+     gmw1996 = ee.FeatureCollection("projects/sat-io/open-datasets/GMW/GMW_1996_v2")
+
+     Map.addLayer(
+         ee.Image().paint(gmw1996, 0, 3),
+         {"palette": ["228B22"]},
+         'Global Mangrove Watch 1996',
+     )
+     Map.addLayer(
+         ee.Image().paint(gmw2007, 0, 3),
+         {"palette": ["228B22"]},
+         'Global Mangrove Watch 2007',
+     )
+     Map.addLayer(
+         ee.Image().paint(gmw2008, 0, 3),
+         {"palette": ["228B22"]},
+         'Global Mangrove Watch 2008',
+     )
+     Map.addLayer(
+         ee.Image().paint(gmw2009, 0, 3),
+         {"palette": ["228B22"]},
+         'Global Mangrove Watch 2009',
+     )
+     Map.addLayer(
+         ee.Image().paint(gmw2010, 0, 3),
+         {"palette": ["228B22"]},
+         'Global Mangrove Watch 2010',
+     )
+     Map.addLayer(
+         ee.Image().paint(gmw2015, 0, 3),
+         {"palette": ["228B22"]},
+         'Global Mangrove Watch 2015',
+     )
+     Map.addLayer(
+         ee.Image().paint(gmw2016, 0, 3),
+         {"palette": ["228B22"]},
+         'Global Mangrove Watch 2015',
+     )
+
+     Map.to_streamlit(WIDTH, HEIGHT)
+
+
+ def app():
+
+     st.title("Awesome GEE Community Datasets")
+
+     st.markdown(
+         """
+
+     This app is for exploring the [Awesome GEE Community Datasets](https://samapriya.github.io/awesome-gee-community-datasets). Work in progress.
+
+     """
+     )
+
+     datasets = {
+         "Population & Socioeconomic": {
+             "High Resolution Settlement Layer": "function()",
+             "World Settlement Footprint (2015)": "function()",
+             "Gridded Population of the World": "function()",
+             "geoBoundaries Global Database": "function()",
+             "West Africa Coastal Vulnerability Mapping": "function()",
+             "Relative Wealth Index (RWI)": "function()",
+             "Social Connectedness Index (SCI)": "function()",
+             "Native Land (Indigenous Land Maps)": "function()",
+         },
+         "Geophysical, Biological & Biogeochemical": {
+             "Geomorpho90m Geomorphometric Layers": "function()",
+         },
+         "Land Use and Land Cover": {
+             "Global Mangrove Watch": "global_mangrove_watch()",
+             "Mississippi River Basin Floodplain Land Use Change (1941-2000)": "lulc_mrb_floodplain()",
+         },
+         "Hydrology": {
+             "Global Shoreline Dataset": "function()",
+         },
+         "Agriculture, Vegetation and Forestry": {
+             "Landfire Mosaics LF v2.0.0": "function()",
+         },
+         "Global Utilities, Assets and Amenities Layers": {
+             "Global Power": "function()",
+         },
+         "EarthEnv Biodiversity ecosystems & climate Layers": {
+             "Global Consensus Landcover": "function()",
+         },
+         "Weather and Climate Layers": {
+             "Global Reference Evapotranspiration Layers": "function()",
+         },
+         "Global Events Layers": {
+             "Global Fire Atlas (2003-2016)": "function()",
+         },
+     }
+
+     row1_col1, row1_col2, _ = st.columns([1.2, 1.8, 1])
+
+     with row1_col1:
+         category = st.selectbox("Select a category", datasets.keys(), index=2)
+     with row1_col2:
+         dataset = st.selectbox("Select a dataset", datasets[category].keys())
+
+     Map = geemap.Map()
+
+     if dataset:
+         eval(datasets[category][dataset])
+
+     else:
+         Map = geemap.Map()
+         Map.to_streamlit(WIDTH, HEIGHT)
apps/heatmap.py ADDED
@@ -0,0 +1,19 @@
+ import streamlit as st
+ import leafmap.foliumap as leafmap
+
+
+ def app():
+
+     st.title('Heatmaps')
+
+     filepath = "https://raw.githubusercontent.com/giswqs/leafmap/master/examples/data/us_cities.csv"
+     m = leafmap.Map(tiles="stamentoner")
+     m.add_heatmap(
+         filepath,
+         latitude="latitude",
+         longitude="longitude",
+         value="pop_max",
+         name="Heat map",
+         radius=20,
+     )
+     m.to_streamlit(width=700, height=500)
apps/home.py ADDED
@@ -0,0 +1,34 @@
+ import streamlit as st
+ import leafmap.foliumap as leafmap
+
+
+ def app():
+     st.title("Streamlit for Geospatial Applications")
+
+     st.markdown(
+         """
+     This multi-page web app demonstrates various interactive web apps created using [streamlit](https://streamlit.io) and open-source mapping libraries,
+     such as [leafmap](https://leafmap.org), [geemap](https://geemap.org), [pydeck](https://deckgl.readthedocs.io), and [kepler.gl](https://docs.kepler.gl/docs/keplergl-jupyter).
+     This is an open-source project and you are very welcome to contribute your comments, questions, resources, and apps as [issues](https://github.com/giswqs/streamlit-geospatial/issues) or
+     [pull requests](https://github.com/giswqs/streamlit-geospatial/pulls) to the [GitHub repository](https://github.com/giswqs/streamlit-geospatial).
+
+     """
+     )
+
+     st.info("Click on the left sidebar menu to navigate to the different apps.")
+
+     st.subheader("Timelapse of Satellite Imagery")
+     st.markdown(
+         """
+     The following timelapse animations were created using the Timelapse web app. Click `Create Timelapse` on the left sidebar menu to create your own timelapse for any location around the globe.
+     """
+     )
+
+     row1_col1, row1_col2 = st.columns(2)
+     with row1_col1:
+         st.image("https://github.com/giswqs/data/raw/main/timelapse/spain.gif")
+         st.image("https://github.com/giswqs/data/raw/main/timelapse/las_vegas.gif")
+
+     with row1_col2:
+         st.image("https://github.com/giswqs/data/raw/main/timelapse/goes.gif")
+         st.image("https://github.com/giswqs/data/raw/main/timelapse/fire.gif")
apps/housing.py ADDED
@@ -0,0 +1,457 @@
+ import datetime
+ import os
+ import pathlib
+ import requests
+ import zipfile
+ import pandas as pd
+ import pydeck as pdk
+ import geopandas as gpd
+ import streamlit as st
+ import leafmap.colormaps as cm
+ from leafmap.common import hex_to_rgb
+
+
+ STREAMLIT_STATIC_PATH = pathlib.Path(st.__path__[0]) / "static"
+ # We create a downloads directory within the streamlit static asset directory
+ # and we write output files to it
+ DOWNLOADS_PATH = STREAMLIT_STATIC_PATH / "downloads"
+ if not DOWNLOADS_PATH.is_dir():
+     DOWNLOADS_PATH.mkdir()
+
+ # Data source: https://www.realtor.com/research/data/
+ # link_prefix = "https://econdata.s3-us-west-2.amazonaws.com/Reports/"
+ link_prefix = "https://raw.githubusercontent.com/giswqs/data/main/housing/"
+
+ data_links = {
+     "weekly": {
+         "national": link_prefix + "Core/listing_weekly_core_aggregate_by_country.csv",
+         "metro": link_prefix + "Core/listing_weekly_core_aggregate_by_metro.csv",
+     },
+     "monthly_current": {
+         "national": link_prefix + "Core/RDC_Inventory_Core_Metrics_Country.csv",
+         "state": link_prefix + "Core/RDC_Inventory_Core_Metrics_State.csv",
+         "metro": link_prefix + "Core/RDC_Inventory_Core_Metrics_Metro.csv",
+         "county": link_prefix + "Core/RDC_Inventory_Core_Metrics_County.csv",
+         "zip": link_prefix + "Core/RDC_Inventory_Core_Metrics_Zip.csv",
+     },
+     "monthly_historical": {
+         "national": link_prefix + "Core/RDC_Inventory_Core_Metrics_Country_History.csv",
+         "state": link_prefix + "Core/RDC_Inventory_Core_Metrics_State_History.csv",
+         "metro": link_prefix + "Core/RDC_Inventory_Core_Metrics_Metro_History.csv",
+         "county": link_prefix + "Core/RDC_Inventory_Core_Metrics_County_History.csv",
+         "zip": link_prefix + "Core/RDC_Inventory_Core_Metrics_Zip_History.csv",
+     },
+     "hotness": {
+         "metro": link_prefix
+         + "Hotness/RDC_Inventory_Hotness_Metrics_Metro_History.csv",
+         "county": link_prefix
+         + "Hotness/RDC_Inventory_Hotness_Metrics_County_History.csv",
+         "zip": link_prefix + "Hotness/RDC_Inventory_Hotness_Metrics_Zip_History.csv",
+     },
+ }
+
+
+ def get_data_columns(df, category, frequency="monthly"):
+     if frequency == "monthly":
+         if category.lower() == "county":
+             del_cols = ["month_date_yyyymm", "county_fips", "county_name"]
+         elif category.lower() == "state":
+             del_cols = ["month_date_yyyymm", "state", "state_id"]
+         elif category.lower() == "national":
+             del_cols = ["month_date_yyyymm", "country"]
+         elif category.lower() == "metro":
+             del_cols = ["month_date_yyyymm", "cbsa_code", "cbsa_title", "HouseholdRank"]
+         elif category.lower() == "zip":
+             del_cols = ["month_date_yyyymm", "postal_code", "zip_name", "flag"]
+     elif frequency == "weekly":
+         if category.lower() == "national":
+             del_cols = ["week_end_date", "geo_country"]
+         elif category.lower() == "metro":
+             del_cols = ["week_end_date", "cbsa_code", "cbsa_title", "hh_rank"]
+
+     cols = df.columns.values.tolist()
+
+     for col in cols:
+         if col.strip() in del_cols:
+             cols.remove(col)
+     if category.lower() == "metro":
+         return cols[2:]
+     else:
+         return cols[1:]
+
+
+ @st.cache
+ def get_inventory_data(url):
+     df = pd.read_csv(url)
+     url = url.lower()
+     if "county" in url:
+         df["county_fips"] = df["county_fips"].map(str)
+         df["county_fips"] = df["county_fips"].str.zfill(5)
+     elif "state" in url:
+         df["STUSPS"] = df["state_id"].str.upper()
+     elif "metro" in url:
+         df["cbsa_code"] = df["cbsa_code"].map(str)
+     elif "zip" in url:
+         df["postal_code"] = df["postal_code"].map(str)
+         df["postal_code"] = df["postal_code"].str.zfill(5)
+
+     if "listing_weekly_core_aggregate_by_country" in url:
+         columns = get_data_columns(df, "national", "weekly")
+         for column in columns:
+             if column != "median_days_on_market_by_day_yy":
+                 df[column] = df[column].str.rstrip("%").astype(float) / 100
+     if "listing_weekly_core_aggregate_by_metro" in url:
+         columns = get_data_columns(df, "metro", "weekly")
+         for column in columns:
+             if column != "median_days_on_market_by_day_yy":
+                 df[column] = df[column].str.rstrip("%").astype(float) / 100
+         df["cbsa_code"] = df["cbsa_code"].str[:5]
+     return df
+
+
+ def filter_weekly_inventory(df, week):
+     df = df[df["week_end_date"] == week]
+     return df
+
+
+ def get_start_end_year(df):
+     start_year = int(str(df["month_date_yyyymm"].min())[:4])
+     end_year = int(str(df["month_date_yyyymm"].max())[:4])
+     return start_year, end_year
+
+
+ def get_periods(df):
+     return [str(d) for d in list(set(df["month_date_yyyymm"].tolist()))]
+
+
+ @st.cache
+ def get_geom_data(category):
+
+     prefix = (
+         "https://raw.githubusercontent.com/giswqs/streamlit-geospatial/master/data/"
+     )
+     links = {
+         "national": prefix + "us_nation.geojson",
+         "state": prefix + "us_states.geojson",
+         "county": prefix + "us_counties.geojson",
+         "metro": prefix + "us_metro_areas.geojson",
+         "zip": "https://www2.census.gov/geo/tiger/GENZ2018/shp/cb_2018_us_zcta510_500k.zip",
+     }
+
+     if category.lower() == "zip":
+         r = requests.get(links[category])
+         out_zip = os.path.join(DOWNLOADS_PATH, "cb_2018_us_zcta510_500k.zip")
+         with open(out_zip, "wb") as code:
+             code.write(r.content)
+         zip_ref = zipfile.ZipFile(out_zip, "r")
+         zip_ref.extractall(DOWNLOADS_PATH)
+         gdf = gpd.read_file(out_zip.replace("zip", "shp"))
+     else:
+         gdf = gpd.read_file(links[category])
+     return gdf
+
+
+ def join_attributes(gdf, df, category):
+
+     new_gdf = None
+     if category == "county":
+         new_gdf = gdf.merge(df, left_on="GEOID", right_on="county_fips", how="outer")
+     elif category == "state":
+         new_gdf = gdf.merge(df, left_on="STUSPS", right_on="STUSPS", how="outer")
+     elif category == "national":
+         if "geo_country" in df.columns.values.tolist():
+             df["country"] = None
+             df.loc[0, "country"] = "United States"
+         new_gdf = gdf.merge(df, left_on="NAME", right_on="country", how="outer")
+     elif category == "metro":
+         new_gdf = gdf.merge(df, left_on="CBSAFP", right_on="cbsa_code", how="outer")
+     elif category == "zip":
+         new_gdf = gdf.merge(df, left_on="GEOID10", right_on="postal_code", how="outer")
+     return new_gdf
+
+
+ def select_non_null(gdf, col_name):
+     new_gdf = gdf[~gdf[col_name].isna()]
+     return new_gdf
+
+
+ def select_null(gdf, col_name):
+     new_gdf = gdf[gdf[col_name].isna()]
+     return new_gdf
+
+
+ def get_data_dict(name):
+     in_csv = os.path.join(os.getcwd(), "data/realtor_data_dict.csv")
+     df = pd.read_csv(in_csv)
+     label = list(df[df["Name"] == name]["Label"])[0]
+     desc = list(df[df["Name"] == name]["Description"])[0]
+     return label, desc
+
+
+ def get_weeks(df):
+     seq = list(set(df[~df["week_end_date"].isnull()]["week_end_date"].tolist()))
+     weeks = [
+         datetime.date(int(d.split("/")[2]), int(d.split("/")[0]), int(d.split("/")[1]))
+         for d in seq
+     ]
+     weeks.sort()
+     return weeks
+
+
+ def get_saturday(in_date):
+     idx = (in_date.weekday() + 1) % 7
+     sat = in_date + datetime.timedelta(6 - idx)
+     return sat
+
+
+ def app():
+
+     st.title("U.S. Real Estate Data and Market Trends")
+     st.markdown(
+         """**Introduction:** This interactive dashboard is designed for visualizing U.S. real estate data and market trends at multiple levels (i.e., national,
+         state, county, and metro). The data sources include [Real Estate Data](https://www.realtor.com/research/data) from realtor.com and
+         [Cartographic Boundary Files](https://www.census.gov/geographies/mapping-files/time-series/geo/carto-boundary-file.html) from U.S. Census Bureau.
+         Several open-source packages are used to process the data and generate the visualizations, e.g., [streamlit](https://streamlit.io),
+         [geopandas](https://geopandas.org), [leafmap](https://leafmap.org), and [pydeck](https://deckgl.readthedocs.io).
+     """
+     )
+
+     with st.expander("See a demo"):
+         st.image("https://i.imgur.com/Z3dk6Tr.gif")
+
+     row1_col1, row1_col2, row1_col3, row1_col4, row1_col5 = st.columns(
+         [0.6, 0.8, 0.6, 1.4, 2]
+     )
+     with row1_col1:
+         frequency = st.selectbox("Monthly/weekly data", ["Monthly", "Weekly"])
+     with row1_col2:
+         types = ["Current month data", "Historical data"]
+         if frequency == "Weekly":
+             types.remove("Current month data")
+         cur_hist = st.selectbox(
+             "Current/historical data",
+             types,
+         )
+     with row1_col3:
+         if frequency == "Monthly":
+             scale = st.selectbox(
+                 "Scale", ["National", "State", "Metro", "County"], index=3
+             )
+         else:
+             scale = st.selectbox("Scale", ["National", "Metro"], index=1)
+
+     gdf = get_geom_data(scale.lower())
+
+     if frequency == "Weekly":
+         inventory_df = get_inventory_data(data_links["weekly"][scale.lower()])
+         weeks = get_weeks(inventory_df)
+         with row1_col1:
+             selected_date = st.date_input("Select a date", value=weeks[-1])
+             saturday = get_saturday(selected_date)
+             selected_period = saturday.strftime("%-m/%-d/%Y")
+             if saturday not in weeks:
+                 st.error(
+                     "The selected date is not available in the data. Please select a date between {} and {}".format(
+                         weeks[0], weeks[-1]
+                     )
+                 )
+                 selected_period = weeks[-1].strftime("%-m/%-d/%Y")
+         inventory_df = get_inventory_data(data_links["weekly"][scale.lower()])
+         inventory_df = filter_weekly_inventory(inventory_df, selected_period)
+
+     if frequency == "Monthly":
+         if cur_hist == "Current month data":
+             inventory_df = get_inventory_data(
+                 data_links["monthly_current"][scale.lower()]
+             )