Fix GitHub vulnerability
- Procfile +0 -1
- README.md +3 -3
- app-bk.py +0 -49
- app.css +0 -4
- app.py +0 -48
- apps/basemaps.py +0 -44
- apps/census.py +0 -35
- apps/cesium.py +0 -8
- apps/deck.py +0 -178
- apps/device_loc.py +0 -43
- apps/gee.py +0 -123
- apps/gee_datasets.py +0 -186
- apps/heatmap.py +0 -19
- apps/home.py +0 -34
- apps/housing.py +0 -457
- apps/hurricane.py +0 -52
- apps/plotly_maps.py +0 -17
- apps/raster.py +0 -77
- apps/rois.py +0 -174
- apps/timelapse.py +0 -1314
- apps/vector.py +0 -98
- apps/wms.py +0 -68
- apps/xy.py +0 -65
- environment-bk.yml +0 -17
- index.html +0 -39
- multiapp.py +0 -81
- pages/10_π_Earth_Engine_Datasets.py +15 -10
- pages/13_ποΈ_Global_Building_Footprints.py +27 -24
- pages/1_π·_Timelapse.py +22 -15
- pages/2_π _U.S._Housing.py +3 -3
- pages/3_πͺ_Split_Map.py +2 -2
- pages/4_π₯_Heatmap.py +1 -1
- pages/5_π_Marker_Cluster.py +5 -5
- pages/6_πΊοΈ_Basemaps.py +2 -3
- pages/7_π¦_Web_Map_Service.py +23 -5
- pages/8_ποΈ_Raster_Data_Visualization.py +15 -4
- pages/9_π²_Vector_Data_Visualization.py +12 -1
- postBuild +0 -6
- requirements.txt +7 -6
- setup.sh +0 -18
- streamlit_app.py +0 -43
- streamlit_call.py +0 -14
Procfile
DELETED
@@ -1 +0,0 @@
-web: sh setup.sh && streamlit run Home.py
README.md
CHANGED
@@ -1,11 +1,11 @@
 ---
-title:
+title: Streamlit
 emoji: 🔥
 colorFrom: indigo
 colorTo: green
 sdk: streamlit
-sdk_version: 1.
-app_file:
+sdk_version: 1.34.0
+app_file: Home.py
 pinned: false
 license: mit
 ---
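For context, the Hugging Face Spaces frontmatter assembled from the + lines above (the emoji is the repaired rendering of the mis-encoded character) reads:

---
title: Streamlit
emoji: 🔥
colorFrom: indigo
colorTo: green
sdk: streamlit
sdk_version: 1.34.0
app_file: Home.py
pinned: false
license: mit
---

sdk_version and app_file are the functional changes: the Space now pins Streamlit 1.34.0 and launches Home.py directly, which is why the Procfile/setup.sh launch path above is removed.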
app-bk.py
DELETED
@@ -1,49 +0,0 @@
-import streamlit as st
-from multiapp import MultiApp
-from apps import (
-    basemaps,
-    census,
-    cesium,
-    deck,
-    device_loc,
-    gee,
-    gee_datasets,
-    heatmap,
-    home,
-    housing,
-    # hurricane,
-    plotly_maps,
-    raster,
-    timelapse,
-    vector,
-    wms,
-    xy,
-)
-
-st.set_page_config(layout="wide")
-
-
-apps = MultiApp()
-
-# Add all your application here
-
-apps.add_app("Home", home.app)
-apps.add_app("Create Timelapse", timelapse.app)
-# apps.add_app("Hurricane Mapping", hurricane.app)
-apps.add_app("U.S. Real Estate Data", housing.app)
-apps.add_app("U.S. Census Data", census.app)
-apps.add_app("Visualize Raster Data", raster.app)
-apps.add_app("Visualize Vector Data", vector.app)
-apps.add_app("Search Basemaps", basemaps.app)
-apps.add_app("Pydeck Gallery", deck.app)
-apps.add_app("Heatmaps", heatmap.app)
-apps.add_app("Add Points from XY", xy.app)
-apps.add_app("Add Web Map Service (WMS)", wms.app)
-apps.add_app("Google Earth Engine (GEE)", gee.app)
-apps.add_app("Awesome GEE Community Datasets", gee_datasets.app)
-apps.add_app("Geolocation", device_loc.app)
-apps.add_app("Cesium 3D Map", cesium.app)
-apps.add_app("Plotly", plotly_maps.app)
-
-# The main app
-apps.run()
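The file above, together with multiapp.py (also deleted in this commit, per the file list), implements a hand-rolled multi-page registry: each module in apps/ exposes an app() function, and MultiApp maps a sidebar selection to one of those callables. A minimal sketch of that pattern, assuming MultiApp stores (title, function) pairs and dispatches through a sidebar selectbox (the deleted multiapp.py itself is not shown in this view):

# Minimal sketch of the registry pattern used by app-bk.py / multiapp.py.
# Assumption: MultiApp keeps (title, func) pairs and dispatches via a sidebar
# selectbox; the real multiapp.py is deleted in this commit but not shown here.
import streamlit as st

class MultiApp:
    def __init__(self):
        self.apps = []  # list of {"title": ..., "function": ...}

    def add_app(self, title, func):
        self.apps.append({"title": title, "function": func})

    def run(self):
        # Pick a page in the sidebar and call its app() function.
        app = st.sidebar.selectbox(
            "Navigation", self.apps, format_func=lambda a: a["title"]
        )
        app["function"]()

Streamlit's built-in multipage mechanism (the pages/*.py files retained in the file list above, driven by Home.py) makes this registry unnecessary, which is why these modules are removed.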
app.css
DELETED
@@ -1,4 +0,0 @@
-.flex
-{
-overflow:auto;
-}
app.py
DELETED
@@ -1,48 +0,0 @@
-import streamlit as st
-import leafmap.foliumap as leafmap
-
-st.set_page_config(layout="wide")
-
-st.sidebar.info(
-    """
-    - Web App URL: <https://streamlit.gishub.org>
-    - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
-    """
-)
-
-st.sidebar.title("Contact")
-st.sidebar.info(
-    """
-    Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://www.youtube.com/c/QiushengWu) | [LinkedIn](https://www.linkedin.com/in/qiushengwu)
-    """
-)
-
-st.title("Streamlit for Geospatial Applications")
-
-st.markdown(
-    """
-    This multi-page web app demonstrates various interactive web apps created using [streamlit](https://streamlit.io) and open-source mapping libraries,
-    such as [leafmap](https://leafmap.org), [geemap](https://geemap.org), [pydeck](https://deckgl.readthedocs.io), and [kepler.gl](https://docs.kepler.gl/docs/keplergl-jupyter).
-    This is an open-source project and you are very welcome to contribute your comments, questions, resources, and apps as [issues](https://github.com/giswqs/streamlit-geospatial/issues) or
-    [pull requests](https://github.com/giswqs/streamlit-geospatial/pulls) to the [GitHub repository](https://github.com/giswqs/streamlit-geospatial).
-
-    """
-)
-
-st.info("Click on the left sidebar menu to navigate to the different apps.")
-
-st.subheader("Timelapse of Satellite Imagery")
-st.markdown(
-    """
-    The following timelapse animations were created using the Timelapse web app. Click `Timelapse` on the left sidebar menu to create your own timelapse for any location around the globe.
-    """
-)
-
-row1_col1, row1_col2 = st.columns(2)
-with row1_col1:
-    st.image("https://github.com/giswqs/data/raw/main/timelapse/spain.gif")
-    st.image("https://github.com/giswqs/data/raw/main/timelapse/las_vegas.gif")
-
-with row1_col2:
-    st.image("https://github.com/giswqs/data/raw/main/timelapse/goes.gif")
-    st.image("https://github.com/giswqs/data/raw/main/timelapse/fire.gif")
apps/basemaps.py
DELETED
@@ -1,44 +0,0 @@
-import streamlit as st
-import leafmap.foliumap as leafmap
-
-
-def app():
-    st.title("Searching Basemaps")
-    st.markdown(
-        """
-    This app is a demonstration of searching and loading basemaps from [xyzservices](https://github.com/geopandas/xyzservices) and [Quick Map Services (QMS)](https://github.com/nextgis/quickmapservices). Selecting from 1000+ basemaps with a few clicks.
-    """
-    )
-
-    with st.expander("See demo"):
-        st.image("https://i.imgur.com/0SkUhZh.gif")
-
-    row1_col1, row1_col2 = st.columns([3, 1])
-    width = 800
-    height = 600
-    tiles = None
-
-    with row1_col2:
-
-        checkbox = st.checkbox("Search Quick Map Services (QMS)")
-        keyword = st.text_input("Enter a keyword to search and press Enter:")
-        empty = st.empty()
-
-        if keyword:
-            options = leafmap.search_xyz_services(keyword=keyword)
-            if checkbox:
-                qms = leafmap.search_qms(keyword=keyword)
-                if qms is not None:
-                    options = options + qms
-
-            tiles = empty.multiselect(
-                "Select XYZ tiles to add to the map:", options)
-
-    with row1_col1:
-        m = leafmap.Map()
-
-        if tiles is not None:
-            for tile in tiles:
-                m.add_xyz_service(tile)
-
-        m.to_streamlit(width, height)
apps/census.py
DELETED
@@ -1,35 +0,0 @@
-import streamlit as st
-import leafmap.foliumap as leafmap
-
-
-def app():
-    st.title("Using U.S. Census Data")
-    st.markdown(
-        """
-    This app is a demonstration of using the [U.S. Census Bureau](https://www.census.gov/) TIGERweb Web Map Service (WMS). A complete list of WMS layers can be found [here](https://tigerweb.geo.census.gov/tigerwebmain/TIGERweb_wms.html).
-    """
-    )
-
-    if "first_index" not in st.session_state:
-        st.session_state["first_index"] = 60
-    else:
-        st.session_state["first_index"] = 0
-
-    row1_col1, row1_col2 = st.columns([3, 1])
-    width = 800
-    height = 600
-
-    census_dict = leafmap.get_census_dict()
-    with row1_col2:
-
-        wms = st.selectbox("Select a WMS", list(census_dict.keys()), index=11)
-        layer = st.selectbox(
-            "Select a layer",
-            census_dict[wms]["layers"],
-            index=st.session_state["first_index"],
-        )
-
-    with row1_col1:
-        m = leafmap.Map()
-        m.add_census_data(wms, layer)
-        m.to_streamlit(width, height)
apps/cesium.py
DELETED
@@ -1,8 +0,0 @@
-import leafmap
-import streamlit as st
-
-
-def app():
-    st.title("Cesium 3D Map")
-    html = "data/html/sfo_buildings.html"
-    leafmap.cesium_to_streamlit(html, height=800)
apps/deck.py
DELETED
@@ -1,178 +0,0 @@
-import os
-import streamlit as st
-import pydeck as pdk
-import pandas as pd
-
-
-def globe_view():
-
-    """
-    GlobeView
-    =========
-
-    Over 33,000 power plants of the world plotted by their production capacity (given by height)
-    and fuel type (green if renewable) on an experimental deck.gl GlobeView.
-    """
-
-    COUNTRIES = "https://d2ad6b4ur7yvpq.cloudfront.net/naturalearth-3.3.0/ne_50m_admin_0_scale_rank.geojson"
-    POWER_PLANTS = "https://raw.githubusercontent.com/ajduberstein/geo_datasets/master/global_power_plant_database.csv"
-
-    df = pd.read_csv(POWER_PLANTS)
-
-    def is_green(fuel_type):
-        """Return a green RGB value if a facility uses a renewable fuel type"""
-        if fuel_type.lower() in (
-            "nuclear",
-            "water",
-            "wind",
-            "hydro",
-            "biomass",
-            "solar",
-            "geothermal",
-        ):
-            return [10, 230, 120]
-        return [230, 158, 10]
-
-    df["color"] = df["primary_fuel"].apply(is_green)
-
-    view_state = pdk.ViewState(latitude=51.47, longitude=0.45, zoom=2, min_zoom=2)
-
-    # Set height and width variables
-    view = pdk.View(type="_GlobeView", controller=True, width=1000, height=700)
-
-    layers = [
-        pdk.Layer(
-            "GeoJsonLayer",
-            id="base-map",
-            data=COUNTRIES,
-            stroked=False,
-            filled=True,
-            get_fill_color=[200, 200, 200],
-        ),
-        pdk.Layer(
-            "ColumnLayer",
-            id="power-plant",
-            data=df,
-            get_elevation="capacity_mw",
-            get_position=["longitude", "latitude"],
-            elevation_scale=100,
-            pickable=True,
-            auto_highlight=True,
-            radius=20000,
-            get_fill_color="color",
-        ),
-    ]
-
-    r = pdk.Deck(
-        views=[view],
-        initial_view_state=view_state,
-        tooltip={"text": "{name}, {primary_fuel} plant, {country}"},
-        layers=layers,
-        # Note that this must be set for the globe to be opaque
-        parameters={"cull": True},
-    )
-
-    return r
-
-
-def geojson_layer():
-
-    """
-    GeoJsonLayer
-    ===========
-
-    Property values in Vancouver, Canada, adapted from the deck.gl example pages. Input data is in a GeoJSON format.
-    """
-
-    DATA_URL = "https://raw.githubusercontent.com/visgl/deck.gl-data/master/examples/geojson/vancouver-blocks.json"
-    LAND_COVER = [
-        [[-123.0, 49.196], [-123.0, 49.324], [-123.306, 49.324], [-123.306, 49.196]]
-    ]
-
-    INITIAL_VIEW_STATE = pdk.ViewState(
-        latitude=49.254, longitude=-123.13, zoom=11, max_zoom=16, pitch=45, bearing=0
-    )
-
-    polygon = pdk.Layer(
-        "PolygonLayer",
-        LAND_COVER,
-        stroked=False,
-        # processes the data as a flat longitude-latitude pair
-        get_polygon="-",
-        get_fill_color=[0, 0, 0, 20],
-    )
-
-    geojson = pdk.Layer(
-        "GeoJsonLayer",
-        DATA_URL,
-        opacity=0.8,
-        stroked=False,
-        filled=True,
-        extruded=True,
-        wireframe=True,
-        get_elevation="properties.valuePerSqm / 20",
-        get_fill_color="[255, 255, properties.growth * 255]",
-        get_line_color=[255, 255, 255],
-    )
-
-    r = pdk.Deck(layers=[polygon, geojson], initial_view_state=INITIAL_VIEW_STATE)
-    return r
-
-
-def terrain():
-
-    """
-    TerrainLayer
-    ===========
-
-    Extruded terrain using AWS Open Data Terrain Tiles and Mapbox Satellite imagery
-    """
-
-    # Import Mapbox API Key from environment
-    MAPBOX_API_KEY = os.environ["MAPBOX_API_KEY"]
-
-    # AWS Open Data Terrain Tiles
-    TERRAIN_IMAGE = (
-        "https://s3.amazonaws.com/elevation-tiles-prod/terrarium/{z}/{x}/{y}.png"
-    )
-
-    # Define how to parse elevation tiles
-    ELEVATION_DECODER = {
-        "rScaler": 256,
-        "gScaler": 1,
-        "bScaler": 1 / 256,
-        "offset": -32768,
-    }
-
-    SURFACE_IMAGE = f"https://api.mapbox.com/v4/mapbox.satellite/{{z}}/{{x}}/{{y}}@2x.png?access_token={MAPBOX_API_KEY}"
-
-    terrain_layer = pdk.Layer(
-        "TerrainLayer",
-        elevation_decoder=ELEVATION_DECODER,
-        texture=SURFACE_IMAGE,
-        elevation_data=TERRAIN_IMAGE,
-    )
-
-    view_state = pdk.ViewState(
-        latitude=46.24, longitude=-122.18, zoom=11.5, bearing=140, pitch=60
-    )
-
-    r = pdk.Deck(terrain_layer, initial_view_state=view_state)
-    return r
-
-
-def app():
-
-    st.title("Pydeck Gallery")
-
-    options = ["GeoJsonLayer", "GlobeView", "TerrainLayer"]
-
-    option = st.selectbox("Select a pydeck layer type", options)
-
-    if option == "GeoJsonLayer":
-        st.header("Property values in Vancouver, Canada")
-        st.pydeck_chart(geojson_layer())
-    # elif option == "GlobeView":
-    #     st.pydeck_chart(globe_view())
-    elif option == "TerrainLayer":
-        st.pydeck_chart(terrain())
apps/device_loc.py
DELETED
@@ -1,43 +0,0 @@
-import streamlit as st
-from bokeh.models.widgets import Button
-from bokeh.models import CustomJS
-from streamlit_bokeh_events import streamlit_bokeh_events
-import leafmap.foliumap as leafmap
-
-
-def app():
-
-    loc_button = Button(label="Get Device Location", max_width=150)
-    loc_button.js_on_event(
-        "button_click",
-        CustomJS(
-            code="""
-            navigator.geolocation.getCurrentPosition(
-                (loc) => {
-                    document.dispatchEvent(new CustomEvent("GET_LOCATION", {detail: {lat: loc.coords.latitude, lon: loc.coords.longitude}}))
-                }
-            )
-            """
-        ),
-    )
-    result = streamlit_bokeh_events(
-        loc_button,
-        events="GET_LOCATION",
-        key="get_location",
-        refresh_on_update=False,
-        override_height=75,
-        debounce_time=0,
-    )
-
-    if result:
-        if "GET_LOCATION" in result:
-            loc = result.get("GET_LOCATION")
-            lat = loc.get("lat")
-            lon = loc.get("lon")
-            st.write(f"Lat, Lon: {lat}, {lon}")
-
-            m = leafmap.Map(center=(lat, lon), zoom=16)
-            m.add_basemap("ROADMAP")
-            popup = f"lat, lon: {lat}, {lon}"
-            m.add_marker(location=(lat, lon), popup=popup)
-            m.to_streamlit()
apps/gee.py
DELETED
@@ -1,123 +0,0 @@
-import ee
-import streamlit as st
-import geemap.foliumap as geemap
-
-
-def nlcd():
-
-    st.header("National Land Cover Database (NLCD)")
-
-    row1_col1, row1_col2 = st.columns([3, 1])
-    width = 950
-    height = 600
-
-    Map = geemap.Map()
-
-    # Select the seven NLCD epoches after 2000.
-    years = ["2001", "2004", "2006", "2008", "2011", "2013", "2016"]
-
-    # Get an NLCD image by year.
-    def getNLCD(year):
-        # Import the NLCD collection.
-        dataset = ee.ImageCollection("USGS/NLCD_RELEASES/2016_REL")
-
-        # Filter the collection by year.
-        nlcd = dataset.filter(ee.Filter.eq("system:index", year)).first()
-
-        # Select the land cover band.
-        landcover = nlcd.select("landcover")
-        return landcover
-
-    with row1_col2:
-        selected_year = st.multiselect("Select a year", years)
-        add_legend = st.checkbox("Show legend")
-
-    if selected_year:
-        for year in selected_year:
-            Map.addLayer(getNLCD(year), {}, "NLCD " + year)
-
-        if add_legend:
-            Map.add_legend(
-                legend_title="NLCD Land Cover Classification", builtin_legend="NLCD"
-            )
-        with row1_col1:
-            Map.to_streamlit(width=width, height=height)
-
-    else:
-        with row1_col1:
-            Map.to_streamlit(width=width, height=height)
-
-
-def search_data():
-
-    st.header("Search Earth Engine Data Catalog")
-
-    Map = geemap.Map()
-
-    if "ee_assets" not in st.session_state:
-        st.session_state["ee_assets"] = None
-    if "asset_titles" not in st.session_state:
-        st.session_state["asset_titles"] = None
-
-    col1, col2 = st.columns([2, 1])
-
-    dataset = None
-    with col2:
-        keyword = st.text_input("Enter a keyword to search (e.g., elevation)", "")
-        if keyword:
-            ee_assets = geemap.search_ee_data(keyword)
-            asset_titles = [x["title"] for x in ee_assets]
-            dataset = st.selectbox("Select a dataset", asset_titles)
-            if len(ee_assets) > 0:
-                st.session_state["ee_assets"] = ee_assets
-                st.session_state["asset_titles"] = asset_titles
-
-            if dataset is not None:
-                with st.expander("Show dataset details", True):
-                    index = asset_titles.index(dataset)
-                    html = geemap.ee_data_html(st.session_state["ee_assets"][index])
-                    st.markdown(html, True)
-
-                ee_id = ee_assets[index]["ee_id_snippet"]
-                uid = ee_assets[index]["uid"]
-                st.markdown(f"""**Earth Engine Snippet:** `{ee_id}`""")
-
-                vis_params = st.text_input(
-                    "Enter visualization parameters as a dictionary", {}
-                )
-                layer_name = st.text_input("Enter a layer name", uid)
-                button = st.button("Add dataset to map")
-                if button:
-                    vis = {}
-                    try:
-                        if vis_params.strip() == "":
-                            # st.error("Please enter visualization parameters")
-                            vis_params = "{}"
-                        vis = eval(vis_params)
-                        if not isinstance(vis, dict):
-                            st.error("Visualization parameters must be a dictionary")
-                        try:
-                            Map.addLayer(eval(ee_id), vis, layer_name)
-                        except Exception as e:
-                            st.error(f"Error adding layer: {e}")
-                    except Exception as e:
-                        st.error(f"Invalid visualization parameters: {e}")
-
-            with col1:
-                Map.to_streamlit()
-        else:
-            with col1:
-                Map.to_streamlit()
-
-
-def app():
-    st.title("Google Earth Engine Applications")
-
-    apps = ["National Land Cover Database (NLCD)", "Search Earth Engine Data Catalog"]
-
-    selected_app = st.selectbox("Select an app", apps)
-
-    if selected_app == "National Land Cover Database (NLCD)":
-        nlcd()
-    elif selected_app == "Search Earth Engine Data Catalog":
-        search_data()
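Note that the deleted apps/gee.py above turns the free-text visualization parameters into a dict with eval(vis_params) and resolves the Earth Engine snippet with eval(ee_id). If that text-box parsing pattern were kept, ast.literal_eval is the usual safer way to accept only literal dicts; the helper below is a hypothetical sketch by the editor, not part of this commit:

# Hypothetical safer variant of the vis_params parsing in the deleted apps/gee.py.
# ast.literal_eval only accepts Python literals, unlike eval().
import ast

def parse_vis_params(text: str) -> dict:
    text = text.strip() or "{}"
    vis = ast.literal_eval(text)
    if not isinstance(vis, dict):
        raise ValueError("Visualization parameters must be a dictionary")
    return vis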
apps/gee_datasets.py
DELETED
@@ -1,186 +0,0 @@
-import ee
-import streamlit as st
-import geemap.foliumap as geemap
-
-WIDTH = 1060
-HEIGHT = 600
-
-
-def function():
-    st.write("Not implemented yet.")
-    Map = geemap.Map()
-    Map.to_streamlit(WIDTH, HEIGHT)
-
-
-def lulc_mrb_floodplain():
-
-    Map = geemap.Map()
-
-    State_boundaries = ee.FeatureCollection('users/giswqs/MRB/State_Boundaries')
-    State_style = State_boundaries.style(
-        **{'color': '808080', 'width': 1, 'fillColor': '00000000'}
-    )
-
-    MRB_boundary = ee.FeatureCollection('users/giswqs/MRB/MRB_Boundary')
-    MRB_style = MRB_boundary.style(
-        **{'color': '000000', 'width': 2, 'fillColor': '00000000'}
-    )
-
-    floodplain = ee.Image('users/giswqs/MRB/USGS_Floodplain')
-
-    class_values = [34, 38, 46, 50, 62]
-    class_palette = ['c500ff', '00ffc5', '00a9e6', '73004d', '004d73']
-
-    img_1950 = ee.Image('users/giswqs/MRB/Major_Transitions_1941_1950')
-    img_1950 = img_1950.set('b1_class_values', class_values)
-    img_1950 = img_1950.set('b1_class_palette', class_palette)
-
-    img_1960 = ee.Image('users/giswqs/MRB/Major_Transitions_1941_1960')
-    img_1960 = img_1960.set('b1_class_values', class_values)
-    img_1960 = img_1960.set('b1_class_palette', class_palette)
-
-    img_1970 = ee.Image('users/giswqs/MRB/Major_Transitions_1941_1970')
-    img_1970 = img_1970.set('b1_class_values', class_values)
-    img_1970 = img_1970.set('b1_class_palette', class_palette)
-
-    img_1980 = ee.Image('users/giswqs/MRB/Major_Transitions_1941_1980')
-    img_1980 = img_1980.set('b1_class_values', class_values)
-    img_1980 = img_1980.set('b1_class_palette', class_palette)
-
-    img_1990 = ee.Image('users/giswqs/MRB/Major_Transitions_1941_1990')
-    img_1990 = img_1990.set('b1_class_values', class_values)
-    img_1990 = img_1990.set('b1_class_palette', class_palette)
-
-    img_2000 = ee.Image('users/giswqs/MRB/Major_Transitions_1941_2000')
-    img_2000 = img_2000.set('b1_class_values', class_values)
-    img_2000 = img_2000.set('b1_class_palette', class_palette)
-
-    Map.addLayer(floodplain, {'palette': ['cccccc']}, 'Floodplain', True, 0.5)
-    Map.addLayer(img_2000, {}, 'Major Transitions 1941-2000')
-    Map.addLayer(img_1990, {}, 'Major Transitions 1941-1990')
-    Map.addLayer(img_1980, {}, 'Major Transitions 1941-1980')
-    Map.addLayer(img_1970, {}, 'Major Transitions 1941-1970')
-    Map.addLayer(img_1960, {}, 'Major Transitions 1941-1960')
-    Map.addLayer(img_1950, {}, 'Major Transitions 1941-1950')
-
-    Map.addLayer(State_style, {}, 'State Boundaries')
-    Map.addLayer(MRB_style, {}, 'MRB Boundary')
-
-    Map.to_streamlit(WIDTH, HEIGHT)
-
-
-def global_mangrove_watch():
-    """https://samapriya.github.io/awesome-gee-community-datasets/projects/mangrove/"""
-    Map = geemap.Map()
-    gmw2007 = ee.FeatureCollection("projects/sat-io/open-datasets/GMW/GMW_2007_v2")
-    gmw2008 = ee.FeatureCollection("projects/sat-io/open-datasets/GMW/GMW_2008_v2")
-    gmw2009 = ee.FeatureCollection("projects/sat-io/open-datasets/GMW/GMW_2009_v2")
-    gmw2010 = ee.FeatureCollection("projects/sat-io/open-datasets/GMW/GMW_2010_v2")
-    gmw2015 = ee.FeatureCollection("projects/sat-io/open-datasets/GMW/GMW_2015_v2")
-    gmw2016 = ee.FeatureCollection("projects/sat-io/open-datasets/GMW/GMW_2016_v2")
-    gmw1996 = ee.FeatureCollection("projects/sat-io/open-datasets/GMW/GMW_1996_v2")
-
-    Map.addLayer(
-        ee.Image().paint(gmw1996, 0, 3),
-        {"palette": ["228B22"]},
-        'Global Mangrove Watch 1996',
-    )
-    Map.addLayer(
-        ee.Image().paint(gmw2007, 0, 3),
-        {"palette": ["228B22"]},
-        'Global Mangrove Watch 2007',
-    )
-    Map.addLayer(
-        ee.Image().paint(gmw2008, 0, 3),
-        {"palette": ["228B22"]},
-        'Global Mangrove Watch 2008',
-    )
-    Map.addLayer(
-        ee.Image().paint(gmw2009, 0, 3),
-        {"palette": ["228B22"]},
-        'Global Mangrove Watch 2009',
-    )
-    Map.addLayer(
-        ee.Image().paint(gmw2010, 0, 3),
-        {"palette": ["228B22"]},
-        'Global Mangrove Watch 2010',
-    )
-    Map.addLayer(
-        ee.Image().paint(gmw2015, 0, 3),
-        {"palette": ["228B22"]},
-        'Global Mangrove Watch 2015',
-    )
-    Map.addLayer(
-        ee.Image().paint(gmw2016, 0, 3),
-        {"palette": ["228B22"]},
-        'Global Mangrove Watch 2015',
-    )
-
-    Map.to_streamlit(WIDTH, HEIGHT)
-
-
-def app():
-
-    st.title("Awesome GEE Community Datasets")
-
-    st.markdown(
-        """
-
-    This app is for exploring the [Awesome GEE Community Datasets](https://samapriya.github.io/awesome-gee-community-datasets). Work in progress.
-
-    """
-    )
-
-    datasets = {
-        "Population & Socioeconomic": {
-            "High Resolution Settlement Layer": "function()",
-            "World Settlement Footprint (2015)": "function()",
-            "Gridded Population of the World": "function()",
-            "geoBoundaries Global Database": "function()",
-            "West Africa Coastal Vulnerability Mapping": "function()",
-            "Relative Wealth Index (RWI)": "function()",
-            "Social Connectedness Index (SCI)": "function()",
-            "Native Land (Indigenous Land Maps)": "function()",
-        },
-        "Geophysical, Biological & Biogeochemical": {
-            "Geomorpho90m Geomorphometric Layers": "function()",
-        },
-        "Land Use and Land Cover": {
-            "Global Mangrove Watch": "global_mangrove_watch()",
-            "Mississippi River Basin Floodplain Land Use Change (1941-2000)": "lulc_mrb_floodplain()",
-        },
-        "Hydrology": {
-            "Global Shoreline Dataset": "function()",
-        },
-        "Agriculture, Vegetation and Forestry": {
-            "Landfire Mosaics LF v2.0.0": "function()",
-        },
-        "Global Utilities, Assets and Amenities Layers": {
-            "Global Power": "function()",
-        },
-        "EarthEnv Biodiversity ecosystems & climate Layers": {
-            "Global Consensus Landcover": "function()",
-        },
-        "Weather and Climate Layers": {
-            "Global Reference Evapotranspiration Layers": "function()",
-        },
-        "Global Events Layers": {
-            "Global Fire Atlas (2003-2016)": "function()",
-        },
-    }
-
-    row1_col1, row1_col2, _ = st.columns([1.2, 1.8, 1])
-
-    with row1_col1:
-        category = st.selectbox("Select a category", datasets.keys(), index=2)
-    with row1_col2:
-        dataset = st.selectbox("Select a dataset", datasets[category].keys())
-
-    Map = geemap.Map()
-
-    if dataset:
-        eval(datasets[category][dataset])
-
-    else:
-        Map = geemap.Map()
-        Map.to_streamlit(WIDTH, HEIGHT)
apps/heatmap.py
DELETED
@@ -1,19 +0,0 @@
-import streamlit as st
-import leafmap.foliumap as leafmap
-
-
-def app():
-
-    st.title('Heatmaps')
-
-    filepath = "https://raw.githubusercontent.com/giswqs/leafmap/master/examples/data/us_cities.csv"
-    m = leafmap.Map(tiles="stamentoner")
-    m.add_heatmap(
-        filepath,
-        latitude="latitude",
-        longitude="longitude",
-        value="pop_max",
-        name="Heat map",
-        radius=20,
-    )
-    m.to_streamlit(width=700, height=500)
apps/home.py
DELETED
@@ -1,34 +0,0 @@
-import streamlit as st
-import leafmap.foliumap as leafmap
-
-
-def app():
-    st.title("Streamlit for Geospatial Applications")
-
-    st.markdown(
-        """
-    This multi-page web app demonstrates various interactive web apps created using [streamlit](https://streamlit.io) and open-source mapping libraries,
-    such as [leafmap](https://leafmap.org), [geemap](https://geemap.org), [pydeck](https://deckgl.readthedocs.io), and [kepler.gl](https://docs.kepler.gl/docs/keplergl-jupyter).
-    This is an open-source project and you are very welcome to contribute your comments, questions, resources, and apps as [issues](https://github.com/giswqs/streamlit-geospatial/issues) or
-    [pull requests](https://github.com/giswqs/streamlit-geospatial/pulls) to the [GitHub repository](https://github.com/giswqs/streamlit-geospatial).
-
-    """
-    )
-
-    st.info("Click on the left sidebar menu to navigate to the different apps.")
-
-    st.subheader("Timelapse of Satellite Imagery")
-    st.markdown(
-        """
-    The following timelapse animations were created using the Timelapse web app. Click `Create Timelapse` on the left sidebar menu to create your own timelapse for any location around the globe.
-    """
-    )
-
-    row1_col1, row1_col2 = st.columns(2)
-    with row1_col1:
-        st.image("https://github.com/giswqs/data/raw/main/timelapse/spain.gif")
-        st.image("https://github.com/giswqs/data/raw/main/timelapse/las_vegas.gif")
-
-    with row1_col2:
-        st.image("https://github.com/giswqs/data/raw/main/timelapse/goes.gif")
-        st.image("https://github.com/giswqs/data/raw/main/timelapse/fire.gif")
apps/housing.py
DELETED
@@ -1,457 +0,0 @@
-import datetime
-import os
-import pathlib
-import requests
-import zipfile
-import pandas as pd
-import pydeck as pdk
-import geopandas as gpd
-import streamlit as st
-import leafmap.colormaps as cm
-from leafmap.common import hex_to_rgb
-
-
-STREAMLIT_STATIC_PATH = pathlib.Path(st.__path__[0]) / "static"
-# We create a downloads directory within the streamlit static asset directory
-# and we write output files to it
-DOWNLOADS_PATH = STREAMLIT_STATIC_PATH / "downloads"
-if not DOWNLOADS_PATH.is_dir():
-    DOWNLOADS_PATH.mkdir()
-
-# Data source: https://www.realtor.com/research/data/
-# link_prefix = "https://econdata.s3-us-west-2.amazonaws.com/Reports/"
-link_prefix = "https://raw.githubusercontent.com/giswqs/data/main/housing/"
-
-data_links = {
-    "weekly": {
-        "national": link_prefix + "Core/listing_weekly_core_aggregate_by_country.csv",
-        "metro": link_prefix + "Core/listing_weekly_core_aggregate_by_metro.csv",
-    },
-    "monthly_current": {
-        "national": link_prefix + "Core/RDC_Inventory_Core_Metrics_Country.csv",
-        "state": link_prefix + "Core/RDC_Inventory_Core_Metrics_State.csv",
-        "metro": link_prefix + "Core/RDC_Inventory_Core_Metrics_Metro.csv",
-        "county": link_prefix + "Core/RDC_Inventory_Core_Metrics_County.csv",
-        "zip": link_prefix + "Core/RDC_Inventory_Core_Metrics_Zip.csv",
-    },
-    "monthly_historical": {
-        "national": link_prefix + "Core/RDC_Inventory_Core_Metrics_Country_History.csv",
-        "state": link_prefix + "Core/RDC_Inventory_Core_Metrics_State_History.csv",
-        "metro": link_prefix + "Core/RDC_Inventory_Core_Metrics_Metro_History.csv",
-        "county": link_prefix + "Core/RDC_Inventory_Core_Metrics_County_History.csv",
-        "zip": link_prefix + "Core/RDC_Inventory_Core_Metrics_Zip_History.csv",
-    },
-    "hotness": {
-        "metro": link_prefix
-        + "Hotness/RDC_Inventory_Hotness_Metrics_Metro_History.csv",
-        "county": link_prefix
-        + "Hotness/RDC_Inventory_Hotness_Metrics_County_History.csv",
-        "zip": link_prefix + "Hotness/RDC_Inventory_Hotness_Metrics_Zip_History.csv",
-    },
-}
-
-
-def get_data_columns(df, category, frequency="monthly"):
-    if frequency == "monthly":
-        if category.lower() == "county":
-            del_cols = ["month_date_yyyymm", "county_fips", "county_name"]
-        elif category.lower() == "state":
-            del_cols = ["month_date_yyyymm", "state", "state_id"]
-        elif category.lower() == "national":
-            del_cols = ["month_date_yyyymm", "country"]
-        elif category.lower() == "metro":
-            del_cols = ["month_date_yyyymm", "cbsa_code", "cbsa_title", "HouseholdRank"]
-        elif category.lower() == "zip":
-            del_cols = ["month_date_yyyymm", "postal_code", "zip_name", "flag"]
-    elif frequency == "weekly":
-        if category.lower() == "national":
-            del_cols = ["week_end_date", "geo_country"]
-        elif category.lower() == "metro":
-            del_cols = ["week_end_date", "cbsa_code", "cbsa_title", "hh_rank"]
-
-    cols = df.columns.values.tolist()
-
-    for col in cols:
-        if col.strip() in del_cols:
-            cols.remove(col)
-    if category.lower() == "metro":
-        return cols[2:]
-    else:
-        return cols[1:]
-
-
-@st.cache(allow_output_mutation=True)
-def get_inventory_data(url):
-    df = pd.read_csv(url)
-    url = url.lower()
-    if "county" in url:
-        df["county_fips"] = df["county_fips"].map(str)
-        df["county_fips"] = df["county_fips"].str.zfill(5)
-    elif "state" in url:
-        df["STUSPS"] = df["state_id"].str.upper()
-    elif "metro" in url:
-        df["cbsa_code"] = df["cbsa_code"].map(str)
-    elif "zip" in url:
-        df["postal_code"] = df["postal_code"].map(str)
-        df["postal_code"] = df["postal_code"].str.zfill(5)
-
-    if "listing_weekly_core_aggregate_by_country" in url:
-        columns = get_data_columns(df, "national", "weekly")
-        for column in columns:
-            if column != "median_days_on_market_by_day_yy":
-                df[column] = df[column].str.rstrip("%").astype(float) / 100
-    if "listing_weekly_core_aggregate_by_metro" in url:
-        columns = get_data_columns(df, "metro", "weekly")
-        for column in columns:
-            if column != "median_days_on_market_by_day_yy":
-                df[column] = df[column].str.rstrip("%").astype(float) / 100
-        df["cbsa_code"] = df["cbsa_code"].str[:5]
-    return df
-
-
-def filter_weekly_inventory(df, week):
-    df = df[df["week_end_date"] == week]
-    return df
-
-
-def get_start_end_year(df):
-    start_year = int(str(df["month_date_yyyymm"].min())[:4])
-    end_year = int(str(df["month_date_yyyymm"].max())[:4])
-    return start_year, end_year
-
-
-def get_periods(df):
-    return [str(d) for d in list(set(df["month_date_yyyymm"].tolist()))]
-
-
-@st.cache(allow_output_mutation=True)
-def get_geom_data(category):
-
-    prefix = (
-        "https://raw.githubusercontent.com/giswqs/streamlit-geospatial/master/data/"
-    )
-    links = {
-        "national": prefix + "us_nation.geojson",
-        "state": prefix + "us_states.geojson",
-        "county": prefix + "us_counties.geojson",
-        "metro": prefix + "us_metro_areas.geojson",
-        "zip": "https://www2.census.gov/geo/tiger/GENZ2018/shp/cb_2018_us_zcta510_500k.zip",
-    }
-
-    if category.lower() == "zip":
-        r = requests.get(links[category])
-        out_zip = os.path.join(DOWNLOADS_PATH, "cb_2018_us_zcta510_500k.zip")
-        with open(out_zip, "wb") as code:
-            code.write(r.content)
-        zip_ref = zipfile.ZipFile(out_zip, "r")
-        zip_ref.extractall(DOWNLOADS_PATH)
-        gdf = gpd.read_file(out_zip.replace("zip", "shp"))
-    else:
-        gdf = gpd.read_file(links[category])
-    return gdf
-
-
-def join_attributes(gdf, df, category):
-
-    new_gdf = None
-    if category == "county":
-        new_gdf = gdf.merge(df, left_on="GEOID", right_on="county_fips", how="outer")
-    elif category == "state":
-        new_gdf = gdf.merge(df, left_on="STUSPS", right_on="STUSPS", how="outer")
-    elif category == "national":
-        if "geo_country" in df.columns.values.tolist():
-            df["country"] = None
-            df.loc[0, "country"] = "United States"
-        new_gdf = gdf.merge(df, left_on="NAME", right_on="country", how="outer")
-    elif category == "metro":
-        new_gdf = gdf.merge(df, left_on="CBSAFP", right_on="cbsa_code", how="outer")
-    elif category == "zip":
-        new_gdf = gdf.merge(df, left_on="GEOID10", right_on="postal_code", how="outer")
-    return new_gdf
-
-
-def select_non_null(gdf, col_name):
-    new_gdf = gdf[~gdf[col_name].isna()]
-    return new_gdf
-
-
-def select_null(gdf, col_name):
-    new_gdf = gdf[gdf[col_name].isna()]
-    return new_gdf
-
-
-def get_data_dict(name):
-    in_csv = os.path.join(os.getcwd(), "data/realtor_data_dict.csv")
-    df = pd.read_csv(in_csv)
-    label = list(df[df["Name"] == name]["Label"])[0]
-    desc = list(df[df["Name"] == name]["Description"])[0]
-    return label, desc
-
-
-def get_weeks(df):
-    seq = list(set(df[~df["week_end_date"].isnull()]["week_end_date"].tolist()))
-    weeks = [
-        datetime.date(int(d.split("/")[2]), int(d.split("/")[0]), int(d.split("/")[1]))
-        for d in seq
-    ]
-    weeks.sort()
-    return weeks
-
-
-def get_saturday(in_date):
-    idx = (in_date.weekday() + 1) % 7
-    sat = in_date + datetime.timedelta(6 - idx)
-    return sat
-
-
-def app():
-
-    st.title("U.S. Real Estate Data and Market Trends")
-    st.markdown(
-        """**Introduction:** This interactive dashboard is designed for visualizing U.S. real estate data and market trends at multiple levels (i.e., national,
-        state, county, and metro). The data sources include [Real Estate Data](https://www.realtor.com/research/data) from realtor.com and
-        [Cartographic Boundary Files](https://www.census.gov/geographies/mapping-files/time-series/geo/carto-boundary-file.html) from U.S. Census Bureau.
-        Several open-source packages are used to process the data and generate the visualizations, e.g., [streamlit](https://streamlit.io),
-        [geopandas](https://geopandas.org), [leafmap](https://leafmap.org), and [pydeck](https://deckgl.readthedocs.io).
-    """
-    )
-
-    with st.expander("See a demo"):
-        st.image("https://i.imgur.com/Z3dk6Tr.gif")
-
-    row1_col1, row1_col2, row1_col3, row1_col4, row1_col5 = st.columns(
-        [0.6, 0.8, 0.6, 1.4, 2]
-    )
-    with row1_col1:
-        frequency = st.selectbox("Monthly/weekly data", ["Monthly", "Weekly"])
-    with row1_col2:
-        types = ["Current month data", "Historical data"]
-        if frequency == "Weekly":
-            types.remove("Current month data")
-        cur_hist = st.selectbox(
-            "Current/historical data",
-            types,
-        )
-    with row1_col3:
-        if frequency == "Monthly":
-            scale = st.selectbox(
-                "Scale", ["National", "State", "Metro", "County"], index=3
-            )
-        else:
-            scale = st.selectbox("Scale", ["National", "Metro"], index=1)
-
-    gdf = get_geom_data(scale.lower())
-
-    if frequency == "Weekly":
-        inventory_df = get_inventory_data(data_links["weekly"][scale.lower()])
-        weeks = get_weeks(inventory_df)
-        with row1_col1:
-            selected_date = st.date_input("Select a date", value=weeks[-1])
-            saturday = get_saturday(selected_date)
-            selected_period = saturday.strftime("%-m/%-d/%Y")
-            if saturday not in weeks:
-                st.error(
-                    "The selected date is not available in the data. Please select a date between {} and {}".format(
-                        weeks[0], weeks[-1]
-                    )
-                )
-                selected_period = weeks[-1].strftime("%-m/%-d/%Y")
-        inventory_df = get_inventory_data(data_links["weekly"][scale.lower()])
-        inventory_df = filter_weekly_inventory(inventory_df, selected_period)
-
-    if frequency == "Monthly":
-        if cur_hist == "Current month data":
-            inventory_df = get_inventory_data(
-                data_links["monthly_current"][scale.lower()]
-            )
-            selected_period = get_periods(inventory_df)[0]
-        else:
-            with row1_col2:
-                inventory_df = get_inventory_data(
-                    data_links["monthly_historical"][scale.lower()]
-                )
-                start_year, end_year = get_start_end_year(inventory_df)
-                periods = get_periods(inventory_df)
-                with st.expander("Select year and month", True):
-                    selected_year = st.slider(
-                        "Year",
-                        start_year,
-                        end_year,
-                        value=start_year,
-                        step=1,
-                    )
-                    selected_month = st.slider(
-                        "Month",
-                        min_value=1,
-                        max_value=12,
-                        value=int(periods[0][-2:]),
-                        step=1,
-                    )
-                selected_period = str(selected_year) + str(selected_month).zfill(2)
-                if selected_period not in periods:
-                    st.error("Data not available for selected year and month")
-                    selected_period = periods[0]
-                inventory_df = inventory_df[
-                    inventory_df["month_date_yyyymm"] == int(selected_period)
-                ]
-
-    data_cols = get_data_columns(inventory_df, scale.lower(), frequency.lower())
-
-    with row1_col4:
-        selected_col = st.selectbox("Attribute", data_cols)
-    with row1_col5:
-        show_desc = st.checkbox("Show attribute description")
-        if show_desc:
-            try:
-                label, desc = get_data_dict(selected_col.strip())
-                markdown = f"""
-                **{label}**: {desc}
-                """
-                st.markdown(markdown)
-            except:
-                st.warning("No description available for selected attribute")
-
-    row2_col1, row2_col2, row2_col3, row2_col4, row2_col5, row2_col6 = st.columns(
-        [0.6, 0.68, 0.7, 0.7, 1.5, 0.8]
-    )
-
-    palettes = cm.list_colormaps()
-    with row2_col1:
-        palette = st.selectbox("Color palette", palettes, index=palettes.index("Blues"))
-    with row2_col2:
-        n_colors = st.slider("Number of colors", min_value=2, max_value=20, value=8)
-    with row2_col3:
-        show_nodata = st.checkbox("Show nodata areas", value=True)
-    with row2_col4:
-        show_3d = st.checkbox("Show 3D view", value=False)
-    with row2_col5:
-        if show_3d:
-            elev_scale = st.slider(
-                "Elevation scale", min_value=1, max_value=1000000, value=1, step=10
-            )
-            with row2_col6:
-                st.info("Press Ctrl and move the left mouse button.")
-        else:
-            elev_scale = 1
-
-    gdf = join_attributes(gdf, inventory_df, scale.lower())
-    gdf_null = select_null(gdf, selected_col)
-    gdf = select_non_null(gdf, selected_col)
-    gdf = gdf.sort_values(by=selected_col, ascending=True)
-
-    colors = cm.get_palette(palette, n_colors)
-    colors = [hex_to_rgb(c) for c in colors]
-
-    for i, ind in enumerate(gdf.index):
-        index = int(i / (len(gdf) / len(colors)))
-        if index >= len(colors):
-            index = len(colors) - 1
-        gdf.loc[ind, "R"] = colors[index][0]
-        gdf.loc[ind, "G"] = colors[index][1]
-        gdf.loc[ind, "B"] = colors[index][2]
-
-    initial_view_state = pdk.ViewState(
-        latitude=40, longitude=-100, zoom=3, max_zoom=16, pitch=0, bearing=0
-    )
-
-    min_value = gdf[selected_col].min()
-    max_value = gdf[selected_col].max()
-    color = "color"
-    # color_exp = f"[({selected_col}-{min_value})/({max_value}-{min_value})*255, 0, 0]"
-    color_exp = f"[R, G, B]"
-
-    geojson = pdk.Layer(
-        "GeoJsonLayer",
-        gdf,
-        pickable=True,
-        opacity=0.5,
-        stroked=True,
-        filled=True,
-        extruded=show_3d,
-        wireframe=True,
-        get_elevation=f"{selected_col}",
-        elevation_scale=elev_scale,
-        # get_fill_color="color",
-        get_fill_color=color_exp,
-        get_line_color=[0, 0, 0],
-        get_line_width=2,
-        line_width_min_pixels=1,
-    )
-
-    geojson_null = pdk.Layer(
-        "GeoJsonLayer",
-        gdf_null,
-        pickable=True,
-        opacity=0.2,
-        stroked=True,
-        filled=True,
-        extruded=False,
-        wireframe=True,
-        # get_elevation="properties.ALAND/100000",
-        # get_fill_color="color",
-        get_fill_color=[200, 200, 200],
-        get_line_color=[0, 0, 0],
-        get_line_width=2,
-        line_width_min_pixels=1,
-    )
-
-    # tooltip = {"text": "Name: {NAME}"}
-
-    # tooltip_value = f"<b>Value:</b> {median_listing_price}""
-    tooltip = {
-        "html": "<b>Name:</b> {NAME}<br><b>Value:</b> {"
-        + selected_col
-        + "}<br><b>Date:</b> "
-        + selected_period
-        + "",
-        "style": {"backgroundColor": "steelblue", "color": "white"},
-    }
-
-    layers = [geojson]
-    if show_nodata:
-        layers.append(geojson_null)
-
-    r = pdk.Deck(
-        layers=layers,
-        initial_view_state=initial_view_state,
-        map_style="light",
-        tooltip=tooltip,
-    )
-
-    row3_col1, row3_col2 = st.columns([6, 1])
-
-    with row3_col1:
-        st.pydeck_chart(r)
-    with row3_col2:
-        st.write(
-            cm.create_colormap(
-                palette,
-                label=selected_col.replace("_", " ").title(),
-                width=0.2,
-                height=3,
-                orientation="vertical",
-                vmin=min_value,
-                vmax=max_value,
-                font_size=10,
-            )
-        )
-    row4_col1, row4_col2, row4_col3 = st.columns([1, 2, 3])
-    with row4_col1:
-        show_data = st.checkbox("Show raw data")
-    with row4_col2:
-        show_cols = st.multiselect("Select columns", data_cols)
-    with row4_col3:
-        show_colormaps = st.checkbox("Preview all color palettes")
-        if show_colormaps:
-            st.write(cm.plot_colormaps(return_fig=True))
-    if show_data:
-        if scale == "National":
-            st.dataframe(gdf[["NAME", "GEOID"] + show_cols])
-        elif scale == "State":
-            st.dataframe(gdf[["NAME", "STUSPS"] + show_cols])
-        elif scale == "County":
-            st.dataframe(gdf[["NAME", "STATEFP", "COUNTYFP"] + show_cols])
-        elif scale == "Metro":
-            st.dataframe(gdf[["NAME", "CBSAFP"] + show_cols])
-        elif scale == "Zip":
-            st.dataframe(gdf[["GEOID10"] + show_cols])
apps/hurricane.py
DELETED
@@ -1,52 +0,0 @@
-import streamlit as st
-import tropycal.tracks as tracks
-
-
-@st.cache(allow_output_mutation=True)
-def read_data(basin='north_atlantic', source='hurdat', include_btk=False):
-    return tracks.TrackDataset(basin=basin, source=source, include_btk=include_btk)
-
-
-def app():
-
-    st.title("Hurricane Mapping")
-
-    row1_col1, row1_col2 = st.columns([3, 1])
-
-    with row1_col1:
-        empty = st.empty()
-        empty.image("https://i.imgur.com/Ec7qsR0.png")
-
-    with row1_col2:
-
-        checkbox = st.checkbox("Select from a list of hurricanes", value=False)
-        if checkbox:
-            if st.session_state.get('hurricane') is None:
-                st.session_state['hurricane'] = read_data()
-
-            years = st.slider(
-                'Select a year', min_value=1950, max_value=2022, value=(2000, 2010)
-            )
-            storms = st.session_state['hurricane'].filter_storms(year_range=years)
-            selected = st.selectbox('Select a storm', storms)
-            storm = st.session_state['hurricane'].get_storm(selected)
-            ax = storm.plot()
-            fig = ax.get_figure()
-            empty.pyplot(fig)
-        else:
-
-            name = st.text_input("Or enter a storm Name", "michael")
-            if name:
-                if st.session_state.get('hurricane') is None:
-                    st.session_state['hurricane'] = read_data()
-                basin = st.session_state['hurricane']
-                years = basin.search_name(name)
-                if len(years) > 0:
-                    year = st.selectbox("Select a year", years)
-                    storm = basin.get_storm((name, year))
-                    ax = storm.plot()
-                    fig = ax.get_figure()
-                    empty.pyplot(fig)
-                else:
-                    empty.text("No storms found")
-                    st.write("No storms found")
apps/plotly_maps.py
DELETED
@@ -1,17 +0,0 @@
-import streamlit as st
-import leafmap.plotlymap as leafmap
-
-
-def app():
-
-    st.title("Plotly Maps")
-    m = leafmap.Map(basemap="street", height=650)
-    m.add_mapbox_layer(style="streets")
-
-    basemaps = list(leafmap.basemaps.keys())
-    basemap = st.selectbox(
-        "Select a basemap", basemaps, basemaps.index("Stamen.Terrain")
-    )
-    m.add_basemap(basemap)
-
-    st.plotly_chart(m, use_container_width=True)
apps/raster.py
DELETED
@@ -1,77 +0,0 @@
-import os
-import leafmap.foliumap as leafmap
-import streamlit as st
-import palettable
-
-
-@st.cache(allow_output_mutation=True)
-def load_cog_list():
-    print(os.getcwd())
-    in_txt = os.path.join(os.getcwd(), "data/cog_files.txt")
-    with open(in_txt) as f:
-        return [line.strip() for line in f.readlines()[1:]]
-
-
-@st.cache(allow_output_mutation=True)
-def get_palettes():
-    palettes = dir(palettable.matplotlib)[:-16]
-    return ["matplotlib." + p for p in palettes]
-
-
-def app():
-
-    st.title("Visualize Raster Datasets")
-    st.markdown(
-        """
-    An interactive web app for visualizing local raster datasets and Cloud Optimized GeoTIFF ([COG](https://www.cogeo.org)). The app was built using [streamlit](https://streamlit.io), [leafmap](https://leafmap.org), and [localtileserver](https://github.com/banesullivan/localtileserver).
-
-
-    """
-    )
-
-    row1_col1, row1_col2 = st.columns([2, 1])
-
-    with row1_col1:
-        cog_list = load_cog_list()
-        cog = st.selectbox("Select a sample Cloud Opitmized GeoTIFF (COG)", cog_list)
-
-    with row1_col2:
-        empty = st.empty()
-
-        url = empty.text_input(
-            "Enter a HTTP URL to a Cloud Optimized GeoTIFF (COG)",
-            cog,
-        )
-
-        data = st.file_uploader("Upload a raster dataset", type=["tif", "img"])
-
-        if data:
-            url = empty.text_input(
-                "Enter a URL to a Cloud Optimized GeoTIFF (COG)",
-                "",
-            )
-
-        add_palette = st.checkbox("Add a color palette")
-        if add_palette:
-            palette = st.selectbox("Select a color palette", get_palettes())
-        else:
-            palette = None
-
-        submit = st.button("Submit")
-
-    m = leafmap.Map(latlon_control=False)
-
-    if submit:
-        if data or url:
-            try:
-                if data:
-                    file_path = leafmap.save_data(data)
-                    m.add_local_tile(file_path, palette=palette, debug=True)
-                elif url:
-                    m.add_remote_tile(url, palette=palette, debug=True)
-            except Exception as e:
-                with row1_col2:
-                    st.error("Work in progress. Try it again later.")
-
-    with row1_col1:
-        m.to_streamlit()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
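Note on the @st.cache(allow_output_mutation=True) decorators above: newer Streamlit releases deprecate st.cache. A minimal sketch of the same loader written with st.cache_data, assuming the same data/cog_files.txt layout, would be:

import os
import streamlit as st

@st.cache_data
def load_cog_list():
    # Same behavior as the deleted loader: read the sample COG URLs,
    # skipping the header line of data/cog_files.txt.
    in_txt = os.path.join(os.getcwd(), "data/cog_files.txt")
    with open(in_txt) as f:
        return [line.strip() for line in f.readlines()[1:]]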
apps/rois.py
DELETED
@@ -1,174 +0,0 @@
""" A module for storing some sample ROIs for creating Landsat/GOES timelapse.
"""

from shapely.geometry import Polygon

goes_rois = {
    "Creek Fire, CA (2020-09-05)": {
        "region": Polygon(
            [
                [-121.003418, 36.848857],
                [-121.003418, 39.049052],
                [-117.905273, 39.049052],
                [-117.905273, 36.848857],
                [-121.003418, 36.848857],
            ]
        ),
        "start_time": "2020-09-05T15:00:00",
        "end_time": "2020-09-06T02:00:00",
    },
    "Bomb Cyclone (2021-10-24)": {
        "region": Polygon(
            [
                [-159.5954, 60.4088],
                [-159.5954, 24.5178],
                [-114.2438, 24.5178],
                [-114.2438, 60.4088],
            ]
        ),
        "start_time": "2021-10-24T14:00:00",
        "end_time": "2021-10-25T01:00:00",
    },
    "Hunga Tonga Volcanic Eruption (2022-01-15)": {
        "region": Polygon(
            [
                [-192.480469, -32.546813],
                [-192.480469, -8.754795],
                [-157.587891, -8.754795],
                [-157.587891, -32.546813],
                [-192.480469, -32.546813],
            ]
        ),
        "start_time": "2022-01-15T03:00:00",
        "end_time": "2022-01-15T07:00:00",
    },
    "Hunga Tonga Volcanic Eruption Closer Look (2022-01-15)": {
        "region": Polygon(
            [
                [-178.901367, -22.958393],
                [-178.901367, -17.85329],
                [-171.452637, -17.85329],
                [-171.452637, -22.958393],
                [-178.901367, -22.958393],
            ]
        ),
        "start_time": "2022-01-15T03:00:00",
        "end_time": "2022-01-15T07:00:00",
    },
}


landsat_rois = {
    "Aral Sea": Polygon(
        [
            [57.667236, 43.834527],
            [57.667236, 45.996962],
            [61.12793, 45.996962],
            [61.12793, 43.834527],
            [57.667236, 43.834527],
        ]
    ),
    "Dubai": Polygon(
        [
            [54.541626, 24.763044],
            [54.541626, 25.427152],
            [55.632019, 25.427152],
            [55.632019, 24.763044],
            [54.541626, 24.763044],
        ]
    ),
    "Hong Kong International Airport": Polygon(
        [
            [113.825226, 22.198849],
            [113.825226, 22.349758],
            [114.085121, 22.349758],
            [114.085121, 22.198849],
            [113.825226, 22.198849],
        ]
    ),
    "Las Vegas, NV": Polygon(
        [
            [-115.554199, 35.804449],
            [-115.554199, 36.558188],
            [-113.903503, 36.558188],
            [-113.903503, 35.804449],
            [-115.554199, 35.804449],
        ]
    ),
    "Pucallpa, Peru": Polygon(
        [
            [-74.672699, -8.600032],
            [-74.672699, -8.254983],
            [-74.279938, -8.254983],
            [-74.279938, -8.600032],
        ]
    ),
    "Sierra Gorda, Chile": Polygon(
        [
            [-69.315491, -22.837104],
            [-69.315491, -22.751488],
            [-69.190006, -22.751488],
            [-69.190006, -22.837104],
            [-69.315491, -22.837104],
        ]
    ),
}

modis_rois = {
    "World": Polygon(
        [
            [-171.210938, -57.136239],
            [-171.210938, 79.997168],
            [177.539063, 79.997168],
            [177.539063, -57.136239],
            [-171.210938, -57.136239],
        ]
    ),
    "Africa": Polygon(
        [
            [-18.6983, 38.1446],
            [-18.6983, -36.1630],
            [52.2293, -36.1630],
            [52.2293, 38.1446],
        ]
    ),
    "USA": Polygon(
        [
            [-127.177734, 23.725012],
            [-127.177734, 50.792047],
            [-66.269531, 50.792047],
            [-66.269531, 23.725012],
            [-127.177734, 23.725012],
        ]
    ),
}

ocean_rois = {
    "Gulf of Mexico": Polygon(
        [
            [-101.206055, 15.496032],
            [-101.206055, 32.361403],
            [-75.673828, 32.361403],
            [-75.673828, 15.496032],
            [-101.206055, 15.496032],
        ]
    ),
    "North Atlantic Ocean": Polygon(
        [
            [-85.341797, 24.046464],
            [-85.341797, 45.02695],
            [-55.810547, 45.02695],
            [-55.810547, 24.046464],
            [-85.341797, 24.046464],
        ]
    ),
    "World": Polygon(
        [
            [-171.210938, -57.136239],
            [-171.210938, 79.997168],
            [177.539063, 79.997168],
            [177.539063, -57.136239],
            [-171.210938, -57.136239],
        ]
    ),
}
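These ROI dictionaries are consumed by the timelapse page below, which wraps the selected polygon in a one-row GeoDataFrame and converts it to an Earth Engine geometry. A minimal sketch of that pattern, mirroring the deleted timelapse code and assuming an authenticated Earth Engine session:

import geopandas as gpd
import geemap.foliumap as geemap

from apps.rois import landsat_rois

# Wrap a sample ROI in a single-row GeoDataFrame (EPSG:4326), then convert
# it to an ee.Geometry the same way the deleted timelapse page did.
gdf = gpd.GeoDataFrame(index=[0], crs="epsg:4326", geometry=[landsat_rois["Las Vegas, NV"]])
roi = geemap.gdf_to_ee(gdf, geodesic=False)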
apps/timelapse.py
DELETED
@@ -1,1314 +0,0 @@
|
|
1 |
-
import ee
|
2 |
-
import os
|
3 |
-
import datetime
|
4 |
-
import fiona
|
5 |
-
import geopandas as gpd
|
6 |
-
import folium
|
7 |
-
import streamlit as st
|
8 |
-
import geemap.colormaps as cm
|
9 |
-
import geemap.foliumap as geemap
|
10 |
-
from datetime import date
|
11 |
-
from .rois import *
|
12 |
-
|
13 |
-
|
14 |
-
@st.cache(allow_output_mutation=True)
|
15 |
-
def uploaded_file_to_gdf(data):
|
16 |
-
import tempfile
|
17 |
-
import os
|
18 |
-
import uuid
|
19 |
-
|
20 |
-
_, file_extension = os.path.splitext(data.name)
|
21 |
-
file_id = str(uuid.uuid4())
|
22 |
-
file_path = os.path.join(tempfile.gettempdir(), f"{file_id}{file_extension}")
|
23 |
-
|
24 |
-
with open(file_path, "wb") as file:
|
25 |
-
file.write(data.getbuffer())
|
26 |
-
|
27 |
-
if file_path.lower().endswith(".kml"):
|
28 |
-
fiona.drvsupport.supported_drivers["KML"] = "rw"
|
29 |
-
gdf = gpd.read_file(file_path, driver="KML")
|
30 |
-
else:
|
31 |
-
gdf = gpd.read_file(file_path)
|
32 |
-
|
33 |
-
return gdf
|
34 |
-
|
35 |
-
|
36 |
-
def app():
|
37 |
-
|
38 |
-
today = date.today()
|
39 |
-
|
40 |
-
st.title("Create Timelapse")
|
41 |
-
|
42 |
-
st.markdown(
|
43 |
-
"""
|
44 |
-
An interactive web app for creating [Landsat](https://developers.google.com/earth-engine/datasets/catalog/landsat)/[GOES](https://jstnbraaten.medium.com/goes-in-earth-engine-53fbc8783c16) timelapse for any location around the globe.
|
45 |
-
The app was built using [streamlit](https://streamlit.io), [geemap](https://geemap.org), and [Google Earth Engine](https://earthengine.google.com). For more info, check out my streamlit [blog post](https://blog.streamlit.io/creating-satellite-timelapse-with-streamlit-and-earth-engine).
|
46 |
-
"""
|
47 |
-
)
|
48 |
-
|
49 |
-
row1_col1, row1_col2 = st.columns([2, 1])
|
50 |
-
|
51 |
-
if st.session_state.get("zoom_level") is None:
|
52 |
-
st.session_state["zoom_level"] = 4
|
53 |
-
|
54 |
-
st.session_state["ee_asset_id"] = None
|
55 |
-
st.session_state["bands"] = None
|
56 |
-
st.session_state["palette"] = None
|
57 |
-
st.session_state["vis_params"] = None
|
58 |
-
|
59 |
-
with row1_col1:
|
60 |
-
m = geemap.Map(
|
61 |
-
basemap="HYBRID",
|
62 |
-
plugin_Draw=True,
|
63 |
-
Draw_export=True,
|
64 |
-
locate_control=True,
|
65 |
-
plugin_LatLngPopup=False,
|
66 |
-
)
|
67 |
-
m.add_basemap("ROADMAP")
|
68 |
-
|
69 |
-
with row1_col2:
|
70 |
-
|
71 |
-
keyword = st.text_input("Search for a location:", "")
|
72 |
-
if keyword:
|
73 |
-
locations = geemap.geocode(keyword)
|
74 |
-
if locations is not None and len(locations) > 0:
|
75 |
-
str_locations = [str(g)[1:-1] for g in locations]
|
76 |
-
location = st.selectbox("Select a location:", str_locations)
|
77 |
-
loc_index = str_locations.index(location)
|
78 |
-
selected_loc = locations[loc_index]
|
79 |
-
lat, lng = selected_loc.lat, selected_loc.lng
|
80 |
-
folium.Marker(location=[lat, lng], popup=location).add_to(m)
|
81 |
-
m.set_center(lng, lat, 12)
|
82 |
-
st.session_state["zoom_level"] = 12
|
83 |
-
|
84 |
-
collection = st.selectbox(
|
85 |
-
"Select a satellite image collection: ",
|
86 |
-
[
|
87 |
-
"Any Earth Engine ImageCollection",
|
88 |
-
"Landsat TM-ETM-OLI Surface Reflectance",
|
89 |
-
"Sentinel-2 MSI Surface Reflectance",
|
90 |
-
"Geostationary Operational Environmental Satellites (GOES)",
|
91 |
-
"MODIS Vegetation Indices (NDVI/EVI) 16-Day Global 1km",
|
92 |
-
"MODIS Gap filled Land Surface Temperature Daily",
|
93 |
-
"MODIS Ocean Color SMI",
|
94 |
-
"USDA National Agriculture Imagery Program (NAIP)",
|
95 |
-
],
|
96 |
-
index=1,
|
97 |
-
)
|
98 |
-
|
99 |
-
if collection in [
|
100 |
-
"Landsat TM-ETM-OLI Surface Reflectance",
|
101 |
-
"Sentinel-2 MSI Surface Reflectance",
|
102 |
-
]:
|
103 |
-
roi_options = ["Uploaded GeoJSON"] + list(landsat_rois.keys())
|
104 |
-
|
105 |
-
elif collection == "Geostationary Operational Environmental Satellites (GOES)":
|
106 |
-
roi_options = ["Uploaded GeoJSON"] + list(goes_rois.keys())
|
107 |
-
|
108 |
-
elif collection in [
|
109 |
-
"MODIS Vegetation Indices (NDVI/EVI) 16-Day Global 1km",
|
110 |
-
"MODIS Gap filled Land Surface Temperature Daily",
|
111 |
-
]:
|
112 |
-
roi_options = ["Uploaded GeoJSON"] + list(modis_rois.keys())
|
113 |
-
elif collection == "MODIS Ocean Color SMI":
|
114 |
-
roi_options = ["Uploaded GeoJSON"] + list(ocean_rois.keys())
|
115 |
-
else:
|
116 |
-
roi_options = ["Uploaded GeoJSON"]
|
117 |
-
|
118 |
-
if collection == "Any Earth Engine ImageCollection":
|
119 |
-
keyword = st.text_input("Enter a keyword to search (e.g., MODIS):", "")
|
120 |
-
if keyword:
|
121 |
-
|
122 |
-
assets = geemap.search_ee_data(keyword)
|
123 |
-
ee_assets = []
|
124 |
-
for asset in assets:
|
125 |
-
if asset["ee_id_snippet"].startswith("ee.ImageCollection"):
|
126 |
-
ee_assets.append(asset)
|
127 |
-
|
128 |
-
asset_titles = [x["title"] for x in ee_assets]
|
129 |
-
dataset = st.selectbox("Select a dataset:", asset_titles)
|
130 |
-
if len(ee_assets) > 0:
|
131 |
-
st.session_state["ee_assets"] = ee_assets
|
132 |
-
st.session_state["asset_titles"] = asset_titles
|
133 |
-
index = asset_titles.index(dataset)
|
134 |
-
ee_id = ee_assets[index]["id"]
|
135 |
-
else:
|
136 |
-
ee_id = ""
|
137 |
-
|
138 |
-
if dataset is not None:
|
139 |
-
with st.expander("Show dataset details", False):
|
140 |
-
index = asset_titles.index(dataset)
|
141 |
-
html = geemap.ee_data_html(st.session_state["ee_assets"][index])
|
142 |
-
st.markdown(html, True)
|
143 |
-
# elif collection == "MODIS Gap filled Land Surface Temperature Daily":
|
144 |
-
# ee_id = ""
|
145 |
-
else:
|
146 |
-
ee_id = ""
|
147 |
-
|
148 |
-
asset_id = st.text_input("Enter an ee.ImageCollection asset ID:", ee_id)
|
149 |
-
|
150 |
-
if asset_id:
|
151 |
-
with st.expander("Customize band combination and color palette", True):
|
152 |
-
try:
|
153 |
-
col = ee.ImageCollection.load(asset_id)
|
154 |
-
st.session_state["ee_asset_id"] = asset_id
|
155 |
-
except:
|
156 |
-
st.error("Invalid Earth Engine asset ID.")
|
157 |
-
st.session_state["ee_asset_id"] = None
|
158 |
-
return
|
159 |
-
|
160 |
-
img_bands = col.first().bandNames().getInfo()
|
161 |
-
if len(img_bands) >= 3:
|
162 |
-
default_bands = img_bands[:3][::-1]
|
163 |
-
else:
|
164 |
-
default_bands = img_bands[:]
|
165 |
-
bands = st.multiselect(
|
166 |
-
"Select one or three bands (RGB):", img_bands, default_bands
|
167 |
-
)
|
168 |
-
st.session_state["bands"] = bands
|
169 |
-
|
170 |
-
if len(bands) == 1:
|
171 |
-
palette_options = st.selectbox(
|
172 |
-
"Color palette",
|
173 |
-
cm.list_colormaps(),
|
174 |
-
index=2,
|
175 |
-
)
|
176 |
-
palette_values = cm.get_palette(palette_options, 15)
|
177 |
-
palette = st.text_area(
|
178 |
-
"Enter a custom palette:",
|
179 |
-
palette_values,
|
180 |
-
)
|
181 |
-
st.write(
|
182 |
-
cm.plot_colormap(cmap=palette_options, return_fig=True)
|
183 |
-
)
|
184 |
-
st.session_state["palette"] = eval(palette)
|
185 |
-
|
186 |
-
if bands:
|
187 |
-
vis_params = st.text_area(
|
188 |
-
"Enter visualization parameters",
|
189 |
-
"{'bands': ["
|
190 |
-
+ ", ".join([f"'{band}'" for band in bands])
|
191 |
-
+ "]}",
|
192 |
-
)
|
193 |
-
else:
|
194 |
-
vis_params = st.text_area(
|
195 |
-
"Enter visualization parameters",
|
196 |
-
"{}",
|
197 |
-
)
|
198 |
-
try:
|
199 |
-
st.session_state["vis_params"] = eval(vis_params)
|
200 |
-
st.session_state["vis_params"]["palette"] = st.session_state[
|
201 |
-
"palette"
|
202 |
-
]
|
203 |
-
except Exception as e:
|
204 |
-
st.session_state["vis_params"] = None
|
205 |
-
st.error(
|
206 |
-
f"Invalid visualization parameters. It must be a dictionary."
|
207 |
-
)
|
208 |
-
|
209 |
-
elif collection == "MODIS Gap filled Land Surface Temperature Daily":
|
210 |
-
with st.expander("Show dataset details", False):
|
211 |
-
st.markdown(
|
212 |
-
"""
|
213 |
-
See the [Awesome GEE Community Datasets](https://samapriya.github.io/awesome-gee-community-datasets/projects/daily_lst/).
|
214 |
-
"""
|
215 |
-
)
|
216 |
-
|
217 |
-
MODIS_options = ["Daytime (1:30 pm)", "Nighttime (1:30 am)"]
|
218 |
-
MODIS_option = st.selectbox("Select a MODIS dataset:", MODIS_options)
|
219 |
-
if MODIS_option == "Daytime (1:30 pm)":
|
220 |
-
st.session_state[
|
221 |
-
"ee_asset_id"
|
222 |
-
] = "projects/sat-io/open-datasets/gap-filled-lst/gf_day_1km"
|
223 |
-
else:
|
224 |
-
st.session_state[
|
225 |
-
"ee_asset_id"
|
226 |
-
] = "projects/sat-io/open-datasets/gap-filled-lst/gf_night_1km"
|
227 |
-
|
228 |
-
palette_options = st.selectbox(
|
229 |
-
"Color palette",
|
230 |
-
cm.list_colormaps(),
|
231 |
-
index=90,
|
232 |
-
)
|
233 |
-
palette_values = cm.get_palette(palette_options, 15)
|
234 |
-
palette = st.text_area(
|
235 |
-
"Enter a custom palette:",
|
236 |
-
palette_values,
|
237 |
-
)
|
238 |
-
st.write(cm.plot_colormap(cmap=palette_options, return_fig=True))
|
239 |
-
st.session_state["palette"] = eval(palette)
|
240 |
-
elif collection == "MODIS Ocean Color SMI":
|
241 |
-
with st.expander("Show dataset details", False):
|
242 |
-
st.markdown(
|
243 |
-
"""
|
244 |
-
See the [Earth Engine Data Catalog](https://developers.google.com/earth-engine/datasets/catalog/NASA_OCEANDATA_MODIS-Aqua_L3SMI).
|
245 |
-
"""
|
246 |
-
)
|
247 |
-
|
248 |
-
MODIS_options = ["Aqua", "Terra"]
|
249 |
-
MODIS_option = st.selectbox("Select a satellite:", MODIS_options)
|
250 |
-
st.session_state["ee_asset_id"] = MODIS_option
|
251 |
-
# if MODIS_option == "Daytime (1:30 pm)":
|
252 |
-
# st.session_state[
|
253 |
-
# "ee_asset_id"
|
254 |
-
# ] = "projects/sat-io/open-datasets/gap-filled-lst/gf_day_1km"
|
255 |
-
# else:
|
256 |
-
# st.session_state[
|
257 |
-
# "ee_asset_id"
|
258 |
-
# ] = "projects/sat-io/open-datasets/gap-filled-lst/gf_night_1km"
|
259 |
-
|
260 |
-
band_dict = {
|
261 |
-
"Chlorophyll a concentration": "chlor_a",
|
262 |
-
"Normalized fluorescence line height": "nflh",
|
263 |
-
"Particulate organic carbon": "poc",
|
264 |
-
"Sea surface temperature": "sst",
|
265 |
-
"Remote sensing reflectance at band 412nm": "Rrs_412",
|
266 |
-
"Remote sensing reflectance at band 443nm": "Rrs_443",
|
267 |
-
"Remote sensing reflectance at band 469nm": "Rrs_469",
|
268 |
-
"Remote sensing reflectance at band 488nm": "Rrs_488",
|
269 |
-
"Remote sensing reflectance at band 531nm": "Rrs_531",
|
270 |
-
"Remote sensing reflectance at band 547nm": "Rrs_547",
|
271 |
-
"Remote sensing reflectance at band 555nm": "Rrs_555",
|
272 |
-
"Remote sensing reflectance at band 645nm": "Rrs_645",
|
273 |
-
"Remote sensing reflectance at band 667nm": "Rrs_667",
|
274 |
-
"Remote sensing reflectance at band 678nm": "Rrs_678",
|
275 |
-
}
|
276 |
-
|
277 |
-
band_options = list(band_dict.keys())
|
278 |
-
band = st.selectbox(
|
279 |
-
"Select a band",
|
280 |
-
band_options,
|
281 |
-
band_options.index("Sea surface temperature"),
|
282 |
-
)
|
283 |
-
st.session_state["band"] = band_dict[band]
|
284 |
-
|
285 |
-
colors = cm.list_colormaps()
|
286 |
-
palette_options = st.selectbox(
|
287 |
-
"Color palette",
|
288 |
-
colors,
|
289 |
-
index=colors.index("coolwarm"),
|
290 |
-
)
|
291 |
-
palette_values = cm.get_palette(palette_options, 15)
|
292 |
-
palette = st.text_area(
|
293 |
-
"Enter a custom palette:",
|
294 |
-
palette_values,
|
295 |
-
)
|
296 |
-
st.write(cm.plot_colormap(cmap=palette_options, return_fig=True))
|
297 |
-
st.session_state["palette"] = eval(palette)
|
298 |
-
|
299 |
-
sample_roi = st.selectbox(
|
300 |
-
"Select a sample ROI or upload a GeoJSON file:",
|
301 |
-
roi_options,
|
302 |
-
index=0,
|
303 |
-
)
|
304 |
-
|
305 |
-
add_outline = st.checkbox(
|
306 |
-
"Overlay an administrative boundary on timelapse", False
|
307 |
-
)
|
308 |
-
|
309 |
-
if add_outline:
|
310 |
-
|
311 |
-
with st.expander("Customize administrative boundary", True):
|
312 |
-
|
313 |
-
overlay_options = {
|
314 |
-
"User-defined": None,
|
315 |
-
"Continents": "continents",
|
316 |
-
"Countries": "countries",
|
317 |
-
"US States": "us_states",
|
318 |
-
"China": "china",
|
319 |
-
}
|
320 |
-
|
321 |
-
overlay = st.selectbox(
|
322 |
-
"Select an administrative boundary:",
|
323 |
-
list(overlay_options.keys()),
|
324 |
-
index=2,
|
325 |
-
)
|
326 |
-
|
327 |
-
overlay_data = overlay_options[overlay]
|
328 |
-
|
329 |
-
if overlay_data is None:
|
330 |
-
overlay_data = st.text_input(
|
331 |
-
"Enter an HTTP URL to a GeoJSON file or an ee.FeatureCollection asset id:",
|
332 |
-
"https://raw.githubusercontent.com/giswqs/geemap/master/examples/data/countries.geojson",
|
333 |
-
)
|
334 |
-
|
335 |
-
overlay_color = st.color_picker(
|
336 |
-
"Select a color for the administrative boundary:", "#000000"
|
337 |
-
)
|
338 |
-
overlay_width = st.slider(
|
339 |
-
"Select a line width for the administrative boundary:", 1, 20, 1
|
340 |
-
)
|
341 |
-
overlay_opacity = st.slider(
|
342 |
-
"Select an opacity for the administrative boundary:",
|
343 |
-
0.0,
|
344 |
-
1.0,
|
345 |
-
1.0,
|
346 |
-
0.05,
|
347 |
-
)
|
348 |
-
else:
|
349 |
-
overlay_data = None
|
350 |
-
overlay_color = "black"
|
351 |
-
overlay_width = 1
|
352 |
-
overlay_opacity = 1
|
353 |
-
|
354 |
-
with row1_col1:
|
355 |
-
|
356 |
-
with st.expander(
|
357 |
-
"Steps: Draw a rectangle on the map -> Export it as a GeoJSON -> Upload it back to the app -> Click the Submit button. Expand this tab to see a demo π"
|
358 |
-
):
|
359 |
-
video_empty = st.empty()
|
360 |
-
|
361 |
-
data = st.file_uploader(
|
362 |
-
"Upload a GeoJSON file to use as an ROI. Customize timelapse parameters and then click the Submit button ππ",
|
363 |
-
type=["geojson", "kml", "zip"],
|
364 |
-
)
|
365 |
-
|
366 |
-
crs = "epsg:4326"
|
367 |
-
if sample_roi == "Uploaded GeoJSON":
|
368 |
-
if data is None:
|
369 |
-
# st.info(
|
370 |
-
# "Steps to create a timelapse: Draw a rectangle on the map -> Export it as a GeoJSON -> Upload it back to the app -> Click Submit button"
|
371 |
-
# )
|
372 |
-
if collection in [
|
373 |
-
"Geostationary Operational Environmental Satellites (GOES)",
|
374 |
-
"USDA National Agriculture Imagery Program (NAIP)",
|
375 |
-
] and (not keyword):
|
376 |
-
m.set_center(-100, 40, 3)
|
377 |
-
# else:
|
378 |
-
# m.set_center(4.20, 18.63, zoom=2)
|
379 |
-
else:
|
380 |
-
if collection in [
|
381 |
-
"Landsat TM-ETM-OLI Surface Reflectance",
|
382 |
-
"Sentinel-2 MSI Surface Reflectance",
|
383 |
-
]:
|
384 |
-
gdf = gpd.GeoDataFrame(
|
385 |
-
index=[0], crs=crs, geometry=[landsat_rois[sample_roi]]
|
386 |
-
)
|
387 |
-
elif (
|
388 |
-
collection
|
389 |
-
== "Geostationary Operational Environmental Satellites (GOES)"
|
390 |
-
):
|
391 |
-
gdf = gpd.GeoDataFrame(
|
392 |
-
index=[0], crs=crs, geometry=[goes_rois[sample_roi]["region"]]
|
393 |
-
)
|
394 |
-
elif collection == "MODIS Vegetation Indices (NDVI/EVI) 16-Day Global 1km":
|
395 |
-
gdf = gpd.GeoDataFrame(
|
396 |
-
index=[0], crs=crs, geometry=[modis_rois[sample_roi]]
|
397 |
-
)
|
398 |
-
|
399 |
-
if sample_roi != "Uploaded GeoJSON":
|
400 |
-
|
401 |
-
if collection in [
|
402 |
-
"Landsat TM-ETM-OLI Surface Reflectance",
|
403 |
-
"Sentinel-2 MSI Surface Reflectance",
|
404 |
-
]:
|
405 |
-
gdf = gpd.GeoDataFrame(
|
406 |
-
index=[0], crs=crs, geometry=[landsat_rois[sample_roi]]
|
407 |
-
)
|
408 |
-
elif (
|
409 |
-
collection
|
410 |
-
== "Geostationary Operational Environmental Satellites (GOES)"
|
411 |
-
):
|
412 |
-
gdf = gpd.GeoDataFrame(
|
413 |
-
index=[0], crs=crs, geometry=[goes_rois[sample_roi]["region"]]
|
414 |
-
)
|
415 |
-
elif collection in [
|
416 |
-
"MODIS Vegetation Indices (NDVI/EVI) 16-Day Global 1km",
|
417 |
-
"MODIS Gap filled Land Surface Temperature Daily",
|
418 |
-
]:
|
419 |
-
gdf = gpd.GeoDataFrame(
|
420 |
-
index=[0], crs=crs, geometry=[modis_rois[sample_roi]]
|
421 |
-
)
|
422 |
-
elif collection == "MODIS Ocean Color SMI":
|
423 |
-
gdf = gpd.GeoDataFrame(
|
424 |
-
index=[0], crs=crs, geometry=[ocean_rois[sample_roi]]
|
425 |
-
)
|
426 |
-
st.session_state["roi"] = geemap.gdf_to_ee(gdf, geodesic=False)
|
427 |
-
m.add_gdf(gdf, "ROI")
|
428 |
-
|
429 |
-
elif data:
|
430 |
-
gdf = uploaded_file_to_gdf(data)
|
431 |
-
st.session_state["roi"] = geemap.gdf_to_ee(gdf, geodesic=False)
|
432 |
-
m.add_gdf(gdf, "ROI")
|
433 |
-
|
434 |
-
m.to_streamlit(height=600)
|
435 |
-
|
436 |
-
with row1_col2:
|
437 |
-
|
438 |
-
if collection in [
|
439 |
-
"Landsat TM-ETM-OLI Surface Reflectance",
|
440 |
-
"Sentinel-2 MSI Surface Reflectance",
|
441 |
-
]:
|
442 |
-
|
443 |
-
if collection == "Landsat TM-ETM-OLI Surface Reflectance":
|
444 |
-
sensor_start_year = 1984
|
445 |
-
timelapse_title = "Landsat Timelapse"
|
446 |
-
timelapse_speed = 5
|
447 |
-
elif collection == "Sentinel-2 MSI Surface Reflectance":
|
448 |
-
sensor_start_year = 2015
|
449 |
-
timelapse_title = "Sentinel-2 Timelapse"
|
450 |
-
timelapse_speed = 5
|
451 |
-
video_empty.video("https://youtu.be/VVRK_-dEjR4")
|
452 |
-
|
453 |
-
with st.form("submit_landsat_form"):
|
454 |
-
|
455 |
-
roi = None
|
456 |
-
if st.session_state.get("roi") is not None:
|
457 |
-
roi = st.session_state.get("roi")
|
458 |
-
out_gif = geemap.temp_file_path(".gif")
|
459 |
-
|
460 |
-
title = st.text_input(
|
461 |
-
"Enter a title to show on the timelapse: ", timelapse_title
|
462 |
-
)
|
463 |
-
RGB = st.selectbox(
|
464 |
-
"Select an RGB band combination:",
|
465 |
-
[
|
466 |
-
"Red/Green/Blue",
|
467 |
-
"NIR/Red/Green",
|
468 |
-
"SWIR2/SWIR1/NIR",
|
469 |
-
"NIR/SWIR1/Red",
|
470 |
-
"SWIR2/NIR/Red",
|
471 |
-
"SWIR2/SWIR1/Red",
|
472 |
-
"SWIR1/NIR/Blue",
|
473 |
-
"NIR/SWIR1/Blue",
|
474 |
-
"SWIR2/NIR/Green",
|
475 |
-
"SWIR1/NIR/Red",
|
476 |
-
"SWIR2/NIR/SWIR1",
|
477 |
-
"SWIR1/NIR/SWIR2",
|
478 |
-
],
|
479 |
-
index=9,
|
480 |
-
)
|
481 |
-
|
482 |
-
frequency = st.selectbox(
|
483 |
-
"Select a temporal frequency:",
|
484 |
-
["year", "quarter", "month"],
|
485 |
-
index=0,
|
486 |
-
)
|
487 |
-
|
488 |
-
with st.expander("Customize timelapse"):
|
489 |
-
|
490 |
-
speed = st.slider("Frames per second:", 1, 30, timelapse_speed)
|
491 |
-
dimensions = st.slider(
|
492 |
-
"Maximum dimensions (Width*Height) in pixels", 768, 2000, 768
|
493 |
-
)
|
494 |
-
progress_bar_color = st.color_picker(
|
495 |
-
"Progress bar color:", "#0000ff"
|
496 |
-
)
|
497 |
-
years = st.slider(
|
498 |
-
"Start and end year:",
|
499 |
-
sensor_start_year,
|
500 |
-
today.year,
|
501 |
-
(sensor_start_year, today.year),
|
502 |
-
)
|
503 |
-
months = st.slider("Start and end month:", 1, 12, (1, 12))
|
504 |
-
font_size = st.slider("Font size:", 10, 50, 30)
|
505 |
-
font_color = st.color_picker("Font color:", "#ffffff")
|
506 |
-
apply_fmask = st.checkbox(
|
507 |
-
"Apply fmask (remove clouds, shadows, snow)", True
|
508 |
-
)
|
509 |
-
font_type = st.selectbox(
|
510 |
-
"Select the font type for the title:",
|
511 |
-
["arial.ttf", "alibaba.otf"],
|
512 |
-
index=0,
|
513 |
-
)
|
514 |
-
fading = st.slider(
|
515 |
-
"Fading duration (seconds) for each frame:", 0.0, 3.0, 0.0
|
516 |
-
)
|
517 |
-
mp4 = st.checkbox("Save timelapse as MP4", True)
|
518 |
-
|
519 |
-
empty_text = st.empty()
|
520 |
-
empty_image = st.empty()
|
521 |
-
empty_fire_image = st.empty()
|
522 |
-
empty_video = st.container()
|
523 |
-
submitted = st.form_submit_button("Submit")
|
524 |
-
if submitted:
|
525 |
-
|
526 |
-
if sample_roi == "Uploaded GeoJSON" and data is None:
|
527 |
-
empty_text.warning(
|
528 |
-
"Steps to create a timelapse: Draw a rectangle on the map -> Export it as a GeoJSON -> Upload it back to the app -> Click the Submit button. Alternatively, you can select a sample ROI from the dropdown list."
|
529 |
-
)
|
530 |
-
else:
|
531 |
-
|
532 |
-
empty_text.text("Computing... Please wait...")
|
533 |
-
|
534 |
-
start_year = years[0]
|
535 |
-
end_year = years[1]
|
536 |
-
start_date = str(months[0]).zfill(2) + "-01"
|
537 |
-
end_date = str(months[1]).zfill(2) + "-30"
|
538 |
-
bands = RGB.split("/")
|
539 |
-
|
540 |
-
try:
|
541 |
-
if collection == "Landsat TM-ETM-OLI Surface Reflectance":
|
542 |
-
out_gif = geemap.landsat_timelapse(
|
543 |
-
roi=roi,
|
544 |
-
out_gif=out_gif,
|
545 |
-
start_year=start_year,
|
546 |
-
end_year=end_year,
|
547 |
-
start_date=start_date,
|
548 |
-
end_date=end_date,
|
549 |
-
bands=bands,
|
550 |
-
apply_fmask=apply_fmask,
|
551 |
-
frames_per_second=speed,
|
552 |
-
dimensions=dimensions,
|
553 |
-
overlay_data=overlay_data,
|
554 |
-
overlay_color=overlay_color,
|
555 |
-
overlay_width=overlay_width,
|
556 |
-
overlay_opacity=overlay_opacity,
|
557 |
-
frequency=frequency,
|
558 |
-
date_format=None,
|
559 |
-
title=title,
|
560 |
-
title_xy=("2%", "90%"),
|
561 |
-
add_text=True,
|
562 |
-
text_xy=("2%", "2%"),
|
563 |
-
text_sequence=None,
|
564 |
-
font_type=font_type,
|
565 |
-
font_size=font_size,
|
566 |
-
font_color=font_color,
|
567 |
-
add_progress_bar=True,
|
568 |
-
progress_bar_color=progress_bar_color,
|
569 |
-
progress_bar_height=5,
|
570 |
-
loop=0,
|
571 |
-
mp4=mp4,
|
572 |
-
fading=fading,
|
573 |
-
)
|
574 |
-
elif collection == "Sentinel-2 MSI Surface Reflectance":
|
575 |
-
out_gif = geemap.sentinel2_timelapse(
|
576 |
-
roi=roi,
|
577 |
-
out_gif=out_gif,
|
578 |
-
start_year=start_year,
|
579 |
-
end_year=end_year,
|
580 |
-
start_date=start_date,
|
581 |
-
end_date=end_date,
|
582 |
-
bands=bands,
|
583 |
-
apply_fmask=apply_fmask,
|
584 |
-
frames_per_second=speed,
|
585 |
-
dimensions=dimensions,
|
586 |
-
overlay_data=overlay_data,
|
587 |
-
overlay_color=overlay_color,
|
588 |
-
overlay_width=overlay_width,
|
589 |
-
overlay_opacity=overlay_opacity,
|
590 |
-
frequency=frequency,
|
591 |
-
date_format=None,
|
592 |
-
title=title,
|
593 |
-
title_xy=("2%", "90%"),
|
594 |
-
add_text=True,
|
595 |
-
text_xy=("2%", "2%"),
|
596 |
-
text_sequence=None,
|
597 |
-
font_type=font_type,
|
598 |
-
font_size=font_size,
|
599 |
-
font_color=font_color,
|
600 |
-
add_progress_bar=True,
|
601 |
-
progress_bar_color=progress_bar_color,
|
602 |
-
progress_bar_height=5,
|
603 |
-
loop=0,
|
604 |
-
mp4=mp4,
|
605 |
-
fading=fading,
|
606 |
-
)
|
607 |
-
except:
|
608 |
-
empty_text.error(
|
609 |
-
"An error occurred while computing the timelapse. Your probably requested too much data. Try reducing the ROI or timespan."
|
610 |
-
)
|
611 |
-
st.stop()
|
612 |
-
|
613 |
-
if out_gif is not None and os.path.exists(out_gif):
|
614 |
-
|
615 |
-
empty_text.text(
|
616 |
-
"Right click the GIF to save it to your computerπ"
|
617 |
-
)
|
618 |
-
empty_image.image(out_gif)
|
619 |
-
|
620 |
-
out_mp4 = out_gif.replace(".gif", ".mp4")
|
621 |
-
if mp4 and os.path.exists(out_mp4):
|
622 |
-
with empty_video:
|
623 |
-
st.text(
|
624 |
-
"Right click the MP4 to save it to your computerπ"
|
625 |
-
)
|
626 |
-
st.video(out_gif.replace(".gif", ".mp4"))
|
627 |
-
|
628 |
-
else:
|
629 |
-
empty_text.error(
|
630 |
-
"Something went wrong. You probably requested too much data. Try reducing the ROI or timespan."
|
631 |
-
)
|
632 |
-
|
633 |
-
elif collection == "Geostationary Operational Environmental Satellites (GOES)":
|
634 |
-
|
635 |
-
video_empty.video("https://youtu.be/16fA2QORG4A")
|
636 |
-
|
637 |
-
with st.form("submit_goes_form"):
|
638 |
-
|
639 |
-
roi = None
|
640 |
-
if st.session_state.get("roi") is not None:
|
641 |
-
roi = st.session_state.get("roi")
|
642 |
-
out_gif = geemap.temp_file_path(".gif")
|
643 |
-
|
644 |
-
satellite = st.selectbox("Select a satellite:", ["GOES-17", "GOES-16"])
|
645 |
-
earliest_date = datetime.date(2017, 7, 10)
|
646 |
-
latest_date = datetime.date.today()
|
647 |
-
|
648 |
-
if sample_roi == "Uploaded GeoJSON":
|
649 |
-
roi_start_date = today - datetime.timedelta(days=2)
|
650 |
-
roi_end_date = today - datetime.timedelta(days=1)
|
651 |
-
roi_start_time = datetime.time(14, 00)
|
652 |
-
roi_end_time = datetime.time(1, 00)
|
653 |
-
else:
|
654 |
-
roi_start = goes_rois[sample_roi]["start_time"]
|
655 |
-
roi_end = goes_rois[sample_roi]["end_time"]
|
656 |
-
roi_start_date = datetime.datetime.strptime(
|
657 |
-
roi_start[:10], "%Y-%m-%d"
|
658 |
-
)
|
659 |
-
roi_end_date = datetime.datetime.strptime(roi_end[:10], "%Y-%m-%d")
|
660 |
-
roi_start_time = datetime.time(
|
661 |
-
int(roi_start[11:13]), int(roi_start[14:16])
|
662 |
-
)
|
663 |
-
roi_end_time = datetime.time(
|
664 |
-
int(roi_end[11:13]), int(roi_end[14:16])
|
665 |
-
)
|
666 |
-
|
667 |
-
start_date = st.date_input("Select the start date:", roi_start_date)
|
668 |
-
end_date = st.date_input("Select the end date:", roi_end_date)
|
669 |
-
|
670 |
-
with st.expander("Customize timelapse"):
|
671 |
-
|
672 |
-
add_fire = st.checkbox("Add Fire/Hotspot Characterization", False)
|
673 |
-
|
674 |
-
scan_type = st.selectbox(
|
675 |
-
"Select a scan type:", ["Full Disk", "CONUS", "Mesoscale"]
|
676 |
-
)
|
677 |
-
|
678 |
-
start_time = st.time_input(
|
679 |
-
"Select the start time of the start date:", roi_start_time
|
680 |
-
)
|
681 |
-
|
682 |
-
end_time = st.time_input(
|
683 |
-
"Select the end time of the end date:", roi_end_time
|
684 |
-
)
|
685 |
-
|
686 |
-
start = (
|
687 |
-
start_date.strftime("%Y-%m-%d")
|
688 |
-
+ "T"
|
689 |
-
+ start_time.strftime("%H:%M:%S")
|
690 |
-
)
|
691 |
-
end = (
|
692 |
-
end_date.strftime("%Y-%m-%d")
|
693 |
-
+ "T"
|
694 |
-
+ end_time.strftime("%H:%M:%S")
|
695 |
-
)
|
696 |
-
|
697 |
-
speed = st.slider("Frames per second:", 1, 30, 5)
|
698 |
-
add_progress_bar = st.checkbox("Add a progress bar", True)
|
699 |
-
progress_bar_color = st.color_picker(
|
700 |
-
"Progress bar color:", "#0000ff"
|
701 |
-
)
|
702 |
-
font_size = st.slider("Font size:", 10, 50, 20)
|
703 |
-
font_color = st.color_picker("Font color:", "#ffffff")
|
704 |
-
fading = st.slider(
|
705 |
-
"Fading duration (seconds) for each frame:", 0.0, 3.0, 0.0
|
706 |
-
)
|
707 |
-
mp4 = st.checkbox("Save timelapse as MP4", True)
|
708 |
-
|
709 |
-
empty_text = st.empty()
|
710 |
-
empty_image = st.empty()
|
711 |
-
empty_video = st.container()
|
712 |
-
empty_fire_text = st.empty()
|
713 |
-
empty_fire_image = st.empty()
|
714 |
-
|
715 |
-
submitted = st.form_submit_button("Submit")
|
716 |
-
if submitted:
|
717 |
-
if sample_roi == "Uploaded GeoJSON" and data is None:
|
718 |
-
empty_text.warning(
|
719 |
-
"Steps to create a timelapse: Draw a rectangle on the map -> Export it as a GeoJSON -> Upload it back to the app -> Click the Submit button. Alternatively, you can select a sample ROI from the dropdown list."
|
720 |
-
)
|
721 |
-
else:
|
722 |
-
empty_text.text("Computing... Please wait...")
|
723 |
-
|
724 |
-
geemap.goes_timelapse(
|
725 |
-
out_gif,
|
726 |
-
start_date=start,
|
727 |
-
end_date=end,
|
728 |
-
data=satellite,
|
729 |
-
scan=scan_type.replace(" ", "_").lower(),
|
730 |
-
region=roi,
|
731 |
-
dimensions=768,
|
732 |
-
framesPerSecond=speed,
|
733 |
-
date_format="YYYY-MM-dd HH:mm",
|
734 |
-
xy=("3%", "3%"),
|
735 |
-
text_sequence=None,
|
736 |
-
font_type="arial.ttf",
|
737 |
-
font_size=font_size,
|
738 |
-
font_color=font_color,
|
739 |
-
add_progress_bar=add_progress_bar,
|
740 |
-
progress_bar_color=progress_bar_color,
|
741 |
-
progress_bar_height=5,
|
742 |
-
loop=0,
|
743 |
-
overlay_data=overlay_data,
|
744 |
-
overlay_color=overlay_color,
|
745 |
-
overlay_width=overlay_width,
|
746 |
-
overlay_opacity=overlay_opacity,
|
747 |
-
mp4=mp4,
|
748 |
-
fading=fading,
|
749 |
-
)
|
750 |
-
|
751 |
-
if out_gif is not None and os.path.exists(out_gif):
|
752 |
-
empty_text.text(
|
753 |
-
"Right click the GIF to save it to your computerπ"
|
754 |
-
)
|
755 |
-
empty_image.image(out_gif)
|
756 |
-
|
757 |
-
out_mp4 = out_gif.replace(".gif", ".mp4")
|
758 |
-
if mp4 and os.path.exists(out_mp4):
|
759 |
-
with empty_video:
|
760 |
-
st.text(
|
761 |
-
"Right click the MP4 to save it to your computerπ"
|
762 |
-
)
|
763 |
-
st.video(out_gif.replace(".gif", ".mp4"))
|
764 |
-
|
765 |
-
if add_fire:
|
766 |
-
out_fire_gif = geemap.temp_file_path(".gif")
|
767 |
-
empty_fire_text.text(
|
768 |
-
"Delineating Fire Hotspot... Please wait..."
|
769 |
-
)
|
770 |
-
geemap.goes_fire_timelapse(
|
771 |
-
out_fire_gif,
|
772 |
-
start_date=start,
|
773 |
-
end_date=end,
|
774 |
-
data=satellite,
|
775 |
-
scan=scan_type.replace(" ", "_").lower(),
|
776 |
-
region=roi,
|
777 |
-
dimensions=768,
|
778 |
-
framesPerSecond=speed,
|
779 |
-
date_format="YYYY-MM-dd HH:mm",
|
780 |
-
xy=("3%", "3%"),
|
781 |
-
text_sequence=None,
|
782 |
-
font_type="arial.ttf",
|
783 |
-
font_size=font_size,
|
784 |
-
font_color=font_color,
|
785 |
-
add_progress_bar=add_progress_bar,
|
786 |
-
progress_bar_color=progress_bar_color,
|
787 |
-
progress_bar_height=5,
|
788 |
-
loop=0,
|
789 |
-
)
|
790 |
-
if os.path.exists(out_fire_gif):
|
791 |
-
empty_fire_image.image(out_fire_gif)
|
792 |
-
else:
|
793 |
-
empty_text.text(
|
794 |
-
"Something went wrong, either the ROI is too big or there are no data available for the specified date range. Please try a smaller ROI or different date range."
|
795 |
-
)
|
796 |
-
|
797 |
-
elif collection == "MODIS Vegetation Indices (NDVI/EVI) 16-Day Global 1km":
|
798 |
-
|
799 |
-
video_empty.video("https://youtu.be/16fA2QORG4A")
|
800 |
-
|
801 |
-
satellite = st.selectbox("Select a satellite:", ["Terra", "Aqua"])
|
802 |
-
band = st.selectbox("Select a band:", ["NDVI", "EVI"])
|
803 |
-
|
804 |
-
with st.form("submit_modis_form"):
|
805 |
-
|
806 |
-
roi = None
|
807 |
-
if st.session_state.get("roi") is not None:
|
808 |
-
roi = st.session_state.get("roi")
|
809 |
-
out_gif = geemap.temp_file_path(".gif")
|
810 |
-
|
811 |
-
with st.expander("Customize timelapse"):
|
812 |
-
|
813 |
-
start = st.date_input(
|
814 |
-
"Select a start date:", datetime.date(2000, 2, 8)
|
815 |
-
)
|
816 |
-
end = st.date_input("Select an end date:", datetime.date.today())
|
817 |
-
|
818 |
-
start_date = start.strftime("%Y-%m-%d")
|
819 |
-
end_date = end.strftime("%Y-%m-%d")
|
820 |
-
|
821 |
-
speed = st.slider("Frames per second:", 1, 30, 5)
|
822 |
-
add_progress_bar = st.checkbox("Add a progress bar", True)
|
823 |
-
progress_bar_color = st.color_picker(
|
824 |
-
"Progress bar color:", "#0000ff"
|
825 |
-
)
|
826 |
-
font_size = st.slider("Font size:", 10, 50, 20)
|
827 |
-
font_color = st.color_picker("Font color:", "#ffffff")
|
828 |
-
|
829 |
-
font_type = st.selectbox(
|
830 |
-
"Select the font type for the title:",
|
831 |
-
["arial.ttf", "alibaba.otf"],
|
832 |
-
index=0,
|
833 |
-
)
|
834 |
-
fading = st.slider(
|
835 |
-
"Fading duration (seconds) for each frame:", 0.0, 3.0, 0.0
|
836 |
-
)
|
837 |
-
mp4 = st.checkbox("Save timelapse as MP4", True)
|
838 |
-
|
839 |
-
empty_text = st.empty()
|
840 |
-
empty_image = st.empty()
|
841 |
-
empty_video = st.container()
|
842 |
-
|
843 |
-
submitted = st.form_submit_button("Submit")
|
844 |
-
if submitted:
|
845 |
-
if sample_roi == "Uploaded GeoJSON" and data is None:
|
846 |
-
empty_text.warning(
|
847 |
-
"Steps to create a timelapse: Draw a rectangle on the map -> Export it as a GeoJSON -> Upload it back to the app -> Click the Submit button. Alternatively, you can select a sample ROI from the dropdown list."
|
848 |
-
)
|
849 |
-
else:
|
850 |
-
|
851 |
-
empty_text.text("Computing... Please wait...")
|
852 |
-
|
853 |
-
geemap.modis_ndvi_timelapse(
|
854 |
-
out_gif,
|
855 |
-
satellite,
|
856 |
-
band,
|
857 |
-
start_date,
|
858 |
-
end_date,
|
859 |
-
roi,
|
860 |
-
768,
|
861 |
-
speed,
|
862 |
-
overlay_data=overlay_data,
|
863 |
-
overlay_color=overlay_color,
|
864 |
-
overlay_width=overlay_width,
|
865 |
-
overlay_opacity=overlay_opacity,
|
866 |
-
mp4=mp4,
|
867 |
-
fading=fading,
|
868 |
-
)
|
869 |
-
|
870 |
-
geemap.reduce_gif_size(out_gif)
|
871 |
-
|
872 |
-
empty_text.text(
|
873 |
-
"Right click the GIF to save it to your computerπ"
|
874 |
-
)
|
875 |
-
empty_image.image(out_gif)
|
876 |
-
|
877 |
-
out_mp4 = out_gif.replace(".gif", ".mp4")
|
878 |
-
if mp4 and os.path.exists(out_mp4):
|
879 |
-
with empty_video:
|
880 |
-
st.text(
|
881 |
-
"Right click the MP4 to save it to your computerπ"
|
882 |
-
)
|
883 |
-
st.video(out_gif.replace(".gif", ".mp4"))
|
884 |
-
|
885 |
-
elif collection == "Any Earth Engine ImageCollection":
|
886 |
-
|
887 |
-
with st.form("submit_ts_form"):
|
888 |
-
with st.expander("Customize timelapse"):
|
889 |
-
|
890 |
-
title = st.text_input(
|
891 |
-
"Enter a title to show on the timelapse: ", "Timelapse"
|
892 |
-
)
|
893 |
-
start_date = st.date_input(
|
894 |
-
"Select the start date:", datetime.date(2020, 1, 1)
|
895 |
-
)
|
896 |
-
end_date = st.date_input(
|
897 |
-
"Select the end date:", datetime.date.today()
|
898 |
-
)
|
899 |
-
frequency = st.selectbox(
|
900 |
-
"Select a temporal frequency:",
|
901 |
-
["year", "quarter", "month", "day", "hour", "minute", "second"],
|
902 |
-
index=0,
|
903 |
-
)
|
904 |
-
reducer = st.selectbox(
|
905 |
-
"Select a reducer for aggregating data:",
|
906 |
-
["median", "mean", "min", "max", "sum", "variance", "stdDev"],
|
907 |
-
index=0,
|
908 |
-
)
|
909 |
-
data_format = st.selectbox(
|
910 |
-
"Select a date format to show on the timelapse:",
|
911 |
-
[
|
912 |
-
"YYYY-MM-dd",
|
913 |
-
"YYYY",
|
914 |
-
"YYMM-MM",
|
915 |
-
"YYYY-MM-dd HH:mm",
|
916 |
-
"YYYY-MM-dd HH:mm:ss",
|
917 |
-
"HH:mm",
|
918 |
-
"HH:mm:ss",
|
919 |
-
"w",
|
920 |
-
"M",
|
921 |
-
"d",
|
922 |
-
"D",
|
923 |
-
],
|
924 |
-
index=0,
|
925 |
-
)
|
926 |
-
|
927 |
-
speed = st.slider("Frames per second:", 1, 30, 5)
|
928 |
-
add_progress_bar = st.checkbox("Add a progress bar", True)
|
929 |
-
progress_bar_color = st.color_picker(
|
930 |
-
"Progress bar color:", "#0000ff"
|
931 |
-
)
|
932 |
-
font_size = st.slider("Font size:", 10, 50, 30)
|
933 |
-
font_color = st.color_picker("Font color:", "#ffffff")
|
934 |
-
font_type = st.selectbox(
|
935 |
-
"Select the font type for the title:",
|
936 |
-
["arial.ttf", "alibaba.otf"],
|
937 |
-
index=0,
|
938 |
-
)
|
939 |
-
fading = st.slider(
|
940 |
-
"Fading duration (seconds) for each frame:", 0.0, 3.0, 0.0
|
941 |
-
)
|
942 |
-
mp4 = st.checkbox("Save timelapse as MP4", True)
|
943 |
-
|
944 |
-
empty_text = st.empty()
|
945 |
-
empty_image = st.empty()
|
946 |
-
empty_video = st.container()
|
947 |
-
empty_fire_image = st.empty()
|
948 |
-
|
949 |
-
roi = None
|
950 |
-
if st.session_state.get("roi") is not None:
|
951 |
-
roi = st.session_state.get("roi")
|
952 |
-
out_gif = geemap.temp_file_path(".gif")
|
953 |
-
|
954 |
-
submitted = st.form_submit_button("Submit")
|
955 |
-
if submitted:
|
956 |
-
|
957 |
-
if sample_roi == "Uploaded GeoJSON" and data is None:
|
958 |
-
empty_text.warning(
|
959 |
-
"Steps to create a timelapse: Draw a rectangle on the map -> Export it as a GeoJSON -> Upload it back to the app -> Click the Submit button. Alternatively, you can select a sample ROI from the dropdown list."
|
960 |
-
)
|
961 |
-
else:
|
962 |
-
|
963 |
-
empty_text.text("Computing... Please wait...")
|
964 |
-
try:
|
965 |
-
geemap.create_timelapse(
|
966 |
-
st.session_state.get("ee_asset_id"),
|
967 |
-
start_date=start_date.strftime("%Y-%m-%d"),
|
968 |
-
end_date=end_date.strftime("%Y-%m-%d"),
|
969 |
-
region=roi,
|
970 |
-
frequency=frequency,
|
971 |
-
reducer=reducer,
|
972 |
-
date_format=data_format,
|
973 |
-
out_gif=out_gif,
|
974 |
-
bands=st.session_state.get("bands"),
|
975 |
-
palette=st.session_state.get("palette"),
|
976 |
-
vis_params=st.session_state.get("vis_params"),
|
977 |
-
dimensions=768,
|
978 |
-
frames_per_second=speed,
|
979 |
-
crs="EPSG:3857",
|
980 |
-
overlay_data=overlay_data,
|
981 |
-
overlay_color=overlay_color,
|
982 |
-
overlay_width=overlay_width,
|
983 |
-
overlay_opacity=overlay_opacity,
|
984 |
-
title=title,
|
985 |
-
title_xy=("2%", "90%"),
|
986 |
-
add_text=True,
|
987 |
-
text_xy=("2%", "2%"),
|
988 |
-
text_sequence=None,
|
989 |
-
font_type=font_type,
|
990 |
-
font_size=font_size,
|
991 |
-
font_color=font_color,
|
992 |
-
add_progress_bar=add_progress_bar,
|
993 |
-
progress_bar_color=progress_bar_color,
|
994 |
-
progress_bar_height=5,
|
995 |
-
loop=0,
|
996 |
-
mp4=mp4,
|
997 |
-
fading=fading,
|
998 |
-
)
|
999 |
-
except:
|
1000 |
-
empty_text.error(
|
1001 |
-
"An error occurred while computing the timelapse. You probably requested too much data. Try reducing the ROI or timespan."
|
1002 |
-
)
|
1003 |
-
|
1004 |
-
empty_text.text(
|
1005 |
-
"Right click the GIF to save it to your computerπ"
|
1006 |
-
)
|
1007 |
-
empty_image.image(out_gif)
|
1008 |
-
|
1009 |
-
out_mp4 = out_gif.replace(".gif", ".mp4")
|
1010 |
-
if mp4 and os.path.exists(out_mp4):
|
1011 |
-
with empty_video:
|
1012 |
-
st.text(
|
1013 |
-
"Right click the MP4 to save it to your computerπ"
|
1014 |
-
)
|
1015 |
-
st.video(out_gif.replace(".gif", ".mp4"))
|
1016 |
-
|
1017 |
-
elif collection in [
|
1018 |
-
"MODIS Gap filled Land Surface Temperature Daily",
|
1019 |
-
"MODIS Ocean Color SMI",
|
1020 |
-
]:
|
1021 |
-
|
1022 |
-
with st.form("submit_ts_form"):
|
1023 |
-
with st.expander("Customize timelapse"):
|
1024 |
-
|
1025 |
-
title = st.text_input(
|
1026 |
-
"Enter a title to show on the timelapse: ",
|
1027 |
-
"Surface Temperature",
|
1028 |
-
)
|
1029 |
-
start_date = st.date_input(
|
1030 |
-
"Select the start date:", datetime.date(2018, 1, 1)
|
1031 |
-
)
|
1032 |
-
end_date = st.date_input(
|
1033 |
-
"Select the end date:", datetime.date(2020, 12, 31)
|
1034 |
-
)
|
1035 |
-
frequency = st.selectbox(
|
1036 |
-
"Select a temporal frequency:",
|
1037 |
-
["year", "quarter", "month", "week", "day"],
|
1038 |
-
index=2,
|
1039 |
-
)
|
1040 |
-
reducer = st.selectbox(
|
1041 |
-
"Select a reducer for aggregating data:",
|
1042 |
-
["median", "mean", "min", "max", "sum", "variance", "stdDev"],
|
1043 |
-
index=0,
|
1044 |
-
)
|
1045 |
-
|
1046 |
-
vis_params = st.text_area(
|
1047 |
-
"Enter visualization parameters",
|
1048 |
-
"",
|
1049 |
-
help="Enter a string in the format of a dictionary, such as '{'min': 23, 'max': 32}'",
|
1050 |
-
)
|
1051 |
-
|
1052 |
-
speed = st.slider("Frames per second:", 1, 30, 5)
|
1053 |
-
add_progress_bar = st.checkbox("Add a progress bar", True)
|
1054 |
-
progress_bar_color = st.color_picker(
|
1055 |
-
"Progress bar color:", "#0000ff"
|
1056 |
-
)
|
1057 |
-
font_size = st.slider("Font size:", 10, 50, 30)
|
1058 |
-
font_color = st.color_picker("Font color:", "#ffffff")
|
1059 |
-
font_type = st.selectbox(
|
1060 |
-
"Select the font type for the title:",
|
1061 |
-
["arial.ttf", "alibaba.otf"],
|
1062 |
-
index=0,
|
1063 |
-
)
|
1064 |
-
add_colorbar = st.checkbox("Add a colorbar", True)
|
1065 |
-
colorbar_label = st.text_input(
|
1066 |
-
"Enter the colorbar label:", "Surface Temperature (Β°C)"
|
1067 |
-
)
|
1068 |
-
fading = st.slider(
|
1069 |
-
"Fading duration (seconds) for each frame:", 0.0, 3.0, 0.0
|
1070 |
-
)
|
1071 |
-
mp4 = st.checkbox("Save timelapse as MP4", True)
|
1072 |
-
|
1073 |
-
empty_text = st.empty()
|
1074 |
-
empty_image = st.empty()
|
1075 |
-
empty_video = st.container()
|
1076 |
-
|
1077 |
-
roi = None
|
1078 |
-
if st.session_state.get("roi") is not None:
|
1079 |
-
roi = st.session_state.get("roi")
|
1080 |
-
out_gif = geemap.temp_file_path(".gif")
|
1081 |
-
|
1082 |
-
submitted = st.form_submit_button("Submit")
|
1083 |
-
if submitted:
|
1084 |
-
|
1085 |
-
if sample_roi == "Uploaded GeoJSON" and data is None:
|
1086 |
-
empty_text.warning(
|
1087 |
-
"Steps to create a timelapse: Draw a rectangle on the map -> Export it as a GeoJSON -> Upload it back to the app -> Click the Submit button. Alternatively, you can select a sample ROI from the dropdown list."
|
1088 |
-
)
|
1089 |
-
else:
|
1090 |
-
|
1091 |
-
empty_text.text("Computing... Please wait...")
|
1092 |
-
try:
|
1093 |
-
if (
|
1094 |
-
collection
|
1095 |
-
== "MODIS Gap filled Land Surface Temperature Daily"
|
1096 |
-
):
|
1097 |
-
out_gif = geemap.create_timelapse(
|
1098 |
-
st.session_state.get("ee_asset_id"),
|
1099 |
-
start_date=start_date.strftime("%Y-%m-%d"),
|
1100 |
-
end_date=end_date.strftime("%Y-%m-%d"),
|
1101 |
-
region=roi,
|
1102 |
-
bands=None,
|
1103 |
-
frequency=frequency,
|
1104 |
-
reducer=reducer,
|
1105 |
-
date_format=None,
|
1106 |
-
out_gif=out_gif,
|
1107 |
-
palette=st.session_state.get("palette"),
|
1108 |
-
vis_params=None,
|
1109 |
-
dimensions=768,
|
1110 |
-
frames_per_second=speed,
|
1111 |
-
crs="EPSG:3857",
|
1112 |
-
overlay_data=overlay_data,
|
1113 |
-
overlay_color=overlay_color,
|
1114 |
-
overlay_width=overlay_width,
|
1115 |
-
overlay_opacity=overlay_opacity,
|
1116 |
-
title=title,
|
1117 |
-
title_xy=("2%", "90%"),
|
1118 |
-
add_text=True,
|
1119 |
-
text_xy=("2%", "2%"),
|
1120 |
-
text_sequence=None,
|
1121 |
-
font_type=font_type,
|
1122 |
-
font_size=font_size,
|
1123 |
-
font_color=font_color,
|
1124 |
-
add_progress_bar=add_progress_bar,
|
1125 |
-
progress_bar_color=progress_bar_color,
|
1126 |
-
progress_bar_height=5,
|
1127 |
-
add_colorbar=add_colorbar,
|
1128 |
-
colorbar_label=colorbar_label,
|
1129 |
-
loop=0,
|
1130 |
-
mp4=mp4,
|
1131 |
-
fading=fading,
|
1132 |
-
)
|
1133 |
-
elif collection == "MODIS Ocean Color SMI":
|
1134 |
-
if vis_params.startswith("{") and vis_params.endswith(
|
1135 |
-
"}"
|
1136 |
-
):
|
1137 |
-
vis_params = eval(vis_params)
|
1138 |
-
else:
|
1139 |
-
vis_params = None
|
1140 |
-
out_gif = geemap.modis_ocean_color_timelapse(
|
1141 |
-
st.session_state.get("ee_asset_id"),
|
1142 |
-
start_date=start_date.strftime("%Y-%m-%d"),
|
1143 |
-
end_date=end_date.strftime("%Y-%m-%d"),
|
1144 |
-
region=roi,
|
1145 |
-
bands=st.session_state["band"],
|
1146 |
-
frequency=frequency,
|
1147 |
-
reducer=reducer,
|
1148 |
-
date_format=None,
|
1149 |
-
out_gif=out_gif,
|
1150 |
-
palette=st.session_state.get("palette"),
|
1151 |
-
vis_params=vis_params,
|
1152 |
-
dimensions=768,
|
1153 |
-
frames_per_second=speed,
|
1154 |
-
crs="EPSG:3857",
|
1155 |
-
overlay_data=overlay_data,
|
1156 |
-
overlay_color=overlay_color,
|
1157 |
-
overlay_width=overlay_width,
|
1158 |
-
overlay_opacity=overlay_opacity,
|
1159 |
-
title=title,
|
1160 |
-
title_xy=("2%", "90%"),
|
1161 |
-
add_text=True,
|
1162 |
-
text_xy=("2%", "2%"),
|
1163 |
-
text_sequence=None,
|
1164 |
-
font_type=font_type,
|
1165 |
-
font_size=font_size,
|
1166 |
-
font_color=font_color,
|
1167 |
-
add_progress_bar=add_progress_bar,
|
1168 |
-
progress_bar_color=progress_bar_color,
|
1169 |
-
progress_bar_height=5,
|
1170 |
-
add_colorbar=add_colorbar,
|
1171 |
-
colorbar_label=colorbar_label,
|
1172 |
-
loop=0,
|
1173 |
-
mp4=mp4,
|
1174 |
-
fading=fading,
|
1175 |
-
)
|
1176 |
-
except:
|
1177 |
-
empty_text.error(
|
1178 |
-
"Something went wrong. You probably requested too much data. Try reducing the ROI or timespan."
|
1179 |
-
)
|
1180 |
-
|
1181 |
-
if out_gif is not None and os.path.exists(out_gif):
|
1182 |
-
|
1183 |
-
geemap.reduce_gif_size(out_gif)
|
1184 |
-
|
1185 |
-
empty_text.text(
|
1186 |
-
"Right click the GIF to save it to your computerπ"
|
1187 |
-
)
|
1188 |
-
empty_image.image(out_gif)
|
1189 |
-
|
1190 |
-
out_mp4 = out_gif.replace(".gif", ".mp4")
|
1191 |
-
if mp4 and os.path.exists(out_mp4):
|
1192 |
-
with empty_video:
|
1193 |
-
st.text(
|
1194 |
-
"Right click the MP4 to save it to your computerπ"
|
1195 |
-
)
|
1196 |
-
st.video(out_gif.replace(".gif", ".mp4"))
|
1197 |
-
|
1198 |
-
else:
|
1199 |
-
st.error(
|
1200 |
-
"Something went wrong. You probably requested too much data. Try reducing the ROI or timespan."
|
1201 |
-
)
|
1202 |
-
|
1203 |
-
elif collection == "USDA National Agriculture Imagery Program (NAIP)":
|
1204 |
-
|
1205 |
-
with st.form("submit_naip_form"):
|
1206 |
-
with st.expander("Customize timelapse"):
|
1207 |
-
|
1208 |
-
title = st.text_input(
|
1209 |
-
"Enter a title to show on the timelapse: ", "NAIP Timelapse"
|
1210 |
-
)
|
1211 |
-
|
1212 |
-
years = st.slider(
|
1213 |
-
"Start and end year:",
|
1214 |
-
2003,
|
1215 |
-
today.year,
|
1216 |
-
(2003, today.year),
|
1217 |
-
)
|
1218 |
-
|
1219 |
-
bands = st.selectbox(
|
1220 |
-
"Select a band combination:", ["N/R/G", "R/G/B"], index=0
|
1221 |
-
)
|
1222 |
-
|
1223 |
-
speed = st.slider("Frames per second:", 1, 30, 3)
|
1224 |
-
add_progress_bar = st.checkbox("Add a progress bar", True)
|
1225 |
-
progress_bar_color = st.color_picker(
|
1226 |
-
"Progress bar color:", "#0000ff"
|
1227 |
-
)
|
1228 |
-
font_size = st.slider("Font size:", 10, 50, 30)
|
1229 |
-
font_color = st.color_picker("Font color:", "#ffffff")
|
1230 |
-
font_type = st.selectbox(
|
1231 |
-
"Select the font type for the title:",
|
1232 |
-
["arial.ttf", "alibaba.otf"],
|
1233 |
-
index=0,
|
1234 |
-
)
|
1235 |
-
fading = st.slider(
|
1236 |
-
"Fading duration (seconds) for each frame:", 0.0, 3.0, 0.0
|
1237 |
-
)
|
1238 |
-
mp4 = st.checkbox("Save timelapse as MP4", True)
|
1239 |
-
|
1240 |
-
empty_text = st.empty()
|
1241 |
-
empty_image = st.empty()
|
1242 |
-
empty_video = st.container()
|
1243 |
-
empty_fire_image = st.empty()
|
1244 |
-
|
1245 |
-
roi = None
|
1246 |
-
if st.session_state.get("roi") is not None:
|
1247 |
-
roi = st.session_state.get("roi")
|
1248 |
-
out_gif = geemap.temp_file_path(".gif")
|
1249 |
-
|
1250 |
-
submitted = st.form_submit_button("Submit")
|
1251 |
-
if submitted:
|
1252 |
-
|
1253 |
-
if sample_roi == "Uploaded GeoJSON" and data is None:
|
1254 |
-
empty_text.warning(
|
1255 |
-
"Steps to create a timelapse: Draw a rectangle on the map -> Export it as a GeoJSON -> Upload it back to the app -> Click the Submit button. Alternatively, you can select a sample ROI from the dropdown list."
|
1256 |
-
)
|
1257 |
-
else:
|
1258 |
-
|
1259 |
-
empty_text.text("Computing... Please wait...")
|
1260 |
-
try:
|
1261 |
-
geemap.naip_timelapse(
|
1262 |
-
roi,
|
1263 |
-
years[0],
|
1264 |
-
years[1],
|
1265 |
-
out_gif,
|
1266 |
-
bands=bands.split("/"),
|
1267 |
-
palette=st.session_state.get("palette"),
|
1268 |
-
vis_params=None,
|
1269 |
-
dimensions=768,
|
1270 |
-
frames_per_second=speed,
|
1271 |
-
crs="EPSG:3857",
|
1272 |
-
overlay_data=overlay_data,
|
1273 |
-
overlay_color=overlay_color,
|
1274 |
-
overlay_width=overlay_width,
|
1275 |
-
overlay_opacity=overlay_opacity,
|
1276 |
-
title=title,
|
1277 |
-
title_xy=("2%", "90%"),
|
1278 |
-
add_text=True,
|
1279 |
-
text_xy=("2%", "2%"),
|
1280 |
-
text_sequence=None,
|
1281 |
-
font_type=font_type,
|
1282 |
-
font_size=font_size,
|
1283 |
-
font_color=font_color,
|
1284 |
-
add_progress_bar=add_progress_bar,
|
1285 |
-
progress_bar_color=progress_bar_color,
|
1286 |
-
progress_bar_height=5,
|
1287 |
-
loop=0,
|
1288 |
-
mp4=mp4,
|
1289 |
-
fading=fading,
|
1290 |
-
)
|
1291 |
-
except:
|
1292 |
-
empty_text.error(
|
1293 |
-
"Something went wrong. You either requested too much data or the ROI is outside the U.S."
|
1294 |
-
)
|
1295 |
-
|
1296 |
-
if out_gif is not None and os.path.exists(out_gif):
|
1297 |
-
|
1298 |
-
empty_text.text(
|
1299 |
-
"Right click the GIF to save it to your computerπ"
|
1300 |
-
)
|
1301 |
-
empty_image.image(out_gif)
|
1302 |
-
|
1303 |
-
out_mp4 = out_gif.replace(".gif", ".mp4")
|
1304 |
-
if mp4 and os.path.exists(out_mp4):
|
1305 |
-
with empty_video:
|
1306 |
-
st.text(
|
1307 |
-
"Right click the MP4 to save it to your computerπ"
|
1308 |
-
)
|
1309 |
-
st.video(out_gif.replace(".gif", ".mp4"))
|
1310 |
-
|
1311 |
-
else:
|
1312 |
-
st.error(
|
1313 |
-
"Something went wrong. You either requested too much data or the ROI is outside the U.S."
|
1314 |
-
)
|
|
|
|
|
|
|
apps/vector.py
DELETED
@@ -1,98 +0,0 @@
import os
import fiona
import geopandas as gpd
import streamlit as st


def save_uploaded_file(file_content, file_name):
    """
    Save the uploaded file to a temporary directory
    """
    import tempfile
    import os
    import uuid

    _, file_extension = os.path.splitext(file_name)
    file_id = str(uuid.uuid4())
    file_path = os.path.join(tempfile.gettempdir(), f"{file_id}{file_extension}")

    with open(file_path, "wb") as file:
        file.write(file_content.getbuffer())

    return file_path


def app():

    st.title("Upload Vector Data")

    row1_col1, row1_col2 = st.columns([2, 1])
    width = 950
    height = 600

    with row1_col2:

        backend = st.selectbox(
            "Select a plotting backend", ["folium", "kepler.gl", "pydeck"], index=2
        )

        if backend == "folium":
            import leafmap.foliumap as leafmap
        elif backend == "kepler.gl":
            import leafmap.kepler as leafmap
        elif backend == "pydeck":
            import leafmap.deck as leafmap

        url = st.text_input(
            "Enter a URL to a vector dataset",
            "https://github.com/giswqs/streamlit-geospatial/raw/master/data/us_states.geojson",
        )

        data = st.file_uploader(
            "Upload a vector dataset", type=["geojson", "kml", "zip", "tab"]
        )

        container = st.container()

    if data or url:
        if data:
            file_path = save_uploaded_file(data, data.name)
            layer_name = os.path.splitext(data.name)[0]
        elif url:
            file_path = url
            layer_name = url.split("/")[-1].split(".")[0]

        with row1_col1:
            if file_path.lower().endswith(".kml"):
                fiona.drvsupport.supported_drivers["KML"] = "rw"
                gdf = gpd.read_file(file_path, driver="KML")
            else:
                gdf = gpd.read_file(file_path)
            lon, lat = leafmap.gdf_centroid(gdf)
            if backend == "pydeck":

                column_names = gdf.columns.values.tolist()
                random_column = None
                with container:
                    random_color = st.checkbox("Apply random colors", True)
                    if random_color:
                        random_column = st.selectbox(
                            "Select a column to apply random colors", column_names
                        )

                m = leafmap.Map(center=(lat, lon))
                m.add_gdf(gdf, random_color_column=random_column)
                st.pydeck_chart(m)

            else:
                m = leafmap.Map(center=(lat, lon), draw_export=True)
                m.add_gdf(gdf, layer_name=layer_name)
                # m.add_vector(file_path, layer_name=layer_name)
                if backend == "folium":
                    m.zoom_to_gdf(gdf)
                m.to_streamlit(width=width, height=height)

    else:
        with row1_col1:
            m = leafmap.Map()
            st.pydeck_chart(m)
apps/wms.py
DELETED
@@ -1,68 +0,0 @@
import ast
import streamlit as st
import leafmap.foliumap as leafmap


@st.cache(allow_output_mutation=True)
def get_layers(url):
    options = leafmap.get_wms_layers(url)
    return options


def app():
    st.title("Add Web Map Service (WMS)")
    st.markdown(
        """
    This app is a demonstration of loading Web Map Service (WMS) layers. Simply enter the URL of the WMS service
    in the text box below and press Enter to retrieve the layers. Go to https://apps.nationalmap.gov/services to find
    some WMS URLs if needed.
    """
    )

    row1_col1, row1_col2 = st.columns([3, 1.3])
    width = 800
    height = 600
    layers = None

    with row1_col2:

        esa_landcover = "https://services.terrascope.be/wms/v2"
        url = st.text_input(
            "Enter a WMS URL:", value="https://services.terrascope.be/wms/v2"
        )
        empty = st.empty()

        if url:
            options = get_layers(url)

            default = None
            if url == esa_landcover:
                default = "WORLDCOVER_2020_MAP"
            layers = empty.multiselect(
                "Select WMS layers to add to the map:", options, default=default
            )
            add_legend = st.checkbox("Add a legend to the map", value=True)
            if default == "WORLDCOVER_2020_MAP":
                legend = str(leafmap.builtin_legends["ESA_WorldCover"])
            else:
                legend = ""
            if add_legend:
                legend_text = st.text_area(
                    "Enter a legend as a dictionary {label: color}",
                    value=legend,
                    height=200,
                )

    with row1_col1:
        m = leafmap.Map(center=(36.3, 0), zoom=2)

        if layers is not None:
            for layer in layers:
                m.add_wms_layer(
                    url, layers=layer, name=layer, attribution=" ", transparent=True
                )
            if add_legend and legend_text:
                legend_dict = ast.literal_eval(legend_text)
                m.add_legend(legend_dict=legend_dict)

        m.to_streamlit(width, height)
apps/xy.py
DELETED
@@ -1,65 +0,0 @@
import leafmap.foliumap as leafmap
import pandas as pd
import streamlit as st


def app():

    st.title("Add Points from XY")

    sample_url = "https://raw.githubusercontent.com/giswqs/leafmap/master/examples/data/world_cities.csv"
    url = st.text_input("Enter URL:", sample_url)
    m = leafmap.Map(locate_control=True, plugin_LatLngPopup=False)

    if url:

        try:
            df = pd.read_csv(url)

            columns = df.columns.values.tolist()
            row1_col1, row1_col2, row1_col3, row1_col4, row1_col5 = st.columns(
                [1, 1, 3, 1, 1]
            )

            lon_index = 0
            lat_index = 0

            for col in columns:
                if col.lower() in ["lon", "longitude", "long", "lng"]:
                    lon_index = columns.index(col)
                elif col.lower() in ["lat", "latitude"]:
                    lat_index = columns.index(col)

            with row1_col1:
                x = st.selectbox("Select longitude column", columns, lon_index)

            with row1_col2:
                y = st.selectbox("Select latitude column", columns, lat_index)

            with row1_col3:
                popups = st.multiselect("Select popup columns", columns, columns)

            with row1_col4:
                heatmap = st.checkbox("Add heatmap")

            if heatmap:
                with row1_col5:
                    if "pop_max" in columns:
                        index = columns.index("pop_max")
                    else:
                        index = 0
                    heatmap_col = st.selectbox("Select heatmap column", columns, index)
                    try:
                        m.add_heatmap(df, y, x, heatmap_col)
                    except:
                        st.error("Please select a numeric column")

            try:
                m.add_points_from_xy(df, x, y, popups)
            except:
                st.error("Please select a numeric column")

        except Exception as e:
            st.error(e)

    m.to_streamlit()
environment-bk.yml
DELETED
@@ -1,17 +0,0 @@
name: geo
channels:
  - conda-forge
dependencies:
  - gdal=3.4.3
  - pip
  - pip:
      - geopandas
      - keplergl
      - streamlit
      - localtileserver
      - palettable
      - streamlit-folium
      - streamlit-keplergl
      - streamlit-bokeh-events
      - git+https://github.com/giswqs/leafmap
      - git+https://github.com/giswqs/geemap
index.html
DELETED
@@ -1,39 +0,0 @@
<!DOCTYPE html>
<html>
  <head>
    <title>Streamlit for Geospatial</title>
    <style type="text/css">
      html {
        overflow: auto;
      }
      html,
      body,
      div,
      iframe {
        margin: 0px;
        padding: 0px;
        height: 100%;
        border: none;
      }
      iframe {
        display: block;
        width: 100%;
        border: none;
        overflow-y: auto;
        overflow-x: hidden;
      }
    </style>
  </head>
  <body>
    <iframe
      src="https://share.streamlit.io/giswqs/streamlit-geospatial/app.py"
      frameborder="0"
      marginheight="0"
      marginwidth="0"
      width="100%"
      height="100%"
      scrolling="auto"
    >
    </iframe>
  </body>
</html>
multiapp.py
DELETED
@@ -1,81 +0,0 @@
"""Frameworks for running multiple Streamlit applications as a single app.
"""
import streamlit as st

# app_state = st.experimental_get_query_params()
# app_state = {k: v[0] if isinstance(v, list) else v for k, v in app_state.items()}  # fetch the first item in each query string as we don't have multiple values for each query string key in this example


class MultiApp:
    """Framework for combining multiple streamlit applications.
    Usage:
        def foo():
            st.title("Hello Foo")
        def bar():
            st.title("Hello Bar")
        app = MultiApp()
        app.add_app("Foo", foo)
        app.add_app("Bar", bar)
        app.run()
    It is also possible keep each application in a separate file.
        import foo
        import bar
        app = MultiApp()
        app.add_app("Foo", foo.app)
        app.add_app("Bar", bar.app)
        app.run()
    """

    def __init__(self):
        self.apps = []

    def add_app(self, title, func):
        """Adds a new application.
        Parameters
        ----------
        func:
            the python function to render this app.
        title:
            title of the app. Appears in the dropdown in the sidebar.
        """
        self.apps.append({"title": title, "function": func})

    def run(self):
        app_state = st.experimental_get_query_params()
        app_state = {
            k: v[0] if isinstance(v, list) else v for k, v in app_state.items()
        }  # fetch the first item in each query string as we don't have multiple values for each query string key in this example

        # st.write('before', app_state)

        titles = [a["title"] for a in self.apps]
        functions = [a["function"] for a in self.apps]
        default_radio = titles.index(app_state["page"]) if "page" in app_state else 0

        st.sidebar.title("Navigation")

        title = st.sidebar.radio("Go To", titles, index=default_radio, key="radio")

        app_state["page"] = st.session_state.radio
        # st.write('after', app_state)

        st.experimental_set_query_params(**app_state)
        # st.experimental_set_query_params(**st.session_state.to_dict())
        functions[titles.index(title)]()

        st.sidebar.title("Contribute")
        st.sidebar.info(
            "This is an open source project and you are very welcome to contribute your "
            "comments, questions, resources and apps as "
            "[issues](https://github.com/giswqs/streamlit-geospatial/issues) or "
            "[pull requests](https://github.com/giswqs/streamlit-geospatial/pulls) "
            "to the [source code](https://github.com/giswqs/streamlit-geospatial). "
        )
        st.sidebar.title("About")
        st.sidebar.info(
            """
        This web [app](https://share.streamlit.io/giswqs/streamlit-geospatial/app.py) is maintained by [Qiusheng Wu](https://wetlands.io). You can follow me on social media:
        [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://www.youtube.com/c/QiushengWu) | [LinkedIn](https://www.linkedin.com/in/qiushengwu).
        This web app URL: <https://streamlit.gishub.org>
        """
        )
pages/10_π_Earth_Engine_Datasets.py
CHANGED
@@ -1,4 +1,5 @@
 import ee
+import json
 import streamlit as st
 import geemap.foliumap as geemap
 
@@ -79,8 +80,7 @@ def search_data():
 
     dataset = None
     with col2:
-        keyword = st.text_input(
-            "Enter a keyword to search (e.g., elevation)", "")
+        keyword = st.text_input("Enter a keyword to search (e.g., elevation)", "")
         if keyword:
             ee_assets = geemap.search_ee_data(keyword)
             asset_titles = [x["title"] for x in ee_assets]
@@ -102,8 +102,7 @@ def search_data():
             with st.expander("Show dataset details", True):
                 index = asset_titles.index(dataset)
 
-                html = geemap.ee_data_html(
-                    st.session_state["ee_assets"][index])
+                html = geemap.ee_data_html(st.session_state["ee_assets"][index])
                 html = html.replace("\n", "")
                 st.markdown(html, True)
 
@@ -111,6 +110,14 @@ def search_data():
             uid = ee_assets[index]["uid"]
             st.markdown(f"""**Earth Engine Snippet:** `{ee_id}`""")
             ee_asset = f"{translate[asset_types[index]]}{ee_id}')"
+
+            if ee_asset.startswith("ee.ImageCollection"):
+                ee_asset = ee.ImageCollection(ee_id)
+            elif ee_asset.startswith("ee.Image"):
+                ee_asset = ee.Image(ee_id)
+            elif ee_asset.startswith("ee.FeatureCollection"):
+                ee_asset = ee.FeatureCollection(ee_id)
+
             vis_params = st.text_input(
                 "Enter visualization parameters as a dictionary", {}
             )
@@ -122,12 +129,11 @@ def search_data():
                 if vis_params.strip() == "":
                     # st.error("Please enter visualization parameters")
                     vis_params = "{}"
-                vis =
+                vis = json.loads(vis_params.replace("'", '"'))
                 if not isinstance(vis, dict):
-                    st.error(
-                        "Visualization parameters must be a dictionary")
+                    st.error("Visualization parameters must be a dictionary")
                 try:
-                    Map.addLayer(
+                    Map.addLayer(ee_asset, vis, layer_name)
                 except Exception as e:
                     st.error(f"Error adding layer: {e}")
             except Exception as e:
@@ -143,8 +149,7 @@ def search_data():
 def app():
     st.title("Earth Engine Data Catalog")
 
-    apps = ["Search Earth Engine Data Catalog",
-            "National Land Cover Database (NLCD)"]
+    apps = ["Search Earth Engine Data Catalog", "National Land Cover Database (NLCD)"]
 
     selected_app = st.selectbox("Select an app", apps)
 
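Several of the page fixes in this commit follow the same pattern: user-typed dictionary text that used to be passed to eval() is now parsed with json.loads() after normalizing single quotes. A minimal standalone sketch of that parsing step, assuming an illustrative helper name (parse_vis_params is not part of the repository):

import json


def parse_vis_params(text: str) -> dict:
    """Parse a user-supplied visualization dictionary without eval()."""
    if not text.strip():
        return {}
    # Tolerate single-quoted input such as {'min': 0, 'max': 4000}
    params = json.loads(text.replace("'", '"'))
    if not isinstance(params, dict):
        raise ValueError("Visualization parameters must be a dictionary")
    return params


# Example: parse_vis_params("{'min': 0, 'max': 4000, 'palette': ['006633', 'E5FFCC']}")

Note that the quote-replacement shortcut mirrors the one used in the pages above; it assumes the dictionary values contain no apostrophes.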
pages/13_ποΈ_Global_Building_Footprints.py
CHANGED
@@ -6,7 +6,6 @@ import streamlit as st
 st.set_page_config(layout="wide")
 
 
-@st.cache(persist=True)
 def ee_authenticate(token_name="EARTHENGINE_TOKEN"):
     geemap.ee_initialize(token_name=token_name)
 
@@ -30,25 +29,26 @@ st.title("Global Building Footprints")
 col1, col2 = st.columns([8, 2])
 
 
-@st.
+@st.cache_data
 def read_data(url):
     return gpd.read_file(url)
 
 
-countries =
-
+countries = (
+    "https://github.com/giswqs/geemap/raw/master/examples/data/countries.geojson"
+)
+states = "https://github.com/giswqs/geemap/raw/master/examples/data/us_states.json"
 
 countries_gdf = read_data(countries)
 states_gdf = read_data(states)
 
-country_names = countries_gdf[
-country_names.remove(
-country_names.append(
+country_names = countries_gdf["NAME"].values.tolist()
+country_names.remove("United States of America")
+country_names.append("USA")
 country_names.sort()
-country_names = [name.replace(
-    for name in country_names]
+country_names = [name.replace(".", "").replace(" ", "_") for name in country_names]
 
-state_names = states_gdf[
+state_names = states_gdf["name"].values.tolist()
 
 basemaps = list(geemap.basemaps)
 
@@ -56,41 +56,44 @@ Map = geemap.Map()
 
 with col2:
 
-    basemap = st.selectbox("Select a basemap", basemaps,
-                           index=basemaps.index('HYBRID'))
+    basemap = st.selectbox("Select a basemap", basemaps, index=basemaps.index("HYBRID"))
     Map.add_basemap(basemap)
 
-    country = st.selectbox(
-
+    country = st.selectbox(
+        "Select a country", country_names, index=country_names.index("USA")
+    )
 
-    if country ==
-        state = st.selectbox(
-
+    if country == "USA":
+        state = st.selectbox(
+            "Select a state", state_names, index=state_names.index("Florida")
+        )
         layer_name = state
 
         try:
             fc = ee.FeatureCollection(
-                f
+                f"projects/sat-io/open-datasets/MSBuildings/US/{state}"
+            )
         except:
-            st.error(
+            st.error("No data available for the selected state.")
 
     else:
         try:
             fc = ee.FeatureCollection(
-                f
+                f"projects/sat-io/open-datasets/MSBuildings/{country}"
+            )
        except:
-            st.error(
+            st.error("No data available for the selected country.")
 
         layer_name = country
 
-    color = st.color_picker(
+    color = st.color_picker("Select a color", "#FF5500")
 
-    style = {
+    style = {"fillColor": "00000000", "color": color}
 
     split = st.checkbox("Split-panel map")
 
     if split:
-        left = geemap.ee_tile_layer(fc.style(**style), {},
+        left = geemap.ee_tile_layer(fc.style(**style), {}, "Left")
         right = left
         Map.split_map(left, right)
     else:
pages/1_π·_Timelapse.py
CHANGED
@@ -1,4 +1,5 @@
 import ee
+import json
 import os
 import warnings
 import datetime
@@ -15,7 +16,7 @@ st.set_page_config(layout="wide")
 warnings.filterwarnings("ignore")
 
 
-@st.
+@st.cache_data
 def ee_authenticate(token_name="EARTHENGINE_TOKEN"):
     geemap.ee_initialize(token_name=token_name)
 
@@ -205,7 +206,7 @@ ocean_rois = {
 }
 
 
-@st.
+@st.cache_data
 def uploaded_file_to_gdf(data):
     import tempfile
     import os
@@ -235,8 +236,8 @@ def app():
 
     st.markdown(
         """
-    An interactive web app for creating [Landsat](https://developers.google.com/earth-engine/datasets/catalog/landsat)/[GOES](https://jstnbraaten.medium.com/goes-in-earth-engine-53fbc8783c16) timelapse for any location around the globe.
-    The app was built using [streamlit](https://streamlit.io), [geemap](https://geemap.org), and [Google Earth Engine](https://earthengine.google.com). For more info, check out my streamlit [blog post](https://blog.streamlit.io/creating-satellite-timelapse-with-streamlit-and-earth-engine).
+    An interactive web app for creating [Landsat](https://developers.google.com/earth-engine/datasets/catalog/landsat)/[GOES](https://jstnbraaten.medium.com/goes-in-earth-engine-53fbc8783c16) timelapse for any location around the globe.
+    The app was built using [streamlit](https://streamlit.io), [geemap](https://geemap.org), and [Google Earth Engine](https://earthengine.google.com). For more info, check out my streamlit [blog post](https://blog.streamlit.io/creating-satellite-timelapse-with-streamlit-and-earth-engine).
     """
     )
 
@@ -376,7 +377,9 @@ def app():
                     st.write(
                         cm.plot_colormap(cmap=palette_options, return_fig=True)
                     )
-                    st.session_state["palette"] =
+                    st.session_state["palette"] = json.loads(
+                        palette.replace("'", '"')
+                    )
 
                 if bands:
                     vis_params = st.text_area(
@@ -391,7 +394,9 @@ def app():
                         "{}",
                     )
                     try:
-                        st.session_state["vis_params"] =
+                        st.session_state["vis_params"] = json.loads(
+                            vis_params.replace("'", '"')
+                        )
                         st.session_state["vis_params"]["palette"] = st.session_state[
                             "palette"
                         ]
@@ -412,13 +417,13 @@ def app():
                 MODIS_options = ["Daytime (1:30 pm)", "Nighttime (1:30 am)"]
                 MODIS_option = st.selectbox("Select a MODIS dataset:", MODIS_options)
                 if MODIS_option == "Daytime (1:30 pm)":
-                    st.session_state[
-                        "
-
+                    st.session_state["ee_asset_id"] = (
+                        "projects/sat-io/open-datasets/gap-filled-lst/gf_day_1km"
+                    )
                 else:
-                    st.session_state[
-                        "
-
+                    st.session_state["ee_asset_id"] = (
+                        "projects/sat-io/open-datasets/gap-filled-lst/gf_night_1km"
+                    )
 
                 palette_options = st.selectbox(
                     "Color palette",
@@ -431,7 +436,7 @@ def app():
                     palette_values,
                 )
                 st.write(cm.plot_colormap(cmap=palette_options, return_fig=True))
-                st.session_state["palette"] =
+                st.session_state["palette"] = json.loads(palette.replace("'", '"'))
             elif collection == "MODIS Ocean Color SMI":
                 with st.expander("Show dataset details", False):
                     st.markdown(
@@ -489,7 +494,7 @@ def app():
                     palette_values,
                 )
                 st.write(cm.plot_colormap(cmap=palette_options, return_fig=True))
-                st.session_state["palette"] =
+                st.session_state["palette"] = json.loads(palette.replace("'", '"'))
 
         sample_roi = st.selectbox(
             "Select a sample ROI or upload a GeoJSON file:",
@@ -1341,7 +1346,9 @@ def app():
                     if vis_params.startswith("{") and vis_params.endswith(
                         "}"
                     ):
-                        vis_params =
+                        vis_params = json.loads(
+                            vis_params.replace("'", '"')
+                        )
                     else:
                         vis_params = None
                     out_gif = geemap.modis_ocean_color_timelapse(
pages/2_π _U.S._Housing.py
CHANGED
@@ -95,7 +95,7 @@ def get_data_columns(df, category, frequency="monthly"):
     return cols[1:]
 
 
-@st.
+@st.cache_data
 def get_inventory_data(url):
     df = pd.read_csv(url)
     url = url.lower()
@@ -139,7 +139,7 @@ def get_periods(df):
     return [str(d) for d in list(set(df["month_date_yyyymm"].tolist()))]
 
 
-@st.
+@st.cache_data
 def get_geom_data(category):
 
     prefix = (
@@ -224,7 +224,7 @@ def app():
     st.title("U.S. Real Estate Data and Market Trends")
     st.markdown(
         """**Introduction:** This interactive dashboard is designed for visualizing U.S. real estate data and market trends at multiple levels (i.e., national,
-    state, county, and metro). The data sources include [Real Estate Data](https://www.realtor.com/research/data) from realtor.com and
+    state, county, and metro). The data sources include [Real Estate Data](https://www.realtor.com/research/data) from realtor.com and
     [Cartographic Boundary Files](https://www.census.gov/geographies/mapping-files/time-series/geo/carto-boundary-file.html) from U.S. Census Bureau.
     Several open-source packages are used to process the data and generate the visualizations, e.g., [streamlit](https://streamlit.io),
     [geopandas](https://geopandas.org), [leafmap](https://leafmap.org), and [pydeck](https://deckgl.readthedocs.io).
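The decorator changes above (and in the other pages) move from the deprecated @st.cache to @st.cache_data, which Streamlit recommends for functions that return serializable data such as DataFrames. A short sketch of the cached-loader pattern, with a placeholder URL:

import pandas as pd
import streamlit as st


@st.cache_data
def get_inventory_data(url: str) -> pd.DataFrame:
    # Re-runs only when the URL changes; the returned DataFrame is cached per input.
    return pd.read_csv(url)


# df = get_inventory_data("https://example.com/inventory.csv")  # placeholder URL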
pages/3_πͺ_Split_Map.py
CHANGED
@@ -23,8 +23,8 @@ with st.expander("See source code"):
     with st.echo():
         m = leafmap.Map()
         m.split_map(
-            left_layer=
+            left_layer="ESA WorldCover 2020 S2 FCC", right_layer="ESA WorldCover 2020"
         )
-        m.add_legend(title=
+        m.add_legend(title="ESA Land Cover", builtin_legend="ESA_WorldCover")
 
 m.to_streamlit(height=700)
pages/4_π₯_Heatmap.py
CHANGED
@@ -22,7 +22,7 @@ st.title("Heatmap")
 with st.expander("See source code"):
     with st.echo():
         filepath = "https://raw.githubusercontent.com/giswqs/leafmap/master/examples/data/us_cities.csv"
-        m = leafmap.Map(center=[40, -100], zoom=4
+        m = leafmap.Map(center=[40, -100], zoom=4)
         m.add_heatmap(
             filepath,
             latitude="latitude",
pages/5_π_Marker_Cluster.py
CHANGED
@@ -23,16 +23,16 @@ with st.expander("See source code"):
     with st.echo():
 
         m = leafmap.Map(center=[40, -100], zoom=4)
-        cities =
-        regions =
+        cities = "https://raw.githubusercontent.com/giswqs/leafmap/master/examples/data/us_cities.csv"
+        regions = "https://raw.githubusercontent.com/giswqs/leafmap/master/examples/data/us_regions.geojson"
 
-        m.add_geojson(regions, layer_name=
+        m.add_geojson(regions, layer_name="US Regions")
         m.add_points_from_xy(
             cities,
             x="longitude",
             y="latitude",
-            color_column=
-            icon_names=[
+            color_column="region",
+            icon_names=["gear", "map", "leaf", "globe"],
             spin=True,
             add_legend=True,
         )
pages/6_πΊοΈ_Basemaps.py
CHANGED
@@ -22,7 +22,7 @@ def app():
     st.title("Search Basemaps")
     st.markdown(
         """
-    This app is a demonstration of searching and loading basemaps from [xyzservices](https://github.com/geopandas/xyzservices) and [Quick Map Services (QMS)](https://github.com/nextgis/quickmapservices). Selecting from 1000+ basemaps with a few clicks.
+    This app is a demonstration of searching and loading basemaps from [xyzservices](https://github.com/geopandas/xyzservices) and [Quick Map Services (QMS)](https://github.com/nextgis/quickmapservices). Selecting from 1000+ basemaps with a few clicks.
     """
     )
 
@@ -47,8 +47,7 @@ def app():
         if qms is not None:
             options = options + qms
 
-        tiles = empty.multiselect(
-            "Select XYZ tiles to add to the map:", options)
+        tiles = empty.multiselect("Select XYZ tiles to add to the map:", options)
 
     with row1_col1:
         m = leafmap.Map()
pages/7_π¦_Web_Map_Service.py
CHANGED
@@ -1,4 +1,5 @@
 import ast
+import json
 import streamlit as st
 import leafmap.foliumap as leafmap
 
@@ -18,19 +19,29 @@ st.sidebar.info(
     """
 )
 
+# Define a whitelist of trusted URLs
+trusted_urls = [
+    "https://services.terrascope.be/wms/v2",
+    # Add more trusted URLs here
+]
 
-
+
+@st.cache_data
 def get_layers(url):
     options = leafmap.get_wms_layers(url)
     return options
 
 
+def is_trusted_url(url):
+    return url in trusted_urls
+
+
 def app():
     st.title("Web Map Service (WMS)")
     st.markdown(
         """
-    This app is a demonstration of loading Web Map Service (WMS) layers. Simply enter the URL of the WMS service
-    in the text box below and press Enter to retrieve the layers. Go to https://apps.nationalmap.gov/services to find
+    This app is a demonstration of loading Web Map Service (WMS) layers. Simply enter the URL of the WMS service
+    in the text box below and press Enter to retrieve the layers. Go to https://apps.nationalmap.gov/services to find
     some WMS URLs if needed.
     """
     )
@@ -49,7 +60,14 @@ def app():
         empty = st.empty()
 
         if url:
-
+
+            if is_trusted_url(url):
+                options = get_layers(url)
+                # Process options as needed
+            else:
+                st.error(
+                    "The entered URL is not trusted. Please enter a valid WMS URL."
+                )
 
             default = None
             if url == esa_landcover:
@@ -78,7 +96,7 @@ def app():
                     url, layers=layer, name=layer, attribution=" ", transparent=True
                 )
             if add_legend and legend_text:
-                legend_dict =
+                legend_dict = json.loads(legend_text.replace("'", '"'))
                 m.add_legend(legend_dict=legend_dict)
 
     m.to_streamlit(height=height)
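The WMS fix replaces open fetching of any user-typed endpoint with an exact allow-list check before get_layers() is called. A condensed sketch of that pattern (any entries beyond the Terrascope endpoint would be placeholders):

import streamlit as st

# Exact-match allow-list of WMS endpoints the app is willing to query
trusted_urls = [
    "https://services.terrascope.be/wms/v2",
]


def is_trusted_url(url: str) -> bool:
    return url in trusted_urls


url = st.text_input("Enter a WMS URL:", value=trusted_urls[0])
if url and not is_trusted_url(url):
    st.error("The entered URL is not trusted. Please enter a valid WMS URL.")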
pages/8_ποΈ_Raster_Data_Visualization.py
CHANGED
@@ -1,3 +1,4 @@
+import json
 import os
 import leafmap.foliumap as leafmap
 import leafmap.colormaps as cm
@@ -20,7 +21,7 @@ st.sidebar.info(
 )
 
 
-@st.
+@st.cache_data
 def load_cog_list():
     print(os.getcwd())
     in_txt = os.path.join(os.getcwd(), "data/cog_files.txt")
@@ -28,7 +29,7 @@ def load_cog_list():
     return [line.strip() for line in f.readlines()[1:]]
 
 
-@st.
+@st.cache_data
 def get_palettes():
     return list(cm.palettes.keys())
     # palettes = dir(palettable.matplotlib)[:-16]
@@ -44,6 +45,14 @@ An interactive web app for visualizing local raster datasets and Cloud Optimized
 """
 )
 
+
+def is_trusted_url(url):
+    if url.startswith("https://opendata.digitalglobe.com/events/california-fire-2020/"):
+        return True
+    else:
+        return False
+
+
 row1_col1, row1_col2 = st.columns([2, 1])
 
 with row1_col1:
@@ -58,7 +67,7 @@ with row1_col2:
         cog,
     )
 
-    if url:
+    if is_trusted_url(url):
         try:
             options = leafmap.cog_bands(url)
         except Exception as e:
@@ -73,6 +82,8 @@ with row1_col2:
             pass
         else:
             st.error("Please select one or three bands")
+    else:
+        st.error("Please enter a trusted URL")
 
     add_params = st.checkbox("Add visualization parameters")
     if add_params:
@@ -82,7 +93,7 @@ with row1_col2:
 
     if len(vis_params) > 0:
         try:
-            vis_params =
+            vis_params = json.loads(vis_params.replace("'", '"'))
         except Exception as e:
             st.error(
                 f"Invalid visualization parameters. It should be a dictionary. Error: {e}"
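For the raster page the URL check is prefix-based rather than an exact list, since the sample Cloud Optimized GeoTIFFs all sit under a single bucket. A brief sketch, assuming the same prefix used in the diff:

TRUSTED_PREFIX = "https://opendata.digitalglobe.com/events/california-fire-2020/"


def is_trusted_url(url: str) -> bool:
    # Accept only COG URLs hosted under the known sample-data prefix.
    return url.startswith(TRUSTED_PREFIX)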
pages/9_π²_Vector_Data_Visualization.py
CHANGED
@@ -20,6 +20,17 @@ st.sidebar.info(
 )
 
 
+# Define a whitelist of trusted URLs
+trusted_urls = [
+    "https://github.com/giswqs/streamlit-geospatial/raw/master/data/us_states.geojson",
+    # Add more trusted URLs here
+]
+
+
+def is_trusted_url(url):
+    return url in trusted_urls
+
+
 def save_uploaded_file(file_content, file_name):
     """
     Save the uploaded file to a temporary directory
@@ -70,7 +81,7 @@ def app():
 
     container = st.container()
 
-    if data or url:
+    if data or is_trusted_url(url):
         if data:
             file_path = save_uploaded_file(data, data.name)
             layer_name = os.path.splitext(data.name)[0]
postBuild
DELETED
@@ -1,6 +0,0 @@
# enable nbserverproxy
jupyter serverextension enable --sys-prefix nbserverproxy
# streamlit launches at startup
mv streamlit_call.py ${NB_PYTHON_PREFIX}/lib/python*/site-packages/
# enable streamlit extension
jupyter serverextension enable --sys-prefix streamlit_call
requirements.txt
CHANGED
@@ -1,24 +1,25 @@
 --find-links=https://girder.github.io/large_image_wheels GDAL
 # cartopy
-
+fiona
+folium #==0.13.0
 # ipywidgets<8.0.5
-geemap
+# geemap
 ffmpeg-python
 geopandas
 # jupyter-server-proxy
 # keplergl
-leafmap>=0.
+leafmap>=0.35.2
 # localtileserver
 # nbserverproxy
 owslib
 palettable
 plotly
 streamlit
-streamlit-bokeh-events
+# streamlit-bokeh-events
 streamlit-folium
 # streamlit-keplergl
 # tropycal
 # git+https://github.com/giswqs/leafmap
-
-altair<5
+git+https://github.com/giswqs/geemap
+# altair<5
 
setup.sh
DELETED
@@ -1,18 +0,0 @@
# sudo add-apt-repository ppa:ubuntugis/ppa && sudo apt-get update
# sudo apt-get update
# sudo apt-get install python3-dev
# sudo apt-get install gdal-bin
# sudo apt-get install libgdal-dev
# export CPLUS_INCLUDE_PATH=/usr/include/gdal
# export C_INCLUDE_PATH=/usr/include/gdal
# gdal-config --version
# pip install GDAL==$(gdal-config --version | awk -F'[.]' '{print $1"."$2}') localtileserver

mkdir -p ~/.streamlit/
echo "\
[server]\n\
headless = true\n\
port = $PORT\n\
enableCORS = false\n\
\n\
" > ~/.streamlit/config.toml
streamlit_app.py
DELETED
@@ -1,43 +0,0 @@
import streamlit as st
import leafmap.foliumap as leafmap

st.set_page_config(layout="wide")

st.sidebar.info(
    """
    - Web App URL: <https://streamlit.gishub.org>
    - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
    """
)

st.sidebar.title("Contact")
st.sidebar.info(
    """
    Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://www.youtube.com/c/QiushengWu) | [LinkedIn](https://www.linkedin.com/in/qiushengwu)
    """
)

# Customize page title
st.title("Streamlit for Geospatial Applications")

st.markdown(
    """
    This multipage app template demonstrates various interactive web apps created using [streamlit](https://streamlit.io) and [leafmap](https://leafmap.org). It is an open-source project and you are very welcome to contribute to the [GitHub repository](https://github.com/giswqs/streamlit-multipage-template).
    """
)

st.header("Instructions")

markdown = """
1. For the [GitHub repository](https://github.com/giswqs/streamlit-multipage-template) or [use it as a template](https://github.com/giswqs/streamlit-multipage-template/generate) for your own project.
2. Customize the sidebar by changing the sidebar text and logo in each Python files.
3. Find your favorite emoji from https://emojipedia.org.
4. Add a new app to the `pages/` directory with an emoji in the file name, e.g., `1_π_Chart.py`.

"""

st.markdown(markdown)

m = leafmap.Map(minimap_control=True)
m.add_basemap("OpenTopoMap")
m.to_streamlit(height=500)
streamlit_call.py
DELETED
@@ -1,14 +0,0 @@
from subprocess import Popen


def load_jupyter_server_extension(nbapp):
    """serve the streamlit app"""
    Popen(
        [
            "streamlit",
            "run",
            "Home.py",
            "--browser.serverAddress=0.0.0.0",
            "--server.enableCORS=False",
        ]
    )