# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# +
import leafmap.foliumap as leafmap
import rioxarray
import geopandas as gpd
import streamlit as st
from streamlit_folium import st_folium
import json
import pystac_client
import planetary_computer
import odc.stac
from datetime import datetime, timedelta
# -


def extract_geom(gdf, cog):
    # Clip a remote COG to the polygon(s) in gdf, streaming via GDAL's /vsicurl/
    x = (rioxarray.
         open_rasterio('/vsicurl/' + cog, masked=True).
         rio.clip(gdf.geometry.values, gdf.crs, from_disk=True))
    return x


def read_polygon(polygon):
    geojson_str = json.dumps(polygon)
    gdf = gpd.read_file(geojson_str, driver='GeoJSON')
    gdf = gdf.set_crs('epsg:4326')  # set_crs returns a copy, so assign it back
    return gdf


def area_hectares(gdf):
    # NB: EPSG:9822 is the Albers Equal Area *method* code, not a CRS, and
    # fails in to_crs(); EPSG:5070 (NAD83 / Conus Albers, equal-area) is
    # assumed here since the app centers on the western US.
    area = gdf.to_crs("epsg:5070").area / 10000.  # m^2 -> hectares
    return area


def stac_search(box, dates):
    # STAC search for Sentinel-2 imagery in this space/time window
    items = (
        pystac_client.Client.
        open("https://planetarycomputer.microsoft.com/api/stac/v1",
             modifier=planetary_computer.sign_inplace).
        search(collections=["sentinel-2-l2a"],
               bbox=box,
               datetime=dates,
               limit=10,
               query={"eo:cloud_cover": {"lt": 20}}).
        item_collection())
    return items


def compute_nbs(items, box, zoom=6):
    # Time to compute:
    # client = dask.distributed.Client()
    # landsat_bands = ["nir08", "swir16"]
    sentinel_bands = ["B08", "B12", "SCL"]  # NIR, SWIR, and cloud mask
    # The magic of gdalwarp: can also resample, reproject, and aggregate on the fly
    data = odc.stac.load(items,
                         bands=sentinel_bands,
                         bbox=box,
                         # resolution=10 * 2**zoom,
                         # chunks={},  # <-- use Dask
                         )
    # keep only the first and last scenes in the stack
    times = [0, data.time.size - 1]
    data = data.isel(time=times)

    # Compute the Normalized Burn Ratio; bands must be float
    swir = data["B12"].astype("float")
    nir = data["B08"].astype("float")
    # can resample and aggregate in xarray; compute with dask
    nbs = (
        ((nir - swir) / (nir + swir)).
        # resample(time="MS").
        # median("time", keep_attrs=True).
        compute()
    )
    return nbs


@st.cache_data  # st.cache is deprecated; cache_data is the modern equivalent
def usgs_gpd():
    # read_parquet goes through pyarrow/fsspec, so a plain URL is used here
    # (GDAL's /vsicurl/ prefix is not understood by this reader)
    return gpd.read_parquet("https://data.source.coop/cboettig/fire/usgs.parquet")


def nbs(box, date):
    # here we go!
    before_date = date - timedelta(days=7)
    after_date = date + timedelta(days=7)
    search_dates = before_date.strftime("%Y-%m-%d") + "/" + after_date.strftime("%Y-%m-%d")
    items = stac_search(box, search_dates)
    nbs = compute_nbs(items, box)
    # write first and last date to tif
    # nbs.isel(time=0).rio.to_raster(raster_path="before.tif", driver="COG")
    # nbs.isel(time=(nbs.time.size-1)).rio.to_raster(raster_path="after.tif", driver="COG")
    return nbs


# +
def compute_map(items, box):
    nbs = compute_nbs(items, box)
    # doesn't work without serializing data....
    # m2.add_raster(nbs.isel(time=0), colormap="terrain", layer_name="NBR")  # empty?
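    # (the in-memory array renders empty, so the result is serialized to COGs
    # below and split_map is pointed at those files instead; whether newer
    # leafmap versions can take the xarray object directly is untested here)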
    # write the first and last dates to tif
    nbs.isel(time=0).rio.to_raster(raster_path="static/before.tif", driver="COG")
    nbs.isel(time=(nbs.time.size - 1)).rio.to_raster(raster_path="static/after.tif", driver="COG")
    m2 = leafmap.Map(center=[35, -110], zoom=6)
    m2.split_map("static/before.tif", "static/after.tif",
                 left_label="NBR before fire", right_label="NBR after fire")
    return m2


def use_pyplot(items, box):
    nbs = compute_nbs(items, box)
    ## Show map with pyplot
    import matplotlib
    # copy the colormap before mutating it: registered colormaps are read-only
    cmap = matplotlib.colormaps['viridis'].copy()  # viridis is the default colormap for imshow
    cmap.set_bad(color='black')
    im = nbs.plot.imshow(row="time", cmap=cmap, add_colorbar=False, size=4)
    st.pyplot(im.fig, use_container_width=False)


# +
import copy

code_ex = ""

# Fire Polygons, USGS
usgs = "https://data.source.coop/cboettig/fire/usgs.pmtiles"
# calfire =
usgs_style = {
    "version": 8,
    "sources": {
        "source1": {
            "type": "vector",
            "url": "pmtiles://" + usgs,
            "attribution": "USGS"}},
    "layers": [{
        "id": "usgs",
        "source": "source1",
        "source-layer": "usgs",
        "type": "fill",
        "paint": {"fill-color": "#FFA500", "fill-opacity": 0.4}}]}

# hack around pmtiles / draw-tool issue
empty = copy.deepcopy(usgs_style)
empty["layers"][0]["paint"]["fill-opacity"] = 0

# +
st.set_page_config(layout="wide", page_title="Leafmap Explorer", page_icon="🔥")
st.title("🌲🔥🌲🌲 Wildfire Explorer (DRAFT)")

DESCRIPTION = '''
This tool displays wildfire polygons and can also calculate Normalized Burn
Ratios over a region of interest using Sentinel-2 data. Pan and zoom to the
desired location on the map, and mouse over past fires to see each fire's ID,
name, and date. Set a date in the left menu: this can be the date of a focal
fire, or any other date you wish to investigate in the Sentinel-2 data
catalog. Then use the map's rectangle tool (the square icon) to draw a
bounding box. Press the Esc key to exit drawing mode and double-click the box
to begin calculations. Scroll down to see the results.
'''
st.markdown(DESCRIPTION)

m = leafmap.Map(center=[35, -110], zoom=6)

# +
## Map controls sidebar
with st.sidebar:
    date = st.date_input("select date",
                         value=datetime.strptime("2021-06-17", "%Y-%m-%d"),
                         min_value=datetime.strptime("2015-06-27", "%Y-%m-%d"),  # Sentinel-2 records begin here
                         )
    with st.expander("🔥 Wildfire"):
        # Fire Polygons, USGS
        fire_layer = st.toggle("USGS Fire boundaries", True)
        if fire_layer:
            m.add_pmtiles(usgs, name="Fire", style=usgs_style, overlay=True,
                          show=True, zoom_to_layer=False, tooltip=False)
            m.add_pmtiles(usgs, name="tooltip", style=empty, overlay=True,
                          show=True, zoom_to_layer=False, tooltip=True)

# m.add_raster("after.tif", colormap="terrain", layer_name="NBR")
# m.set_center(lon=center["lng"], lat=center["lat"], zoom=zoom)
st_data = m.to_streamlit(height=500, bidirectional=True)

# st.divider()
# x = m.st_last_draw(st_data)
# bounds = m.st_map_bounds(st_data)

polygon = st_data["last_active_drawing"]
if polygon is not None:
    st.markdown(f"Date selected: {date}. Searching the Sentinel-2 catalog...")
    gdf = read_polygon(polygon)
    box = gdf.total_bounds
    before_date = date - timedelta(days=14)
    after_date = date + timedelta(days=30)
    search_dates = before_date.strftime("%Y-%m-%d") + "/" + after_date.strftime("%Y-%m-%d")
    items = stac_search(box, search_dates)
    out = items.to_dict()
    # out
    n = len(out["features"])
    st.markdown(f"{n} images found. Computing NBR...")
    m2 = compute_map(items, box)
    m2.to_streamlit()
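
# +
# A minimal sketch (not wired into the app above) of how the before/after NBR
# pair could be collapsed into a single burn-severity layer: dNBR, the
# difference between pre-fire and post-fire NBR, is the standard severity
# metric. The function name `compute_dnbr` and the example output path are
# illustrative assumptions, not existing pieces of this app.
def compute_dnbr(items, box):
    nbs = compute_nbs(items, box)  # time=0 is the pre-fire scene, the last index post-fire
    dnbr = nbs.isel(time=0) - nbs.isel(time=nbs.time.size - 1)
    return dnbr  # higher dNBR indicates more severe burning

# usage sketch:
# dnbr = compute_dnbr(items, box)
# dnbr.rio.to_raster("static/dnbr.tif", driver="COG")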