Skip to content
FL-disaster-vulnerabilities
For now this is a relatively basic geospatial map of some natural-disaster data. Feel free to borrow the code or expand upon it.
The cloud version of this notebook can be found at: https://www.datacamp.com/datalab/w/a8b8f744-32e9-4226-9680-73a301221667/edit
import folium
from folium import FeatureGroup, LayerControl
from folium.plugins import HeatMap, MiniMap
from branca.colormap import LinearColormap
from folium.features import GeoJsonTooltip
import pandas as pd
import geopandas as gpd
# Pull the disaster-vulnerability table from a Google Sheet (by David E),
# using the gviz CSV export endpoint so pandas can read it directly.
sheet_id = '1EIHK3lGBfIWVhOKcW5QJjjHdhrMeiitwzt3Oh6kugJ4'
sheet_tab = 'data_1-FL'
sheet_csv_url = (
    'https://docs.google.com/spreadsheets/d/'
    f'{sheet_id}/gviz/tq?tqx=out:csv&sheet={sheet_tab}'
)
df = pd.read_csv(sheet_csv_url)
df.head()
# Florida county polygons from ArcGIS
# (https://www.arcgis.com/home/item.html?id=3c164274a80748dda926a046525da610).
# The unfiltered national query previously returned exceededTransferLimit,
# so the WHERE clause restricts results to STATE_ABBR = 'FL' server-side.
url = 'https://services.arcgis.com/P3ePLMYs2RVChkJx/arcgis/rest/services/USA_Counties_Generalized_Boundaries/FeatureServer/0/query?outFields=*&where=STATE_ABBR%3D%27FL%27&f=geojson'

# Read the GeoJSON straight into a GeoDataFrame and inspect it.
counties = gpd.read_file(url)
counties.head()
# Map-layer specs: (display name, source percentile column).  Each becomes a
# toggleable overlay; all start disabled so the map loads with no layer shown.
_LAYER_SPECS = [
    ('Energy Burden',
     'energy_burden_percentile'),
    ('Expected Agricultural Loss due to Natural Hazards Heatmap',
     'expected_agricultural_loss_rate_natural_hazards_risk_index_percentile'),
    ('Expected Building Loss due to Natural Hazards Heatmap',
     'expected_building_loss_rate_natural_hazards_risk_index_percentile'),
    ('Expected Population Loss due to Natural Hazards Heatmap',
     'expected_population_loss_rate_natural_hazards_risk_index_percentile'),
    ('Risk of Fire in 30 years Heatmap',
     'share_of_properties_at_risk_of_fire_in_30_years_percentile'),
    ('Risk of Flood in 30 years Heatmap',
     'share_of_properties_at_risk_of_flood_in_30_years_percentile'),
]
feature_list = [
    {'name': name, 'column': column, 'enabled': False}
    for name, column in _LAYER_SPECS
]
# Columns every downstream step needs: coordinates, the tract geoid, and each
# feature's percentile column.  (The original built a set and then ran a
# dict-unwrapping pass over it, but the 'column' strings had already been
# extracted, so that second line was dead code and is removed.)
required_columns = ['Lat', 'Lon', 'geoid'] + [f['column'] for f in feature_list]
# Drop rows missing any required value, then keep only those columns.
df_clean = df.dropna(subset=required_columns)
df_clean_reduced = df_clean[required_columns]
# df_clean_reduced.head()
# Base map centered on the mean coordinate of the cleaned data.
map_center = [df_clean_reduced['Lat'].mean(), df_clean_reduced['Lon'].mean()]
m = folium.Map(location=map_center, zoom_start=7)

# One green->yellow->red 0-100 scale shared by every layer, so marker colors
# are directly comparable across the different percentile features.
shared_color_scale = LinearColormap(
    colors=['green', 'yellow', 'red'],
    vmin=0,
    vmax=100,
    caption="Percentile",
)
def create_grid(df, feature_column, grid_size=0.1):
    """Aggregate point rows onto a regular lat/lon grid.

    Parameters
    ----------
    df : pandas.DataFrame
        Must contain 'Lat', 'Lon', 'geoid', and *feature_column*.
    feature_column : str
        Column whose values are averaged within each grid cell.
    grid_size : float, optional
        Cell edge length in degrees (default 0.1).

    Returns
    -------
    pandas.DataFrame
        One row per occupied cell, with columns 'lat_bin', 'lon_bin',
        *feature_column* (cell mean), and 'geoid' (first geoid in the cell).
    """
    # Bug fix: the original called np.floor but numpy was never imported,
    # raising NameError at call time.  Floor division produces the same bin
    # edges without numpy.  Using assign() also avoids writing bin columns
    # into the caller's DataFrame, which previously triggered
    # SettingWithCopyWarning because the input is a column-slice.
    binned = df.assign(
        lat_bin=(df['Lat'] // grid_size) * grid_size,
        lon_bin=(df['Lon'] // grid_size) * grid_size,
    )
    grid = binned.groupby(['lat_bin', 'lon_bin']).agg({
        feature_column: 'mean',
        'geoid': lambda x: x.iloc[0],  # representative geoid for the cell
    }).reset_index()
    return grid
def get_county_name(geoid):
    """Map a census geoid to its county name via the 5-digit FIPS prefix.

    Falls back to "Unknown County" when no county in ``counties`` matches.
    """
    fips_prefix = str(geoid)[:5]
    matches = counties.loc[counties['FIPS'] == fips_prefix, 'NAME']
    return "Unknown County" if matches.empty else matches.iloc[0]
# Build one toggleable overlay per configured feature.
for feature in feature_list:
    # Thin the point data by averaging this feature onto a 0.1-degree grid.
    grid = create_grid(df_clean_reduced, feature['column'])
    # A FeatureGroup per feature lets LayerControl toggle it; 'show' honors
    # the per-feature 'enabled' flag (all False by default).
    feature_group = folium.FeatureGroup(name=feature['name'], show=feature['enabled'])
    # One circle marker per grid cell, colored by the shared percentile scale.
    for _, row in grid.iterrows():
        value = row[feature['column']]
        # NOTE(review): per-row lookup is O(cells x counties); fine at this
        # scale, but a merge on the FIPS prefix would scale better.
        county_name = get_county_name(row['geoid'])
        popup_text = f"""
<b>{feature['name']}</b><br>
County: {county_name}<br>
Percentile: {value:.2f}<br>
Geoid: {row['geoid']}
"""
        folium.CircleMarker(
            location=[row['lat_bin'], row['lon_bin']],
            radius=10,
            popup=folium.Popup(popup_text, max_width=300),
            tooltip=f"{feature['name']} Percentile: {value:.2f}",
            color=shared_color_scale(value),
            fill=True,
            fillColor=shared_color_scale(value),
            fillOpacity=0.7,
            weight=0  # no outline stroke; fill carries the color
        ).add_to(feature_group)
    feature_group.add_to(m)
# Overlay county outlines with a hover tooltip showing the county name.
def _boundary_style(_feature):
    # Transparent fill keeps the data markers visible beneath the polygons.
    return {
        'fillColor': 'transparent',
        'color': 'black',
        'weight': 1,
        'fillOpacity': 0.7,
    }

county_tooltip = folium.GeoJsonTooltip(
    fields=['NAME'],
    aliases=['County:'],
    localize=True,
)
folium.GeoJson(
    counties,
    name='County Boundaries',
    style_function=_boundary_style,
    tooltip=county_tooltip,
).add_to(m)

# Attach the shared percentile legend and the layer-selection control.
shared_color_scale.add_to(m)
folium.LayerControl().add_to(m)
# Leaflet's layer control can't hide the colormap legend on its own, so
# inject a small script that shows the legend only while at least one
# overlay checkbox is checked.
legend_visibility_js = """
<script>
document.addEventListener('DOMContentLoaded', function() {
var legendElement = document.querySelector('.leaflet-bottom.leaflet-right');
var layerControl = document.querySelector('.leaflet-control-layers-expanded');
function updateLegendVisibility() {
var checkboxes = layerControl.querySelectorAll('input[type="checkbox"]');
var anyLayerVisible = Array.from(checkboxes).some(cb => cb.checked);
legendElement.style.display = anyLayerVisible ? 'block' : 'none';
}
// Initial check
updateLegendVisibility();
// Add event listeners to checkboxes
layerControl.addEventListener('change', updateLegendVisibility);
});
</script>
"""

# Embed the script in the rendered HTML document.
m.get_root().html.add_child(folium.Element(legend_visibility_js))
Map Notes:
- Toggle a layer to view
- Datapoints are clickable for additional info.
m
Continued work:
Check out the rest of the work the team contributed. If you'd like to continue exploring this data, this was part of challenge 4:
This “getting started” analysis should help answer the following questions:
- Where are communities located that have higher vulnerability to natural disasters?
- Who is represented in those communities?
- What is the housing make-up of those communities?
- Analysis insights and questions: What surprised you from this analysis? What are some limitations of the analysis? What are ways to extend the work?
Next steps I'd suggest: Build vulnerability scores
helpful resources: https://eodatascape.datakind.org/