import subprocess
import sys


def install_package(package, import_name=None):
    """Install a pip package if its module cannot be imported.

    `import_name` handles packages whose pip name differs from the
    module name (e.g. beautifulsoup4 installs the `bs4` module);
    without it, such packages would be reinstalled on every run.
    """
    try:
        __import__(import_name or package)
    except ImportError:
        print(f"Installing {package}...")
        subprocess.check_call([sys.executable, "-m", "pip", "install", package])


# Map pip package names to import names (None = same name).
required_packages = {
    'gradio': None, 'pandas': None, 'requests': None,
    'beautifulsoup4': 'bs4', 'plotly': None, 'folium': None,
    'numpy': None, 'geopy': None,
}

for package, import_name in required_packages.items():
    install_package(package, import_name)


import gradio as gr
import pandas as pd
import requests
from bs4 import BeautifulSoup
import plotly.express as px
import plotly.graph_objects as go
import folium
from folium.plugins import MarkerCluster, HeatMap
import re
import numpy as np
from urllib.parse import urljoin
import time
import json
import os
from geopy.distance import geodesic
from datetime import datetime, timedelta
import warnings

warnings.filterwarnings('ignore')


def dms_to_decimal(degrees, minutes, seconds, direction):
    """Convert degrees/minutes/seconds plus a hemisphere to signed decimal degrees."""
    decimal = float(degrees) + float(minutes) / 60 + float(seconds) / 3600
    if direction in ['S', 'W', '-']:
        decimal = -decimal
    return decimal
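
# Worked example (values chosen for illustration, not from live data):
#   dms_to_decimal(34, 12, 36, 'N')  -> 34 + 12/60 + 36/3600 = 34.21
#   dms_to_decimal(118, 30, 0, 'W')  -> -(118 + 30/60)       = -118.5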


def parse_dms_coordinates(text):
    """Parse a DMS coordinate string (e.g. from an InciWeb page) into
    (latitude, longitude) decimal degrees. Returns (None, None) on failure."""
    if not text:
        return None, None

    # Strip markdown bold markers and collapse newlines.
    text = text.replace('**', '').replace('\n', ' ').strip()

    # Degrees, minutes, seconds followed by a hemisphere letter or the
    # words "Latitude"/"Longitude" (InciWeb writes seconds as '').
    lat_pattern = r'(\d+)°\s*(\d+)\'\s*(\d+\.?\d*)\'?\'\s*(?:Latitude|[NS])'
    lon_pattern = r'(-?\d+)°\s*(\d+)\'\s*(\d+\.?\d*)\'?\'\s*(?:Longitude|[EW])'

    lat_match = re.search(lat_pattern, text)
    lon_match = re.search(lon_pattern, text)

    latitude = None
    longitude = None

    if lat_match:
        lat_deg, lat_min, lat_sec = lat_match.groups()
        lat_dir = 'S' if 'S' in text else 'N'
        latitude = dms_to_decimal(lat_deg, lat_min, lat_sec, lat_dir)

    if lon_match:
        lon_deg, lon_min, lon_sec = lon_match.groups()
        lon_dir = 'W' if ('W' in text or '-' in lon_deg) else 'E'
        longitude = dms_to_decimal(lon_deg.replace('-', ''), lon_min, lon_sec, lon_dir)

    return latitude, longitude
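
# Example input this parser targets (format assumed from InciWeb pages):
#   "34° 12' 36'' Latitude, -118° 30' 0'' Longitude"  ->  (34.21, -118.5)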


def fetch_firms_data():
    """
    Fetch NASA FIRMS VIIRS active fire data for the last 24 hours.
    Filters for USA only and returns relevant fire hotspot data with
    cleaned numeric fields.
    """
    firms_url = "https://firms.modaps.eosdis.nasa.gov/data/active_fire/viirs/csv/J1_VIIRS_C2_Global_24h.csv"

    try:
        print("Fetching NASA FIRMS data...")
        response = requests.get(firms_url, timeout=60)
        response.raise_for_status()

        from io import StringIO
        firms_df = pd.read_csv(StringIO(response.text))

        print(f"Retrieved {len(firms_df)} global fire hotspots")

        # Keep only hotspots inside rough bounding boxes for the
        # contiguous USA, Alaska, and Hawaii.
        usa_firms = firms_df[
            (
                # Contiguous USA
                ((firms_df['latitude'] >= 24.5) & (firms_df['latitude'] <= 49.0) &
                 (firms_df['longitude'] >= -125.0) & (firms_df['longitude'] <= -66.0)) |
                # Alaska
                ((firms_df['latitude'] >= 54.0) & (firms_df['latitude'] <= 72.0) &
                 (firms_df['longitude'] >= -180.0) & (firms_df['longitude'] <= -130.0)) |
                # Hawaii
                ((firms_df['latitude'] >= 18.0) & (firms_df['latitude'] <= 23.0) &
                 (firms_df['longitude'] >= -162.0) & (firms_df['longitude'] <= -154.0))
            )
        ].copy()

        print(f"Filtered to {len(usa_firms)} USA fire hotspots")

        # FRP (Fire Radiative Power) sometimes arrives as text; coerce to numbers.
        if 'frp' in usa_firms.columns:
            usa_firms['frp'] = usa_firms['frp'].astype(str).str.replace('nominal', '', regex=False)
            usa_firms['frp'] = usa_firms['frp'].str.replace(r'[^\d\.]', '', regex=True)
            usa_firms['frp'] = usa_firms['frp'].replace('', '0')
            usa_firms['frp'] = pd.to_numeric(usa_firms['frp'], errors='coerce').fillna(0)
            print(f"Cleaned FRP column, mean FRP: {usa_firms['frp'].mean():.2f}")

        # Confidence can be textual ("nominal"); default missing values to 50%.
        if 'confidence' in usa_firms.columns:
            usa_firms['confidence'] = usa_firms['confidence'].astype(str).str.replace('nominal', '', regex=False)
            usa_firms['confidence'] = usa_firms['confidence'].str.replace(r'[^\d\.]', '', regex=True)
            usa_firms['confidence'] = usa_firms['confidence'].replace('', '50')
            usa_firms['confidence'] = pd.to_numeric(usa_firms['confidence'], errors='coerce').fillna(50)
            print(f"Cleaned confidence column, mean confidence: {usa_firms['confidence'].mean():.2f}")

        # Combine acquisition date and HHMM time into one timestamp column.
        if 'acq_date' in usa_firms.columns and 'acq_time' in usa_firms.columns:
            try:
                usa_firms['datetime'] = pd.to_datetime(
                    usa_firms['acq_date'] + ' ' + usa_firms['acq_time'].astype(str).str.zfill(4),
                    format='%Y-%m-%d %H%M',
                    errors='coerce'
                )
                usa_firms = usa_firms.sort_values('datetime', ascending=False)
                print(f"Added datetime column, latest detection: {usa_firms['datetime'].max()}")
            except Exception as e:
                print(f"Warning: Could not create datetime column: {e}")

        return usa_firms

    except Exception as e:
        print(f"Error fetching FIRMS data: {e}")
        return pd.DataFrame()
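
# Quick sanity check (requires network access; column names assumed from the
# VIIRS CSV layout -- verify against the actual download):
#   firms = fetch_firms_data()
#   print(firms[['latitude', 'longitude', 'frp', 'confidence']].head())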


def match_firms_to_inciweb(inciweb_df, firms_df, max_distance_km=50):
    """
    Match FIRMS hotspots to InciWeb incidents based on geographic proximity.
    Enhanced with better error handling and data cleaning.
    """
    if firms_df.empty or inciweb_df.empty:
        print("Warning: Empty dataframes passed to matching function")
        return inciweb_df

    try:
        print(f"Matching {len(firms_df)} FIRMS hotspots to {len(inciweb_df)} InciWeb incidents...")

        # Initialize the enrichment columns with neutral defaults.
        inciweb_df = inciweb_df.copy()
        inciweb_df['firms_hotspots'] = 0
        inciweb_df['total_frp'] = 0.0
        inciweb_df['avg_confidence'] = 0.0
        inciweb_df['latest_hotspot'] = None
        inciweb_df['is_active'] = False
        inciweb_df['hotspot_coords'] = None
        inciweb_df['activity_level'] = 'Unknown'

        # Only incidents with coordinates can be matched.
        incidents_with_coords = inciweb_df[
            (inciweb_df['latitude'].notna()) & (inciweb_df['longitude'].notna())
        ].copy()

        print(f"Processing {len(incidents_with_coords)} incidents with coordinates...")

        for idx, incident in incidents_with_coords.iterrows():
            try:
                incident_coords = (float(incident['latitude']), float(incident['longitude']))

                matched_hotspots = []

                # O(incidents x hotspots) geodesic scan; acceptable at this
                # scale, but see the vectorized prefilter sketch below.
                for _, hotspot in firms_df.iterrows():
                    try:
                        hotspot_lat = float(hotspot['latitude'])
                        hotspot_lon = float(hotspot['longitude'])
                        hotspot_coords = (hotspot_lat, hotspot_lon)

                        distance = geodesic(incident_coords, hotspot_coords).kilometers

                        if distance <= max_distance_km:
                            clean_hotspot = {
                                'latitude': hotspot_lat,
                                'longitude': hotspot_lon,
                                'frp': float(hotspot.get('frp', 0)) if pd.notna(hotspot.get('frp')) else 0.0,
                                'confidence': float(hotspot.get('confidence', 50)) if pd.notna(hotspot.get('confidence')) else 50.0,
                                'datetime': hotspot.get('datetime', None),
                                'distance': distance
                            }
                            matched_hotspots.append(clean_hotspot)

                    except (ValueError, TypeError, KeyError):
                        # Skip malformed hotspot rows.
                        continue

                if matched_hotspots:
                    num_hotspots = len(matched_hotspots)
                    total_frp = sum(hs['frp'] for hs in matched_hotspots)
                    avg_confidence = sum(hs['confidence'] for hs in matched_hotspots) / num_hotspots

                    latest_hotspot = None
                    hotspot_times = [hs['datetime'] for hs in matched_hotspots if hs['datetime'] is not None]
                    if hotspot_times:
                        latest_hotspot = max(hotspot_times)

                    # Classify activity by hotspot count and total fire power.
                    if num_hotspots >= 20 and total_frp >= 100:
                        activity_level = 'Very High'
                    elif num_hotspots >= 10 and total_frp >= 50:
                        activity_level = 'High'
                    elif num_hotspots >= 5 and total_frp >= 20:
                        activity_level = 'Medium'
                    elif num_hotspots >= 1:
                        activity_level = 'Low'
                    else:
                        activity_level = 'Minimal'

                    inciweb_df.at[idx, 'firms_hotspots'] = num_hotspots
                    inciweb_df.at[idx, 'total_frp'] = total_frp
                    inciweb_df.at[idx, 'avg_confidence'] = avg_confidence
                    inciweb_df.at[idx, 'latest_hotspot'] = latest_hotspot
                    inciweb_df.at[idx, 'is_active'] = True
                    inciweb_df.at[idx, 'activity_level'] = activity_level

                    # Store up to 10 hotspot coordinates as a string so the
                    # column survives CSV round-trips (parsed later with ast).
                    hotspot_coords_str = str([(hs['latitude'], hs['longitude'], hs['frp'])
                                              for hs in matched_hotspots[:10]])
                    inciweb_df.at[idx, 'hotspot_coords'] = hotspot_coords_str

                    print(f"  {incident['name']}: {num_hotspots} hotspots, {total_frp:.1f} FRP, {activity_level} activity")

            except Exception as e:
                print(f"  Error processing incident {incident.get('name', 'Unknown')}: {e}")
                continue

        active_count = (inciweb_df['is_active'] == True).sum()
        total_with_coords = len(incidents_with_coords)

        print(f"Found {active_count} active incidents out of {total_with_coords} with coordinates")

        return inciweb_df

    except Exception as e:
        print(f"Error in match_firms_to_inciweb: {e}")

        # Fall back to the original dataframe, ensuring the expected
        # columns exist with sensible defaults.
        inciweb_df = inciweb_df.copy()
        defaults = {
            'firms_hotspots': 0,
            'total_frp': 0.0,
            'avg_confidence': 0.0,
            'latest_hotspot': None,
            'is_active': False,
            'hotspot_coords': None,
            'activity_level': 'Unknown',
        }
        for col, default_val in defaults.items():
            if col not in inciweb_df.columns:
                inciweb_df[col] = default_val
        return inciweb_df
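

# Optional optimization (a sketch, not wired into the function above): the
# nested geodesic loop is O(incidents x hotspots). A NumPy equirectangular
# prefilter can discard far-away hotspots cheaply before an exact geodesic
# check. The helper name and approach here are illustrative assumptions.
def _nearby_hotspot_mask(incident_lat, incident_lon, firms_df, max_distance_km=50):
    """Return a boolean mask of hotspots within roughly max_distance_km."""
    lat = firms_df['latitude'].to_numpy(dtype=float)
    lon = firms_df['longitude'].to_numpy(dtype=float)
    # Equirectangular approximation: adequate for a coarse 50 km cutoff.
    km_per_deg_lat = 111.32
    km_per_deg_lon = 111.32 * np.cos(np.radians(incident_lat))
    dist_km = np.hypot((lat - incident_lat) * km_per_deg_lat,
                       (lon - incident_lon) * km_per_deg_lon)
    return dist_km <= max_distance_km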


def fetch_inciweb_data():
    """Scrape the InciWeb accessible view for the current incident list."""
    base_url = "https://inciweb.wildfire.gov"
    accessible_url = urljoin(base_url, "/accessible-view")

    try:
        print(f"Fetching data from: {accessible_url}")
        response = requests.get(accessible_url, timeout=30)
        response.raise_for_status()
    except requests.exceptions.RequestException as e:
        print(f"Error fetching data from InciWeb: {e}")
        return pd.DataFrame()

    soup = BeautifulSoup(response.content, "html.parser")

    incidents = []

    # Each incident is an anchor pointing at /incident-information/<id>.
    incident_links = soup.find_all("a", href=re.compile(r"/incident-information/"))

    for link in incident_links:
        try:
            incident = {}

            incident["name"] = link.text.strip()
            incident["link"] = urljoin(base_url, link.get("href"))
            incident["id"] = link.get("href").split("/")[-1]

            # The anchor sits in the first cell of a table row; the sibling
            # cells hold type, location, size, and last-updated values.
            row = link.parent
            if row and row.name == "td":
                row_cells = row.parent.find_all("td")

                if len(row_cells) >= 5:
                    incident_type_cell = row_cells[1] if len(row_cells) > 1 else None
                    if incident_type_cell:
                        incident["type"] = incident_type_cell.text.strip()

                    location_cell = row_cells[2] if len(row_cells) > 2 else None
                    if location_cell:
                        incident["location"] = location_cell.text.strip()
                        # Prefer a two-letter state code; otherwise fall back
                        # to the first comma-separated token.
                        state_match = re.search(r'([A-Z]{2})', incident["location"])
                        if state_match:
                            incident["state"] = state_match.group(1)
                        else:
                            state_parts = incident["location"].split(',')
                            incident["state"] = state_parts[0].strip() if state_parts else None

                    size_cell = row_cells[3] if len(row_cells) > 3 else None
                    if size_cell:
                        size_text = size_cell.text.strip()
                        # Some rows carry the placeholder text "nominal".
                        size_text = size_text.replace('nominal', '').strip()

                        incident["size"] = None
                        if size_text:
                            size_match = re.search(r'(\d+(?:,\d+)*)', size_text)
                            if size_match:
                                try:
                                    incident["size"] = int(size_match.group(1).replace(',', ''))
                                except ValueError:
                                    pass

                    updated_cell = row_cells[4] if len(row_cells) > 4 else None
                    if updated_cell:
                        incident["updated"] = updated_cell.text.strip()

            incidents.append(incident)
        except Exception as e:
            print(f"Error processing incident: {e}")
            continue

    df = pd.DataFrame(incidents)

    # Guarantee the expected columns exist even if scraping missed them.
    expected_columns = {
        "size": None,
        "type": "Unknown",
        "location": "Unknown",
        "state": None,
        "updated": "Unknown"
    }

    for col, default_val in expected_columns.items():
        if col not in df.columns:
            df[col] = default_val

    # Normalize size to a numeric acreage (strip text, commas, placeholders).
    if 'size' in df.columns:
        df['size'] = df['size'].astype(str).str.replace('nominal', '', regex=False)
        df['size'] = df['size'].str.replace(r'[^\d,]', '', regex=True)
        df['size'] = df['size'].str.replace(',', '', regex=False)
        df['size'] = pd.to_numeric(df['size'].replace('', None), errors="coerce")

    print(f"Fetched {len(df)} incidents")
    return df
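
# Example of a parsed record (field values are illustrative, not live data):
#   {'name': 'Example Fire', 'id': 'example-fire',
#    'link': 'https://inciweb.wildfire.gov/incident-information/example-fire',
#    'type': 'Wildfire', 'location': 'Example County, CA',
#    'state': 'CA', 'size': 1250, 'updated': '2 hours ago'}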


def get_incident_coordinates_basic(incident_url):
    """Enhanced coordinate extraction with proper DMS parsing."""
    try:
        print(f"  Fetching coordinates from: {incident_url}")
        response = requests.get(incident_url, timeout=20)
        response.raise_for_status()
        soup = BeautifulSoup(response.content, "html.parser")

        # Strategy 1: the "Coordinates" row of the incident details table.
        for row in soup.find_all('tr'):
            th = row.find('th')
            if th and 'Coordinates' in th.get_text(strip=True):
                coord_cell = row.find('td')
                if coord_cell:
                    coord_content = coord_cell.get_text(strip=True)
                    print(f"  Found coordinate cell content: {coord_content}")

                    # Latitude components appear before the word "Latitude".
                    lat_deg_match = re.search(r'(\d+)\s*°.*?Latitude', coord_content)
                    lat_min_match = re.search(r'(\d+)\s*\'.*?Latitude', coord_content)
                    lat_sec_match = re.search(r'(\d+\.?\d*)\s*\'\'.*?Latitude', coord_content)

                    # Longitude components appear after "Latitude".
                    longitude_part = coord_content[coord_content.find('Latitude'):] if 'Latitude' in coord_content else coord_content
                    lon_deg_match = re.search(r'[-]?\s*(\d+)\s*°', longitude_part)
                    lon_min_match = re.search(r'(\d+)\s*\'', longitude_part)

                    # Longitude seconds are sometimes wrapped in a styled div.
                    lon_sec_div = coord_cell.find('div', class_=lambda c: c and 'margin-right' in c)
                    if lon_sec_div:
                        lon_sec_value = lon_sec_div.get_text(strip=True)
                        lon_sec_match = re.search(r'(\d+\.?\d*)', lon_sec_value)
                        print(f"  Found longitude seconds in div: {lon_sec_value}")
                    else:
                        lon_sec_match = re.search(r'(\d+\.?\d*)\s*\'\'', longitude_part)

                    print(f"  Parsed components - lat_deg: {lat_deg_match.group(1) if lat_deg_match else None}, "
                          f"lat_min: {lat_min_match.group(1) if lat_min_match else None}, "
                          f"lat_sec: {lat_sec_match.group(1) if lat_sec_match else None}")
                    print(f"  lon_deg: {lon_deg_match.group(1) if lon_deg_match else None}, "
                          f"lon_min: {lon_min_match.group(1) if lon_min_match else None}, "
                          f"lon_sec: {lon_sec_match.group(1) if lon_sec_match else None}")

                    if lat_deg_match and lat_min_match and lat_sec_match and lon_deg_match and lon_min_match and lon_sec_match:
                        lat_deg = float(lat_deg_match.group(1))
                        lat_min = float(lat_min_match.group(1))
                        lat_sec = float(lat_sec_match.group(1))

                        lon_deg = float(lon_deg_match.group(1))
                        lon_min = float(lon_min_match.group(1))
                        lon_sec = float(lon_sec_match.group(1))

                        # USA longitudes are west of Greenwich, hence the negation.
                        latitude = lat_deg + lat_min / 60 + lat_sec / 3600
                        longitude = -(lon_deg + lon_min / 60 + lon_sec / 3600)

                        print(f"  Converted DMS to decimal: {latitude}, {longitude}")
                        return latitude, longitude

        # Strategy 2: geo.position meta tags.
        meta_tags = soup.find_all("meta")
        for meta in meta_tags:
            if meta.get("name") == "geo.position":
                coords = meta.get("content", "").split(";")
                if len(coords) >= 2:
                    try:
                        lat, lon = float(coords[0].strip()), float(coords[1].strip())
                        print(f"  Found coordinates via meta tags: {lat}, {lon}")
                        return lat, lon
                    except ValueError:
                        pass

        # Strategy 3: coordinates embedded in map scripts.
        script_tags = soup.find_all("script")
        for script in script_tags:
            if not script.string:
                continue

            script_text = script.string

            # Leaflet maps expose a setView([lat, lon], zoom) call.
            if "L.map" in script_text or "leaflet" in script_text.lower():
                setview_match = re.search(r'setView\s*\(\s*\[\s*(-?\d+\.?\d*)\s*,\s*(-?\d+\.?\d*)\s*\]',
                                          script_text, re.IGNORECASE)
                if setview_match:
                    lat, lon = float(setview_match.group(1)), float(setview_match.group(2))
                    print(f"  Found coordinates via map script: {lat}, {lon}")
                    return lat, lon

            # Fall back to bare lat/lon variable assignments.
            lat_match = re.search(r'(?:lat|latitude)\s*[=:]\s*(-?\d+\.?\d*)', script_text, re.IGNORECASE)
            lon_match = re.search(r'(?:lon|lng|longitude)\s*[=:]\s*(-?\d+\.?\d*)', script_text, re.IGNORECASE)

            if lat_match and lon_match:
                lat, lon = float(lat_match.group(1)), float(lon_match.group(1))
                print(f"  Found coordinates via script variables: {lat}, {lon}")
                return lat, lon

        # Strategy 4: hardcoded fallback coordinates.
        known_coords = get_known_incident_coordinates(incident_url)
        if known_coords:
            print(f"  Using known coordinates: {known_coords}")
            return known_coords

        print(f"  No coordinates found for {incident_url}")
        return None, None

    except Exception as e:
        print(f"  Error extracting coordinates from {incident_url}: {e}")
        return None, None


def get_known_incident_coordinates(incident_url):
    """Fallback coordinates for some known incident locations."""
    incident_id = incident_url.split('/')[-1] if incident_url else ""

    # Rough, hand-picked coordinates (approximate incident areas, not
    # authoritative locations).
    known_locations = {
        'horse-fire': (42.0, -104.0),
        'aggie-creek-fire': (64.0, -153.0),
        'big-creek-fire': (47.0, -114.0),
        'conner-fire': (39.5, -116.0),
        'trout-fire': (35.0, -106.0),
        'basin-fire': (34.0, -112.0),
        'rowena-fire': (45.0, -121.0),
        'post-fire': (44.0, -115.0),
    }

    for key, coords in known_locations.items():
        if key in incident_id.lower():
            return coords

    return None


def add_coordinates_to_incidents(df, max_incidents=30):
    """Add coordinates to incidents with improved success rate."""
    df = df.copy()
    df['latitude'] = None
    df['longitude'] = None

    # Prioritize recently updated wildfires, then fill with other incidents.
    recent_wildfires = df[
        (df['type'].str.contains('Wildfire', na=False)) &
        (df['updated'].str.contains('ago|seconds|minutes|hours', na=False))
    ].head(max_incidents // 2)

    other_incidents = df[
        ~df.index.isin(recent_wildfires.index)
    ].head(max_incidents // 2)

    sample_df = pd.concat([recent_wildfires, other_incidents]).head(max_incidents)

    print(f"Getting coordinates for {len(sample_df)} incidents (prioritizing recent wildfires)...")

    success_count = 0
    for idx, row in sample_df.iterrows():
        if pd.notna(row.get("link")):
            try:
                lat, lon = get_incident_coordinates_basic(row["link"])
                if lat is not None and lon is not None:
                    # Sanity bounds: USA including Alaska and Hawaii.
                    if 18.0 <= lat <= 72.0 and -180.0 <= lon <= -65.0:
                        df.at[idx, 'latitude'] = lat
                        df.at[idx, 'longitude'] = lon
                        success_count += 1
                        print(f"  ✅ {row['name']}: {lat:.4f}, {lon:.4f}")
                    else:
                        print(f"  ❌ {row['name']}: Invalid coordinates {lat}, {lon}")
                else:
                    print(f"  ⚠️ {row['name']}: No coordinates found")

                # Be polite to the InciWeb server between requests.
                time.sleep(0.3)

            except Exception as e:
                print(f"  ❌ Error getting coordinates for {row['name']}: {e}")
                continue

    print(f"Successfully extracted coordinates for {success_count}/{len(sample_df)} incidents")
    return df
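
# Sanity check for the distance matching (illustrative values):
#   geodesic((39.0, -105.0), (39.1, -105.0)).kilometers  # ~11.1 km
# so the 50 km matching radius spans roughly 0.45 degrees of latitude.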


def generate_enhanced_map(df, firms_df):
    """Generate map showing only active InciWeb incidents and their associated FIRMS hotspots."""
    try:
        print("Starting focused map generation (active fires only)...")

        # Center on the contiguous USA.
        m = folium.Map(location=[39.8283, -98.5795], zoom_start=4)

        # df.get returns a scalar when the column is missing, which cannot
        # index a DataFrame, so guard on column presence explicitly.
        if 'is_active' in df.columns:
            active_incidents = df[df['is_active'] == True].copy()
        else:
            active_incidents = df.iloc[0:0].copy()

        if active_incidents.empty:
            print("No active incidents found - showing basic map")
            legend_html = '''
            <div style="position: fixed;
                        bottom: 50px; left: 50px; width: 250px; height: 100px;
                        border:2px solid grey; z-index:9999; font-size:12px;
                        background-color:white; padding: 10px;
                        border-radius: 5px; font-family: Arial;">
                <div style="font-weight: bold; margin-bottom: 8px; font-size: 14px;">🔥 No Active Fires Detected</div>
                <div>No InciWeb incidents have nearby FIRMS hotspots in the last 24 hours.</div>
            </div>
            '''
            map_html = m._repr_html_()
            return map_html.replace('</body>', legend_html + '</body>')

        print(f"Found {len(active_incidents)} active incidents to display")

        # Collect hotspot coordinates stored as strings during matching.
        all_nearby_hotspots = []

        for _, incident in active_incidents.iterrows():
            hotspot_coords_str = incident.get('hotspot_coords', '')
            if hotspot_coords_str and hotspot_coords_str != 'None':
                try:
                    import ast
                    hotspot_coords = ast.literal_eval(hotspot_coords_str)
                    all_nearby_hotspots.extend(hotspot_coords)
                except (ValueError, SyntaxError):
                    continue

        # Heatmap plus individual circle markers for nearby FIRMS hotspots.
        if all_nearby_hotspots:
            print(f"Adding {len(all_nearby_hotspots)} FIRMS hotspots near active incidents...")
            try:
                heat_data = []
                for coord in all_nearby_hotspots:
                    try:
                        lat, lon, frp = float(coord[0]), float(coord[1]), float(coord[2])
                        if -90 <= lat <= 90 and -180 <= lon <= 180:
                            heat_data.append([lat, lon, min(frp, 100)])
                    except (ValueError, TypeError, IndexError):
                        continue

                if heat_data:
                    HeatMap(
                        heat_data,
                        name="Active Fire Intensity (NASA FIRMS)",
                        radius=15,
                        blur=10,
                        max_zoom=1,
                        gradient={0.2: 'blue', 0.4: 'lime', 0.6: 'orange', 1: 'red'}
                    ).add_to(m)
                    print(f"Added heatmap with {len(heat_data)} hotspots near active incidents")

                # Cap the marker count so the map stays responsive.
                for coord in all_nearby_hotspots[:100]:
                    try:
                        lat, lon, frp = float(coord[0]), float(coord[1]), float(coord[2])

                        if -90 <= lat <= 90 and -180 <= lon <= 180:
                            folium.CircleMarker(
                                location=[lat, lon],
                                radius=2 + min(frp / 10, 8),
                                popup=f"🔥 Active Hotspot<br>FRP: {frp:.1f} MW<br>Near active wildfire",
                                color='red',
                                fillColor='orange',
                                fillOpacity=0.7,
                                weight=1
                            ).add_to(m)
                    except (ValueError, TypeError, IndexError):
                        continue

            except Exception as e:
                print(f"Error adding FIRMS data to map: {e}")

        print(f"Adding {len(active_incidents)} active InciWeb incidents to map...")

        try:
            incident_cluster = MarkerCluster(name="Active Wildfire Incidents").add_to(m)

            for _, row in active_incidents.iterrows():
                try:
                    lat, lon = float(row['latitude']), float(row['longitude'])

                    if not (-90 <= lat <= 90 and -180 <= lon <= 180):
                        continue

                    # Marker color tracks the FIRMS-derived activity level.
                    activity_level = row.get('activity_level', 'Unknown')
                    if activity_level == 'Very High':
                        color = 'red'
                    elif activity_level == 'High':
                        color = 'orange'
                    elif activity_level == 'Medium':
                        color = 'yellow'
                    else:
                        color = 'lightred'

                    name = str(row.get('name', 'Unknown'))
                    incident_type = str(row.get('type', 'N/A'))
                    location = str(row.get('location', 'N/A'))
                    size = row.get('size', 'N/A')
                    updated = str(row.get('updated', 'N/A'))

                    firms_hotspots = int(row.get('firms_hotspots', 0))
                    total_frp = float(row.get('total_frp', 0))
                    avg_confidence = float(row.get('avg_confidence', 0))

                    popup_content = f"""
                    <div style="width: 300px;">
                        <h4>🔥 {name}</h4>
                        <b>Type:</b> {incident_type}<br>
                        <b>Location:</b> {location}<br>
                        <b>Size:</b> {size} acres<br>
                        <b>Last Updated:</b> {updated}<br>

                        <hr style="margin: 10px 0;">
                        <h5>📡 Satellite Fire Activity</h5>
                        <b>Status:</b> 🔴 ACTIVE (FIRMS confirmed)<br>
                        <b>Activity Level:</b> {activity_level}<br>
                        <b>Hotspots (24h):</b> {firms_hotspots}<br>
                        <b>Total Fire Power:</b> {total_frp:.1f} MW<br>
                        <b>Detection Confidence:</b> {avg_confidence:.1f}%<br>

                        <div style="margin-top: 8px; padding: 5px; background-color: #ffe6e6; border-radius: 3px;">
                            <small><b>🛰️ Real-time confirmed:</b> This fire has active satellite hotspots detected in the last 24 hours</small>
                        </div>
                    </div>
                    """

                    folium.Marker(
                        location=[lat, lon],
                        popup=folium.Popup(popup_content, max_width=350),
                        icon=folium.Icon(color=color, icon='fire', prefix='fa')
                    ).add_to(incident_cluster)

                except Exception as e:
                    print(f"Error adding active incident marker: {e}")
                    continue

        except Exception as e:
            print(f"Error creating active incident markers: {e}")

        total_active = len(active_incidents)
        total_hotspots = len(all_nearby_hotspots)

        legend_html = f'''
        <div style="position: fixed;
                    bottom: 50px; left: 50px; width: 280px; height: 280px;
                    border:2px solid grey; z-index:9999; font-size:12px;
                    background-color:white; padding: 10px;
                    border-radius: 5px; font-family: Arial;">
            <div style="font-weight: bold; margin-bottom: 8px; font-size: 14px;">🔥 Active Wildfire Detection</div>

            <div style="margin-bottom: 8px;"><b>Fire Activity Levels:</b></div>
            <div style="display: flex; align-items: center; margin-bottom: 3px;">
                <div style="background-color: red; width: 12px; height: 12px; margin-right: 5px; border-radius: 50%;"></div>
                <div>Very High Activity</div>
            </div>
            <div style="display: flex; align-items: center; margin-bottom: 3px;">
                <div style="background-color: orange; width: 12px; height: 12px; margin-right: 5px; border-radius: 50%;"></div>
                <div>High Activity</div>
            </div>
            <div style="display: flex; align-items: center; margin-bottom: 3px;">
                <div style="background-color: yellow; width: 12px; height: 12px; margin-right: 5px; border-radius: 50%;"></div>
                <div>Medium Activity</div>
            </div>
            <div style="display: flex; align-items: center; margin-bottom: 8px;">
                <div style="background-color: lightcoral; width: 12px; height: 12px; margin-right: 5px; border-radius: 50%;"></div>
                <div>Low Activity</div>
            </div>

            <div style="margin-bottom: 5px;"><b>Satellite Data:</b></div>
            <div style="display: flex; align-items: center; margin-bottom: 8px;">
                <div style="background-color: orange; width: 12px; height: 12px; margin-right: 5px; border-radius: 50%;"></div>
                <div>NASA FIRMS Hotspots</div>
            </div>

            <div style="font-size: 11px; margin-top: 10px; padding-top: 5px; border-top: 1px solid #ccc;">
                <b>🎯 Filtered Results:</b><br>
                🔥 Active Fires: {total_active}<br>
                📡 Satellite Hotspots: {total_hotspots}<br>

                <div style="margin-top: 5px; font-style: italic; color: #666;">
                    Only showing incidents with recent satellite fire detection
                </div>
            </div>
        </div>
        '''

        try:
            folium.LayerControl().add_to(m)
        except Exception as e:
            print(f"Error adding layer control: {e}")

        # Inject the legend into the rendered HTML.
        try:
            map_html = m._repr_html_()
            map_with_legend = map_html.replace('</body>', legend_html + '</body>')
            print(f"Map generation completed successfully - showing {total_active} active fires")
            return map_with_legend
        except Exception as e:
            print(f"Error generating final map HTML: {e}")
            return f"<div style='padding: 20px; text-align: center;'>Map generation error: {str(e)}</div>"

    except Exception as e:
        print(f"Critical error in focused map generation: {e}")
        import traceback
        traceback.print_exc()
        return f"<div style='padding: 20px; text-align: center;'>Critical map error: {str(e)}</div>"


def generate_enhanced_visualizations(df, firms_df):
    """Generate enhanced visualizations focusing only on active fires with FIRMS data integration."""
    # Figure order must match the dropdown choices in create_focused_wildfire_app.
    figures = []

    try:
        print("Starting focused visualization generation (active fires only)...")

        if df.empty:
            print("Warning: Empty dataframe for visualizations")
            return [px.bar(title="No data available")]

        # Guard on column presence (df.get returns a scalar when missing).
        if 'is_active' in df.columns:
            active_df = df[df['is_active'] == True].copy()
        else:
            active_df = df.iloc[0:0].copy()

        # Figure 1: activity level distribution.
        try:
            if not active_df.empty and 'activity_level' in active_df.columns:
                activity_levels = active_df['activity_level'].value_counts().reset_index()
                activity_levels.columns = ['activity_level', 'count']

                level_order = ['Very High', 'High', 'Medium', 'Low', 'Minimal']
                color_map = {
                    'Very High': 'darkred',
                    'High': 'red',
                    'Medium': 'orange',
                    'Low': 'yellow',
                    'Minimal': 'lightblue'
                }

                fig1 = px.bar(
                    activity_levels,
                    x='activity_level',
                    y='count',
                    title="🔥 Active Fire Intensity Levels (NASA FIRMS Confirmed)",
                    labels={'activity_level': 'Fire Activity Level', 'count': 'Number of Active Fires'},
                    color='activity_level',
                    color_discrete_map=color_map,
                    category_orders={'activity_level': level_order}
                )
                fig1.update_layout(
                    title_font_size=16,
                    showlegend=False
                )
            else:
                fig1 = px.bar(title="No active fires detected with FIRMS data")
        except Exception as e:
            print(f"Error creating activity level chart: {e}")
            fig1 = px.bar(title=f"Activity level error: {str(e)}")
        figures.append(fig1)

        # Figure 2: active fires by state.
        try:
            if not active_df.empty and 'state' in active_df.columns:
                state_counts = active_df['state'].value_counts().reset_index()
                state_counts.columns = ['state_name', 'count']

                fig2 = px.bar(
                    state_counts,
                    x='state_name',
                    y='count',
                    title="🗺️ Active Fires by State (FIRMS Confirmed)",
                    labels={'state_name': 'State', 'count': 'Number of Active Fires'},
                    color='count',
                    color_continuous_scale='Reds'
                )
                fig2.update_layout(
                    title_font_size=16,
                    showlegend=False
                )
            else:
                fig2 = px.bar(title="No active fires by state data available")
        except Exception as e:
            print(f"Error creating state distribution chart: {e}")
            fig2 = px.bar(title=f"State distribution error: {str(e)}")
        figures.append(fig2)

        # Figure 3: satellite fire power vs reported size (log-log scatter).
        try:
            if not active_df.empty and 'total_frp' in active_df.columns and 'size' in active_df.columns:
                scatter_df = active_df[
                    (active_df['total_frp'] > 0) &
                    (active_df['size'].notna()) &
                    (active_df['size'] > 0)
                ].copy()

                if not scatter_df.empty:
                    fig3 = px.scatter(
                        scatter_df,
                        x='size',
                        y='total_frp',
                        size='firms_hotspots',
                        color='activity_level',
                        hover_data=['name', 'state', 'firms_hotspots'],
                        title="🔥 Fire Intensity vs Size (Active Fires Only)",
                        labels={
                            'size': 'Fire Size (acres)',
                            'total_frp': 'Satellite Fire Power (MW)',
                            'firms_hotspots': 'Hotspot Count'
                        },
                        color_discrete_map={
                            'Very High': 'darkred',
                            'High': 'red',
                            'Medium': 'orange',
                            'Low': 'yellow'
                        }
                    )
                    fig3.update_layout(
                        title_font_size=16,
                        xaxis_type="log",
                        yaxis_type="log"
                    )
                else:
                    fig3 = px.bar(title="No active fires with size and intensity data")
            else:
                fig3 = px.bar(title="Fire intensity vs size data not available")
        except Exception as e:
            print(f"Error creating scatter plot: {e}")
            fig3 = px.bar(title=f"Scatter plot error: {str(e)}")
        figures.append(fig3)

        # Figure 4: hourly hotspot detections. Note this aggregates all USA
        # hotspots; it is shown only when at least one active incident has
        # nearby hotspots.
        try:
            if not firms_df.empty and 'datetime' in firms_df.columns and not active_df.empty:
                all_nearby_hotspots_coords = []
                for _, incident in active_df.iterrows():
                    hotspot_coords_str = incident.get('hotspot_coords', '')
                    if hotspot_coords_str and hotspot_coords_str != 'None':
                        try:
                            import ast
                            hotspot_coords = ast.literal_eval(hotspot_coords_str)
                            all_nearby_hotspots_coords.extend(hotspot_coords)
                        except (ValueError, SyntaxError):
                            continue

                if all_nearby_hotspots_coords:
                    firms_copy = firms_df.copy()
                    firms_copy['hour'] = pd.to_datetime(firms_copy['datetime']).dt.floor('H')
                    hourly_detections = firms_copy.groupby('hour').size().reset_index(name='detections')

                    if not hourly_detections.empty:
                        fig4 = px.line(
                            hourly_detections,
                            x='hour',
                            y='detections',
                            title="🕐 USA Fire Hotspot Detections Over Time (Last 24 Hours)",
                            labels={'hour': 'Time', 'detections': 'Hotspots Detected'},
                            markers=True
                        )
                        fig4.update_traces(line_color='red', marker_color='orange')
                        fig4.update_layout(title_font_size=16)
                    else:
                        fig4 = px.bar(title="No temporal FIRMS data available")
                else:
                    fig4 = px.bar(title="No hotspots near active incidents found")
            else:
                fig4 = px.bar(title="FIRMS temporal data not available")
        except Exception as e:
            print(f"Error creating timeline chart: {e}")
            fig4 = px.bar(title=f"Timeline error: {str(e)}")
        figures.append(fig4)

        # Figure 5: active vs inactive summary pie chart.
        try:
            active_count = len(active_df)
            inactive_count = len(df) - active_count

            if active_count > 0 or inactive_count > 0:
                summary_data = pd.DataFrame({
                    'status': ['🔥 Active (FIRMS Confirmed)', '⚫ Inactive/No Data'],
                    'count': [active_count, inactive_count]
                })

                fig5 = px.pie(
                    summary_data,
                    values='count',
                    names='status',
                    title="📊 Fire Detection Summary (InciWeb vs FIRMS)",
                    color_discrete_map={
                        '🔥 Active (FIRMS Confirmed)': 'red',
                        '⚫ Inactive/No Data': 'gray'
                    }
                )
                fig5.update_traces(textinfo='label+percent+value')
                fig5.update_layout(title_font_size=16)
            else:
                fig5 = px.bar(title="No fire status data available")
        except Exception as e:
            print(f"Error creating summary chart: {e}")
            fig5 = px.bar(title=f"Summary error: {str(e)}")
        figures.append(fig5)

        print(f"Generated {len(figures)} focused visualizations for {len(active_df)} active fires")
        return figures

    except Exception as e:
        print(f"Critical error in focused visualization generation: {e}")
        import traceback
        traceback.print_exc()
        return [px.bar(title=f"Critical visualization error: {str(e)}")]


def create_focused_wildfire_app():
    """Create the focused active wildfire Gradio application."""
    with gr.Blocks(title="Focused Active Wildfire Tracker", theme=gr.themes.Soft()) as app:
        gr.Markdown("""
        # 🔥 Focused Active Wildfire Tracker
        ## InciWeb Incidents + NASA FIRMS Real-Time Fire Detection

        This application identifies **currently active wildfires** by combining official incident reports from InciWeb with real-time satellite fire detection data from NASA FIRMS:

        ### 🎯 **What You'll See:**
        - **🔥 Active Fires Only**: InciWeb incidents that have nearby satellite-detected hotspots (confirmed burning)
        - **📡 Real-Time Data**: NASA FIRMS satellite fire detection from the last 24 hours
        - **🛰️ Fire Intensity**: Fire Radiative Power (FRP) measurements showing fire strength
        - **🗺️ Focused Map**: Clean visualization showing only confirmed active wildfires and their satellite data

        ### 🚫 **What's Filtered Out:**
        - InciWeb incidents without recent satellite fire activity (likely contained/inactive)
        - Random FIRMS hotspots not near known incidents
        - Outdated or inactive fire reports

        **Result: A precise view of what's actually burning right now!** 🔥🛰️
        """)

        with gr.Row():
            fetch_btn = gr.Button("🚀 Fetch Active Wildfire Data (InciWeb + NASA FIRMS)", variant="primary", size="lg")
            status_text = gr.Textbox(label="Status", interactive=False, value="Ready to fetch active wildfire data...")

        with gr.Tabs():
            with gr.TabItem("🗺️ Enhanced Map"):
                map_display = gr.HTML(label="Interactive Map with Fire Activity")

            with gr.TabItem("📊 Enhanced Analytics"):
                with gr.Row():
                    plot_selector = gr.Dropdown(
                        choices=[
                            "Active Fire Intensity Levels",
                            "Active Fires by State",
                            "Fire Intensity vs Size",
                            "Hotspot Detection Timeline",
                            "Active vs Inactive Summary"
                        ],
                        label="Select Visualization",
                        value="Active Fire Intensity Levels"
                    )
                plot_display = gr.Plot(label="Enhanced Analytics (Active Fires Focus)")

            with gr.TabItem("📋 Data Tables"):
                with gr.Tabs():
                    with gr.TabItem("🔥 Active Fires"):
                        active_fires_table = gr.Dataframe(label="Active Fires (FIRMS Confirmed)")
                    with gr.TabItem("📋 All InciWeb Incidents"):
                        inciweb_table = gr.Dataframe(label="All InciWeb Incidents")
                    with gr.TabItem("🛰️ NASA FIRMS Data"):
                        firms_table = gr.Dataframe(label="NASA FIRMS Fire Hotspots (Near Active Incidents)")

            with gr.TabItem("📁 Export Data"):
                gr.Markdown("### Download Enhanced Dataset")
                with gr.Row():
                    download_csv = gr.File(label="Download Enhanced CSV")
                    download_geojson = gr.File(label="Download GeoJSON")

        # Shared state so the plot selector can switch between cached figures.
        app_state = gr.State({})

        def fetch_and_process_data():
            """Main data processing function with comprehensive error handling and debugging."""
            try:
                yield "📡 Fetching InciWeb incident data...", None, None, None, None, None, None, None

                # Step 1: scrape the InciWeb incident list.
                try:
                    print("Step 1: Fetching InciWeb data...")
                    inciweb_df = fetch_inciweb_data()
                    if inciweb_df.empty:
                        yield "❌ Failed to fetch InciWeb data", None, None, None, None, None, None, None
                        return
                    print(f"Step 1 SUCCESS: Got {len(inciweb_df)} incidents")
                except Exception as e:
                    print(f"Step 1 ERROR: {e}")
                    yield f"❌ Error fetching InciWeb data: {str(e)}", None, None, None, None, None, None, None
                    return

                yield f"✅ Found {len(inciweb_df)} InciWeb incidents. Getting coordinates...", None, None, None, None, None, None, None

                # Step 2: resolve incident coordinates (best effort).
                try:
                    print("Step 2: Getting coordinates...")
                    inciweb_df = add_coordinates_to_incidents(inciweb_df, max_incidents=15)
                    coords_count = len(inciweb_df[(inciweb_df['latitude'].notna()) & (inciweb_df['longitude'].notna())])
                    print(f"Step 2 SUCCESS: Got coordinates for {coords_count} incidents")
                except Exception as e:
                    print(f"Step 2 ERROR: {e}")

                yield "🛰️ Fetching NASA FIRMS fire detection data...", None, None, None, None, None, None, None

                # Step 3: fetch FIRMS hotspots; degrade gracefully if unavailable.
                try:
                    print("Step 3: Fetching FIRMS data...")
                    firms_df = fetch_firms_data()
                    if firms_df.empty:
                        print("Step 3 WARNING: FIRMS data empty")
                        yield "⚠️ FIRMS data unavailable, generating basic visualization...", None, None, None, None, None, None, None

                        try:
                            print("Generating basic map without FIRMS...")
                            map_html = generate_enhanced_map(inciweb_df, pd.DataFrame())
                            print("Generating basic visualizations...")
                            plots = generate_enhanced_visualizations(inciweb_df, pd.DataFrame())

                            import tempfile
                            csv_file = tempfile.NamedTemporaryFile(mode='w', suffix='.csv', delete=False)
                            inciweb_df.to_csv(csv_file.name, index=False)
                            csv_file.close()

                            active_fires_df = pd.DataFrame()

                            final_status = f"✅ Partial success! Found {len(inciweb_df)} InciWeb incidents (FIRMS data unavailable)"
                            yield (final_status, map_html, plots[0], active_fires_df, inciweb_df, pd.DataFrame(), csv_file.name,
                                   {"inciweb_df": inciweb_df, "firms_df": pd.DataFrame(), "plots": plots})
                            return
                        except Exception as e:
                            print(f"Error in basic visualization: {e}")
                            yield f"❌ Error in basic visualization: {str(e)}", None, None, inciweb_df, None, pd.DataFrame(), None, None
                            return

                    print(f"Step 3 SUCCESS: Got {len(firms_df)} FIRMS hotspots")

                except Exception as e:
                    print(f"Step 3 ERROR: {e}")
                    yield f"❌ Error fetching FIRMS data: {str(e)}", None, None, inciweb_df, None, pd.DataFrame(), None, None
                    return

                yield f"✅ Found {len(firms_df)} USA fire hotspots. Matching with incidents...", None, None, None, None, None, None, None

                # Step 4: proximity-match hotspots to incidents.
                try:
                    print("Step 4: Matching FIRMS to InciWeb...")
                    enhanced_df = match_firms_to_inciweb(inciweb_df, firms_df)
                    print(f"Step 4 SUCCESS: Enhanced {len(enhanced_df)} incidents")
                except Exception as e:
                    print(f"Step 4 ERROR: {e}")
                    enhanced_df = inciweb_df
                    print("Using original InciWeb data without FIRMS matching")

                yield "🗺️ Generating focused map and analytics (active fires only)...", None, None, None, None, None, None, None

                # Step 5: render the map and figures.
                try:
                    print("Step 5: Generating focused map...")
                    map_html = generate_enhanced_map(enhanced_df, firms_df)
                    print("Step 5a SUCCESS: Map generated")

                    print("Step 5: Generating focused visualizations...")
                    plots = generate_enhanced_visualizations(enhanced_df, firms_df)
                    print("Step 5b SUCCESS: Visualizations generated")
                except Exception as e:
                    print(f"Step 5 ERROR: {e}")
                    map_html = f"<div style='padding: 20px; text-align: center;'>Map generation failed: {str(e)}<br>Data is available in tables below.</div>"
                    plots = [px.bar(title=f"Visualization generation failed: {str(e)}")]

                # Step 6: build the data tables.
                try:
                    print("Step 6: Creating data tables...")
                    if 'is_active' in enhanced_df.columns:
                        active_fires_df = enhanced_df[enhanced_df['is_active'] == True].copy()
                    else:
                        active_fires_df = pd.DataFrame()

                    firms_near_active = pd.DataFrame()
                    if not active_fires_df.empty and not firms_df.empty:
                        # Show a sample of hotspots; full matching already ran in Step 4.
                        firms_near_active = firms_df.head(100)

                    print(f"Step 6 SUCCESS: {len(active_fires_df)} active fires, {len(firms_near_active)} nearby FIRMS hotspots")
                except Exception as e:
                    print(f"Step 6 ERROR: {e}")
                    active_fires_df = pd.DataFrame()
                    firms_near_active = pd.DataFrame()

                # Step 7: export the enhanced dataset as CSV.
                try:
                    print("Step 7: Creating CSV export...")
                    import tempfile
                    csv_file = tempfile.NamedTemporaryFile(mode='w', suffix='.csv', delete=False)
                    enhanced_df.to_csv(csv_file.name, index=False)
                    csv_file.close()
                    print("Step 7 SUCCESS: CSV created")
                except Exception as e:
                    print(f"Step 7 ERROR: {e}")
                    csv_file = None

                # Final status summary.
                try:
                    active_count = len(active_fires_df)

                    final_status = f"🎯 Focused Results: {active_count} active fires detected with satellite confirmation"
                    print(f"FINAL SUCCESS: {final_status}")

                    yield (final_status, map_html, plots[0], active_fires_df, enhanced_df, firms_near_active, csv_file.name if csv_file else None,
                           {"inciweb_df": enhanced_df, "firms_df": firms_df, "plots": plots, "active_df": active_fires_df})
                except Exception as e:
                    print(f"Error calculating final statistics: {e}")
                    final_status = "✅ Process completed with some errors"
                    yield (final_status, map_html, plots[0], active_fires_df, enhanced_df, firms_near_active, csv_file.name if csv_file else None,
                           {"inciweb_df": enhanced_df, "firms_df": firms_df, "plots": plots, "active_df": active_fires_df})

            except Exception as e:
                import traceback
                error_details = traceback.format_exc()
                print(f"CRITICAL ERROR in main process: {error_details}")
                yield f"❌ Critical Error: {str(e)}", None, None, None, None, None, None, None

        def update_plot(plot_name, state_data):
            """Update the displayed plot based on the dropdown selection."""
            if not state_data or "plots" not in state_data:
                return px.bar(title="No data available")

            # Must stay in the same order as the figures list returned by
            # generate_enhanced_visualizations.
            plot_options = [
                "Active Fire Intensity Levels",
                "Active Fires by State",
                "Fire Intensity vs Size",
                "Hotspot Detection Timeline",
                "Active vs Inactive Summary"
            ]

            try:
                plot_index = plot_options.index(plot_name)
                return state_data["plots"][plot_index]
            except (ValueError, IndexError):
                return state_data["plots"][0] if state_data["plots"] else px.bar(title="Plot not available")

        fetch_btn.click(
            fetch_and_process_data,
            outputs=[status_text, map_display, plot_display, active_fires_table, inciweb_table, firms_table, download_csv, app_state]
        )

        plot_selector.change(
            update_plot,
            inputs=[plot_selector, app_state],
            outputs=[plot_display]
        )

    return app
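
# Note: fetch_and_process_data is a generator, so Gradio streams each yielded
# tuple positionally into the eight outputs listed in fetch_btn.click; the
# intermediate yields only carry a new status string for the first output.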


if __name__ == "__main__":
    app = create_focused_wildfire_app()
    app.launch(share=True, debug=True)
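
# share=True requests a temporary public gradio.live URL; use app.launch()
# without it to keep the app local-only.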