|
import gradio as gr |
|
import requests |
|
import folium |
|
import pandas as pd |
|
import time |
|
import os |
|
import zipfile |
|
import io |
|
from typing import Dict, List, Tuple |
|
from datetime import datetime, timedelta |
|
import pytz |
|
|
|
class AccurateAirQualityMapper: |
|
"""Air Quality Mapper with precise EPA coordinates""" |
|
|
|
def __init__(self): |
|
self.airnow_base_url = "https://files.airnowtech.org" |
|
self.epa_base_url = "https://aqs.epa.gov/aqsweb/airdata" |
|
self.aqi_colors = { |
|
"Good": "#00E400", |
|
"Moderate": "#FFFF00", |
|
"Unhealthy for Sensitive Groups": "#FF7E00", |
|
"Unhealthy": "#FF0000", |
|
"Very Unhealthy": "#8F3F97", |
|
"Hazardous": "#7E0023" |
|
} |
|
self.aqi_ranges = { |
|
(0, 50): "Good", |
|
(51, 100): "Moderate", |
|
(101, 150): "Unhealthy for Sensitive Groups", |
|
(151, 200): "Unhealthy", |
|
(201, 300): "Very Unhealthy", |
|
(301, 500): "Hazardous" |
|
} |
|
|
|
self.coordinate_cache = {} |
|
self.fallback_coordinates = self.get_fallback_coordinates() |
|
|
|
def get_fallback_coordinates(self) -> Dict[str, Tuple[float, float]]: |
|
"""Fallback coordinates for major monitoring locations""" |
|
return { |
|
|
|
"Los Angeles": (34.0522, -118.2437), |
|
"New York": (40.7128, -74.0060), |
|
"Chicago": (41.8781, -87.6298), |
|
"Houston": (29.7604, -95.3698), |
|
"Phoenix": (33.4484, -112.0740), |
|
"Philadelphia": (39.9526, -75.1652), |
|
"San Antonio": (29.4241, -98.4936), |
|
"San Diego": (32.7157, -117.1611), |
|
"Dallas": (32.7767, -96.7970), |
|
"San Francisco": (37.7749, -122.4194), |
|
"Boston": (42.3601, -71.0589), |
|
"Seattle": (47.6062, -122.3321), |
|
"Denver": (39.7392, -104.9903), |
|
"Atlanta": (33.7490, -84.3880), |
|
"Miami": (25.7617, -80.1918) |
|
} |
|
|
|
def download_epa_coordinates(self) -> Dict[str, Tuple[float, float]]: |
|
"""Download EPA monitor coordinates and create lookup dictionary""" |
|
|
|
print("πΊοΈ Downloading EPA monitor coordinates...") |
|
coordinates = {} |
|
|
|
try: |
|
|
|
monitors_url = f"{self.epa_base_url}/aqs_monitors.zip" |
|
print(f"Downloading: {monitors_url}") |
|
|
|
response = requests.get(monitors_url, timeout=60) |
|
if response.status_code == 200: |
|
print(f"β
Downloaded monitors file ({len(response.content)} bytes)") |
|
|
|
|
|
with zipfile.ZipFile(io.BytesIO(response.content)) as z: |
|
csv_files = [f for f in z.namelist() if f.endswith('.csv')] |
|
if csv_files: |
|
csv_filename = csv_files[0] |
|
print(f"π Extracting: {csv_filename}") |
|
|
|
with z.open(csv_filename) as f: |
|
|
|
df = pd.read_csv(f, dtype=str) |
|
|
|
print(f"π Loaded {len(df)} monitor records") |
|
print(f"Columns: {list(df.columns)}") |
|
|
|
|
|
if len(df) > 0: |
|
print("Sample row:") |
|
print(df.iloc[0].to_dict()) |
|
|
|
|
|
for _, row in df.iterrows(): |
|
try: |
|
|
|
state_code = None |
|
county_code = None |
|
site_number = None |
|
lat = None |
|
lon = None |
|
|
|
|
|
for col in ['State Code', 'State_Code', 'state_code', 'STATE_CODE']: |
|
if col in df.columns and pd.notna(row.get(col)): |
|
state_code = str(row[col]).zfill(2) |
|
break |
|
|
|
|
|
for col in ['County Code', 'County_Code', 'county_code', 'COUNTY_CODE']: |
|
if col in df.columns and pd.notna(row.get(col)): |
|
county_code = str(row[col]).zfill(3) |
|
break |
|
|
|
|
|
for col in ['Site Number', 'Site_Number', 'site_number', 'SITE_NUMBER']: |
|
if col in df.columns and pd.notna(row.get(col)): |
|
site_number = str(row[col]).zfill(4) |
|
break |
|
|
|
|
|
for col in ['Latitude', 'latitude', 'LATITUDE', 'Lat']: |
|
if col in df.columns and pd.notna(row.get(col)): |
|
lat = float(row[col]) |
|
break |
|
|
|
|
|
for col in ['Longitude', 'longitude', 'LONGITUDE', 'Lon']: |
|
if col in df.columns and pd.notna(row.get(col)): |
|
lon = float(row[col]) |
|
break |
|
|
|
|
|
if all([state_code, county_code, site_number, lat, lon]): |
|
if lat != 0 and lon != 0: |
|
aqs_id = f"{state_code}{county_code}{site_number}" |
|
coordinates[aqs_id] = (lat, lon) |
|
|
|
|
|
site_id = f"{state_code}{county_code}{site_number}" |
|
coordinates[site_id[:9]] = (lat, lon) |
|
coordinates[site_id[:7]] = (lat, lon) |
|
|
|
except (ValueError, TypeError) as e: |
|
continue |
|
|
|
print(f"β
Created coordinate lookup for {len(coordinates)} stations") |
|
|
|
else: |
|
print(f"β Failed to download monitors: HTTP {response.status_code}") |
|
|
|
except Exception as e: |
|
print(f"β Error downloading EPA coordinates: {str(e)}") |
|
|
|
|
|
if len(coordinates) < 100: |
|
print("π Trying alternative coordinate source...") |
|
try: |
|
|
|
sites_url = f"{self.epa_base_url}/aqs_sites.zip" |
|
response = requests.get(sites_url, timeout=60) |
|
|
|
if response.status_code == 200: |
|
with zipfile.ZipFile(io.BytesIO(response.content)) as z: |
|
csv_files = [f for f in z.namelist() if f.endswith('.csv')] |
|
if csv_files: |
|
with z.open(csv_files[0]) as f: |
|
df = pd.read_csv(f, dtype=str) |
|
print(f"π Backup file has {len(df)} records") |
|
|
|
for _, row in df.iterrows(): |
|
try: |
|
|
|
state_code = str(row.get('State Code', row.get('STATE_CODE', ''))).zfill(2) |
|
county_code = str(row.get('County Code', row.get('COUNTY_CODE', ''))).zfill(3) |
|
site_number = str(row.get('Site Number', row.get('SITE_NUMBER', ''))).zfill(4) |
|
|
|
lat = float(row.get('Latitude', row.get('LATITUDE', 0))) |
|
lon = float(row.get('Longitude', row.get('LONGITUDE', 0))) |
|
|
|
if all([state_code != "00", county_code != "000", site_number != "0000"]) and lat != 0 and lon != 0: |
|
aqs_id = f"{state_code}{county_code}{site_number}" |
|
coordinates[aqs_id] = (lat, lon) |
|
coordinates[aqs_id[:9]] = (lat, lon) |
|
coordinates[aqs_id[:7]] = (lat, lon) |
|
|
|
except (ValueError, TypeError): |
|
continue |
|
|
|
print(f"β
Total coordinates after backup: {len(coordinates)}") |
|
|
|
except Exception as e: |
|
print(f"β Error with backup coordinates: {str(e)}") |
|
|
|
self.coordinate_cache = coordinates |
|
return coordinates |
|
|
|
def get_aqi_category(self, aqi_value: int) -> str: |
|
"""Get AQI category based on value""" |
|
for (min_val, max_val), category in self.aqi_ranges.items(): |
|
if min_val <= aqi_value <= max_val: |
|
return category |
|
return "Unknown" |
|
|
|
def calculate_aqi(self, parameter: str, value: float) -> int: |
|
"""Calculate AQI for common parameters""" |
|
if parameter == 'OZONE' and value > 0: |
|
if value <= 54: return int((50/54) * value) |
|
elif value <= 70: return int(51 + (49/16) * (value - 54)) |
|
elif value <= 85: return int(101 + (49/15) * (value - 70)) |
|
elif value <= 105: return int(151 + (49/20) * (value - 85)) |
|
else: return int(201 + (199/95) * min(value - 105, 95)) |
|
|
|
elif parameter == 'PM2.5' and value >= 0: |
|
if value <= 12.0: return int((50/12) * value) |
|
elif value <= 35.4: return int(51 + (49/23.4) * (value - 12)) |
|
elif value <= 55.4: return int(101 + (49/20) * (value - 35.4)) |
|
elif value <= 150.4: return int(151 + (49/95) * (value - 55.4)) |
|
else: return int(201 + (199/149.6) * min(value - 150.4, 149.6)) |
|
|
|
elif parameter == 'PM10' and value >= 0: |
|
if value <= 54: return int((50/54) * value) |
|
elif value <= 154: return int(51 + (49/100) * (value - 54)) |
|
elif value <= 254: return int(101 + (49/100) * (value - 154)) |
|
elif value <= 354: return int(151 + (49/100) * (value - 254)) |
|
else: return int(201 + (199/146) * min(value - 354, 146)) |
|
|
|
return 0 |
|
|
|
def fetch_airnow_bulk_data(self) -> Tuple[List[Dict], str]: |
|
"""Fetch current AirNow bulk data""" |
|
print("π― Fetching AirNow bulk data...") |
|
|
|
try: |
|
|
|
gmt_now = datetime.now(pytz.UTC) |
|
|
|
|
|
for hour_offset in range(0, 12): |
|
try: |
|
target_time = gmt_now - timedelta(hours=hour_offset) |
|
filename = f"HourlyData_{target_time.strftime('%Y%m%d%H')}.dat" |
|
|
|
url = f"{self.airnow_base_url}/airnow/today/{filename}" |
|
print(f"π Trying: {url}") |
|
|
|
response = requests.get(url, timeout=30) |
|
|
|
if response.status_code == 200 and response.text.strip(): |
|
print(f"β
Found data file with {len(response.text.splitlines())} lines") |
|
print(f"First few lines:") |
|
lines = response.text.strip().split('\n') |
|
for i, line in enumerate(lines[:3]): |
|
print(f" Line {i+1}: {line}") |
|
|
|
|
|
data = self.parse_hourly_data_file(response.text) |
|
|
|
if data: |
|
print(f"π Parsed {len(data)} station records") |
|
return data, f"β
SUCCESS: {len(data)} monitoring stations from {filename}" |
|
|
|
except Exception as e: |
|
print(f"β Error trying hour {hour_offset}: {str(e)}") |
|
continue |
|
|
|
time.sleep(0.1) |
|
|
|
|
|
print("π No recent data found, creating demo data...") |
|
demo_data = self.create_demo_data() |
|
return demo_data, f"β οΈ DEMO: {len(demo_data)} demo stations (no recent AirNow data available)" |
|
|
|
except Exception as e: |
|
|
|
demo_data = self.create_demo_data() |
|
return demo_data, f"β Error fetching data, showing demo: {str(e)}" |
|
|
|
def create_demo_data(self) -> List[Dict]: |
|
"""Create demo data with known coordinates""" |
|
demo_data = [] |
|
|
|
for city, (lat, lon) in self.fallback_coordinates.items(): |
|
|
|
demo_data.append({ |
|
'DateObserved': datetime.now().strftime('%m/%d/%y'), |
|
'HourObserved': str(datetime.now().hour).zfill(2), |
|
'AQSID': f"DEMO_{city}_AQ", |
|
'SiteName': f"{city} Air Quality Monitor", |
|
'ParameterName': 'PM2.5', |
|
'ReportingUnits': 'UG/M3', |
|
'Value': 15.0 + (hash(city) % 20), |
|
'DataSource': 'DEMO', |
|
'Latitude': lat, |
|
'Longitude': lon, |
|
'AQI': 50 + (hash(city) % 50), |
|
'Category': {'Name': 'Moderate'}, |
|
'ReportingArea': city, |
|
'StateCode': 'US', |
|
'IsAirQuality': True |
|
}) |
|
|
|
|
|
demo_data.append({ |
|
'DateObserved': datetime.now().strftime('%m/%d/%y'), |
|
'HourObserved': str(datetime.now().hour).zfill(2), |
|
'AQSID': f"DEMO_{city}_MET", |
|
'SiteName': f"{city} Weather Station", |
|
'ParameterName': 'TEMP', |
|
'ReportingUnits': 'FAHRENHEIT', |
|
'Value': 70.0 + (hash(city) % 30), |
|
'DataSource': 'DEMO', |
|
'Latitude': lat + 0.01, |
|
'Longitude': lon + 0.01, |
|
'AQI': 0, |
|
'Category': {'Name': 'Meteorological'}, |
|
'ReportingArea': city, |
|
'StateCode': 'US', |
|
'IsAirQuality': False |
|
}) |
|
|
|
return demo_data |
|
|
|
def parse_hourly_data_file(self, text: str) -> List[Dict]: |
|
"""Parse AirNow hourly data format""" |
|
lines = text.strip().split('\n') |
|
data = [] |
|
|
|
|
|
if not self.coordinate_cache: |
|
self.download_epa_coordinates() |
|
|
|
print(f"π Parsing {len(lines)} lines with {len(self.coordinate_cache)} coordinate entries") |
|
|
|
found_coordinates = 0 |
|
|
|
for line_num, line in enumerate(lines): |
|
if not line.strip(): |
|
continue |
|
|
|
try: |
|
fields = line.split('|') |
|
|
|
if len(fields) >= 8: |
|
aqs_id = fields[2] if len(fields) > 2 else '' |
|
|
|
|
|
lat, lon = 0, 0 |
|
|
|
|
|
if aqs_id in self.coordinate_cache: |
|
lat, lon = self.coordinate_cache[aqs_id] |
|
|
|
elif len(aqs_id) >= 9 and aqs_id[:9] in self.coordinate_cache: |
|
lat, lon = self.coordinate_cache[aqs_id[:9]] |
|
|
|
elif len(aqs_id) >= 7 and aqs_id[:7] in self.coordinate_cache: |
|
lat, lon = self.coordinate_cache[aqs_id[:7]] |
|
|
|
else: |
|
for cached_id in self.coordinate_cache: |
|
if len(aqs_id) >= 5 and len(cached_id) >= 5: |
|
if aqs_id[:5] == cached_id[:5]: |
|
lat, lon = self.coordinate_cache[cached_id] |
|
break |
|
|
|
|
|
if lat == 0 and lon == 0 and len(fields) > 3: |
|
site_name = fields[3].upper() |
|
for city, coords in self.fallback_coordinates.items(): |
|
if city.upper() in site_name: |
|
lat, lon = coords |
|
break |
|
|
|
|
|
if lat == 0 and lon == 0: |
|
continue |
|
|
|
found_coordinates += 1 |
|
|
|
|
|
try: |
|
value = float(fields[7]) if len(fields) > 7 and fields[7].replace('.','').replace('-','').replace('+','').isdigit() else 0 |
|
except: |
|
value = 0 |
|
|
|
parameter = fields[5] if len(fields) > 5 else 'UNKNOWN' |
|
site_name = fields[3] if len(fields) > 3 else 'Unknown Site' |
|
units = fields[6] if len(fields) > 6 else '' |
|
|
|
|
|
aqi = self.calculate_aqi(parameter, value) |
|
|
|
|
|
air_quality_params = ['OZONE', 'PM2.5', 'PM10', 'NO2', 'SO2', 'CO'] |
|
is_air_quality = parameter in air_quality_params |
|
|
|
record = { |
|
'DateObserved': fields[0] if len(fields) > 0 else '', |
|
'HourObserved': fields[1] if len(fields) > 1 else '', |
|
'AQSID': aqs_id, |
|
'SiteName': site_name, |
|
'ParameterName': parameter, |
|
'ReportingUnits': units, |
|
'Value': value, |
|
'DataSource': fields[8] if len(fields) > 8 else '', |
|
'Latitude': lat, |
|
'Longitude': lon, |
|
'AQI': aqi, |
|
'Category': {'Name': self.get_aqi_category(aqi) if is_air_quality else 'Meteorological'}, |
|
'ReportingArea': site_name, |
|
'StateCode': aqs_id[:2] if len(aqs_id) >= 2 else 'US', |
|
'IsAirQuality': is_air_quality |
|
} |
|
|
|
data.append(record) |
|
|
|
|
|
if found_coordinates <= 3: |
|
print(f"β
Match {found_coordinates}: {site_name} -> {lat:.4f}, {lon:.4f}") |
|
|
|
except Exception as e: |
|
if line_num < 5: |
|
print(f"β Error parsing line {line_num}: {str(e)}") |
|
continue |
|
|
|
print(f"β
Found coordinates for {found_coordinates} out of {len(lines)} stations") |
|
return data |
|
|
|
def create_map(self, data: List[Dict]) -> str: |
|
"""Create interactive map with accurate coordinates""" |
|
if not data: |
|
m = folium.Map(location=[39.8283, -98.5795], zoom_start=4) |
|
folium.Marker( |
|
[39.8283, -98.5795], |
|
popup="No air quality data available.", |
|
icon=folium.Icon(color='red', icon='info-sign') |
|
).add_to(m) |
|
return m._repr_html_() |
|
|
|
|
|
lats = [item['Latitude'] for item in data] |
|
lons = [item['Longitude'] for item in data] |
|
center_lat = sum(lats) / len(lats) |
|
center_lon = sum(lons) / len(lons) |
|
|
|
print(f"πΊοΈ Creating map centered at {center_lat:.4f}, {center_lon:.4f} with {len(data)} markers") |
|
|
|
|
|
m = folium.Map(location=[center_lat, center_lon], zoom_start=4) |
|
|
|
|
|
added_markers = 0 |
|
for item in data: |
|
try: |
|
lat = item['Latitude'] |
|
lon = item['Longitude'] |
|
aqi = item['AQI'] |
|
parameter = item['ParameterName'] |
|
site_name = item['SiteName'] |
|
value = item['Value'] |
|
units = item['ReportingUnits'] |
|
category = item['Category']['Name'] |
|
is_air_quality = item.get('IsAirQuality', False) |
|
|
|
|
|
if is_air_quality: |
|
popup_content = f""" |
|
<div style="width: 250px;"> |
|
<h4>{site_name} <span style="color: red;">π¬οΈ Air Quality</span></h4> |
|
<p><b>Parameter:</b> {parameter}</p> |
|
<p><b>Value:</b> {value} {units}</p> |
|
<p><b>AQI:</b> {aqi} ({category})</p> |
|
<p><b>Coordinates:</b> {lat:.4f}, {lon:.4f}</p> |
|
<p><b>Time:</b> {item['DateObserved']} {item['HourObserved']}:00 GMT</p> |
|
<p><b>Station ID:</b> {item['AQSID']}</p> |
|
</div> |
|
""" |
|
tooltip_text = f"{site_name}: {parameter} = {value} {units} (AQI: {aqi})" |
|
else: |
|
popup_content = f""" |
|
<div style="width: 250px;"> |
|
<h4>{site_name} <span style="color: blue;">π‘οΈ Meteorological</span></h4> |
|
<p><b>Parameter:</b> {parameter}</p> |
|
<p><b>Value:</b> {value} {units}</p> |
|
<p><b>Coordinates:</b> {lat:.4f}, {lon:.4f}</p> |
|
<p><b>Time:</b> {item['DateObserved']} {item['HourObserved']}:00 GMT</p> |
|
<p><b>Station ID:</b> {item['AQSID']}</p> |
|
</div> |
|
""" |
|
tooltip_text = f"{site_name}: {parameter} = {value} {units}" |
|
|
|
|
|
if is_air_quality: |
|
|
|
if aqi <= 50: |
|
marker_color = 'green' |
|
elif aqi <= 100: |
|
marker_color = 'orange' |
|
elif aqi <= 150: |
|
marker_color = 'orange' |
|
elif aqi <= 200: |
|
marker_color = 'red' |
|
elif aqi <= 300: |
|
marker_color = 'purple' |
|
else: |
|
marker_color = 'darkred' |
|
icon_type = 'cloud' |
|
else: |
|
|
|
marker_color = 'blue' |
|
icon_type = 'info-sign' |
|
|
|
|
|
folium.Marker( |
|
[lat, lon], |
|
popup=folium.Popup(popup_content, max_width=300), |
|
tooltip=tooltip_text, |
|
icon=folium.Icon(color=marker_color, icon=icon_type) |
|
).add_to(m) |
|
|
|
added_markers += 1 |
|
|
|
except Exception as e: |
|
print(f"β Error adding marker: {str(e)}") |
|
continue |
|
|
|
print(f"β
Added {added_markers} markers to map") |
|
|
|
|
|
legend_html = """ |
|
<div style="position: fixed; |
|
bottom: 50px; left: 50px; width: 200px; height: 260px; |
|
background-color: white; border:2px solid grey; z-index:9999; |
|
font-size:12px; padding: 10px"> |
|
<h4>Station Legend</h4> |
|
<p><b>π¬οΈ Air Quality (AQI):</b></p> |
|
<p><i class="fa fa-circle" style="color:green"></i> Good (0-50)</p> |
|
<p><i class="fa fa-circle" style="color:orange"></i> Moderate (51-100)</p> |
|
<p><i class="fa fa-circle" style="color:orange"></i> Unhealthy for Sensitive (101-150)</p> |
|
<p><i class="fa fa-circle" style="color:red"></i> Unhealthy (151-200)</p> |
|
<p><i class="fa fa-circle" style="color:purple"></i> Very Unhealthy (201-300)</p> |
|
<p><i class="fa fa-circle" style="color:darkred"></i> Hazardous (301+)</p> |
|
<p><b>π‘οΈ Meteorological:</b></p> |
|
<p><i class="fa fa-circle" style="color:blue"></i> Weather Data</p> |
|
</div> |
|
""" |
|
m.get_root().html.add_child(folium.Element(legend_html)) |
|
|
|
return m._repr_html_() |
|
|
|
def create_data_table(self, data: List[Dict]) -> pd.DataFrame: |
|
"""Create data table""" |
|
if not data: |
|
return pd.DataFrame() |
|
|
|
table_data = [] |
|
for item in data: |
|
is_air_quality = item.get('IsAirQuality', False) |
|
table_data.append({ |
|
'Site Name': item['SiteName'], |
|
'State': item['StateCode'], |
|
'Parameter': item['ParameterName'], |
|
'Type': 'π¬οΈ Air Quality' if is_air_quality else 'π‘οΈ Meteorological', |
|
'Value': item['Value'], |
|
'Units': item['ReportingUnits'], |
|
'AQI': item['AQI'] if is_air_quality else 'N/A', |
|
'Category': item['Category']['Name'], |
|
'Latitude': round(item['Latitude'], 4), |
|
'Longitude': round(item['Longitude'], 4), |
|
'Date': item['DateObserved'], |
|
'Hour (GMT)': item['HourObserved'], |
|
'Station ID': item['AQSID'] |
|
}) |
|
|
|
df = pd.DataFrame(table_data) |
|
|
|
|
|
df['AQI_numeric'] = pd.to_numeric(df['AQI'], errors='coerce').fillna(0) |
|
|
|
|
|
df_sorted = df.sort_values(['AQI_numeric', 'Parameter'], ascending=[False, True]) |
|
|
|
|
|
return df_sorted.drop('AQI_numeric', axis=1) |
|
|
|
|
|
mapper = AccurateAirQualityMapper() |
|
|
|
def update_map(): |
|
"""Update map with accurate coordinates""" |
|
print("π Starting comprehensive air quality and meteorological mapping...") |
|
|
|
|
|
data, status = mapper.fetch_airnow_bulk_data() |
|
|
|
if data: |
|
|
|
df_temp = pd.DataFrame(data) |
|
param_counts = df_temp['ParameterName'].value_counts() |
|
|
|
print(f"\nπ Data Summary:") |
|
print(f"Total stations: {len(df_temp)}") |
|
print(f"Parameters monitored: {df_temp['ParameterName'].nunique()}") |
|
print(f"Unique sites: {df_temp['SiteName'].nunique()}") |
|
|
|
print(f"\nParameter breakdown:") |
|
for param, count in param_counts.head(10).items(): |
|
print(f"{param}: {count}") |
|
|
|
|
|
air_quality_count = len([d for d in data if d.get('IsAirQuality', False)]) |
|
met_count = len(data) - air_quality_count |
|
status = f"β
SUCCESS: {len(data)} total stations ({air_quality_count} air quality + {met_count} meteorological) from {len(set(d['SiteName'] for d in data))} unique sites" |
|
|
|
|
|
map_html = mapper.create_map(data) |
|
|
|
|
|
df = mapper.create_data_table(data) |
|
|
|
return map_html, df, status |
|
|
|
|
|
with gr.Blocks(title="Accurate AirNow Sensor Map", theme=gr.themes.Soft()) as demo: |
|
|
|
gr.Markdown( |
|
""" |
|
# π― Complete AirNow Monitoring Network Map (FIXED) |
|
|
|
**β
IMPROVED COORDINATE MATCHING + FALLBACK DATA** |
|
|
|
This fixed version addresses the coordinate matching issues: |
|
1. **Better EPA Data Parsing**: Handles different CSV column formats |
|
2. **Multiple Lookup Strategies**: Tries various AQS ID matching approaches |
|
3. **Fallback Coordinates**: Uses known city coordinates when EPA lookup fails |
|
4. **Demo Data**: Shows working map even if AirNow data is unavailable |
|
5. **Enhanced Error Handling**: Better debugging and error recovery |
|
|
|
## Key Improvements: |
|
- π§ **Fixed coordinate lookup** with multiple fallback strategies |
|
- π **Demo stations** in major cities if real data unavailable |
|
- π **Better error handling** and debugging output |
|
- π **More robust data parsing** for different file formats |
|
- β‘ **Guaranteed map display** with at least demo data |
|
""" |
|
) |
|
|
|
with gr.Row(): |
|
load_button = gr.Button("π― Load Complete Monitoring Network (FIXED)", variant="primary", size="lg") |
|
|
|
status_text = gr.Markdown("Click the button above to load monitoring stations with improved coordinate matching.") |
|
|
|
with gr.Tabs(): |
|
with gr.TabItem("πΊοΈ Complete Network Map"): |
|
map_output = gr.HTML(label="Fixed AirNow Monitoring Network with Working Coordinates") |
|
|
|
with gr.TabItem("π All Station Data"): |
|
data_table = gr.Dataframe( |
|
label="All Monitoring Stations (Air Quality + Meteorological)", |
|
interactive=False |
|
) |
|
|
|
gr.Markdown( |
|
""" |
|
## Fixes Applied: |
|
|
|
**1. Coordinate Matching**: Multiple strategies for matching AQS IDs with EPA coordinates |
|
**2. Error Recovery**: Fallback to demo data if real data unavailable |
|
**3. Better Parsing**: Handles different CSV column name formats |
|
**4. Debug Output**: Shows exactly what's happening during data processing |
|
**5. Guaranteed Results**: Will always show at least demo stations on map |
|
|
|
## Data Sources: |
|
- **EPA Coordinates**: aqs_monitors.zip (primary) + aqs_sites.zip (backup) |
|
- **AirNow Data**: Real-time hourly files from files.airnowtech.org |
|
- **Fallback**: Demo stations in major US cities with known coordinates |
|
""" |
|
) |
|
|
|
|
|
load_button.click( |
|
fn=update_map, |
|
inputs=[], |
|
outputs=[map_output, data_table, status_text] |
|
) |
|
|
|
if __name__ == "__main__": |
|
demo.launch() |