"""Gradio app that fetches snow-depth chart data from NIWA public servers.

Fetches weekly-updated snow-depth PNG charts for New Zealand alpine
monitoring stations, and probes a few candidate JSON/API endpoints for
raw-data access.  Designed to run on HuggingFace Spaces.
"""

import gradio as gr
import requests
import json
import pandas as pd
from datetime import datetime
import matplotlib.pyplot as plt
import io
import base64
from PIL import Image
import re

# NIWA Snow Monitoring Stations: key -> display metadata + chart image URL.
SNOW_STATIONS = {
    "Mahanga EWS": {
        "name": "Mahanga Electronic Weather Station",
        "location": "Mount Mahanga, Tasman",
        "elevation": "1940m",
        "image_url": "https://webstatic.niwa.co.nz/snow-plots/mahanga-ews-snow-depth-web.png",
    },
    "Mueller Hut": {
        "name": "Mueller Hut",
        "location": "Aoraki/Mount Cook National Park",
        "elevation": "1800m",
        "image_url": "https://webstatic.niwa.co.nz/snow-plots/mueller-hut-snow-depth-web.png",
    },
    "Rose Ridge": {
        "name": "Rose Ridge",
        "location": "Canterbury",
        "elevation": "1580m",
        "image_url": "https://webstatic.niwa.co.nz/snow-plots/rose-ridge-snow-depth-web.png",
    },
}


def fetch_snow_depth_image(station_key):
    """Fetch the snow-depth chart image for one station from NIWA servers.

    Args:
        station_key: A key of ``SNOW_STATIONS``.

    Returns:
        A 3-tuple ``(image, info_markdown, status)`` where ``image`` is a
        ``PIL.Image`` (or ``None`` on failure), ``info_markdown`` is station
        metadata or an error message, and ``status`` is a short status line.
    """
    try:
        station = SNOW_STATIONS[station_key]
        image_url = station["image_url"]

        # A browser-like UA avoids being rejected by the static file server.
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
        }
        response = requests.get(image_url, headers=headers, timeout=10)

        if response.status_code == 200:
            # Convert the downloaded PNG bytes to a PIL Image for Gradio.
            image = Image.open(io.BytesIO(response.content))
            info = f"""
**Station:** {station['name']}
**Location:** {station['location']}
**Elevation:** {station['elevation']}
**Last Updated:** {datetime.now().strftime('%Y-%m-%d %H:%M:%S')} (Image fetch time)

*Note: This chart shows current snow depth relative to historical records. Data is updated weekly.*
"""
            return image, info, "✅ Successfully fetched snow depth data"

        # Single-line message (was previously broken across a line break).
        return (
            None,
            f"❌ Failed to fetch data. HTTP {response.status_code}",
            f"Error fetching from {image_url}",
        )
    except (requests.RequestException, OSError, KeyError) as e:
        # Network failure, bad image bytes, or unknown station key.
        return None, f"❌ Error: {str(e)}", "Failed to connect to NIWA servers"


def try_fetch_json_data():
    """Probe candidate NIWA endpoints for machine-readable JSON snow data.

    Returns:
        A newline-joined report string, one line per probed endpoint.
    """
    potential_endpoints = [
        "https://data.niwa.co.nz/api/snow/depth",
        "https://api.niwa.co.nz/snow/depth",
        "https://webstatic.niwa.co.nz/api/snow-data.json",
        "https://niwa.co.nz/api/snow/stations",
    ]

    results = []
    headers = {'User-Agent': 'Mozilla/5.0 (compatible; SnowDataFetcher/1.0)'}

    for endpoint in potential_endpoints:
        try:
            response = requests.get(endpoint, headers=headers, timeout=5)
        except requests.RequestException:
            results.append(f"❌ {endpoint}: Connection failed")
            continue

        if response.status_code == 200:
            try:
                response.json()  # value unused; only checking it parses
                results.append(f"✅ {endpoint}: Found JSON data")
            except ValueError:
                results.append(f"🔍 {endpoint}: Response received but not JSON")
        else:
            results.append(f"❌ {endpoint}: HTTP {response.status_code}")

    return "\n".join(results)


def get_all_stations_data():
    """Fetch chart images for every configured station.

    Returns:
        A 2-tuple ``(images, report)``: a list of ``(PIL.Image, caption)``
        pairs for the Gradio gallery, and a newline-joined status report.
    """
    results = []
    images = []

    for station_key in SNOW_STATIONS.keys():
        try:
            image, info, status = fetch_snow_depth_image(station_key)
            if image:
                images.append(
                    (image, f"{SNOW_STATIONS[station_key]['name']} Snow Depth")
                )
                results.append(f"✅ {station_key}: {status}")
            else:
                results.append(f"❌ {station_key}: {status}")
        except Exception as e:
            # Best-effort: one bad station must not abort the whole sweep.
            results.append(f"❌ {station_key}: Error - {str(e)}")

    return images, "\n".join(results)


def search_additional_endpoints():
    """Scan known NIWA pages for hints of additional data/API endpoints.

    Returns:
        A newline-joined findings report, or a fallback message when no
        page yielded anything.
    """
    # Candidate pages that may reference data sources or developer APIs.
    search_urls = [
        "https://niwa.co.nz/freshwater/snow-and-ice-network/",
        "https://data.niwa.co.nz/",
        "https://developer.niwa.co.nz/",
    ]

    findings = []
    headers = {'User-Agent': 'Mozilla/5.0'}

    for url in search_urls:
        try:
            response = requests.get(url, headers=headers, timeout=10)
        except requests.RequestException:
            findings.append(f"❌ {url}: Connection failed")
            continue

        if response.status_code == 200:
            # Naive keyword scan of the page body for API/data references.
            content = response.text.lower()
            if 'api' in content:
                findings.append(f"✅ {url}: Contains API references")
            if 'json' in content:
                findings.append(f"🔍 {url}: Contains JSON references")
            if 'data' in content:
                findings.append(f"📊 {url}: Contains data references")
        else:
            findings.append(f"❌ {url}: HTTP {response.status_code}")

    return "\n".join(findings) if findings else "No additional endpoints found"


# ---------------------------------------------------------------------------
# Gradio interface
# ---------------------------------------------------------------------------
with gr.Blocks(title="NIWA Snow Depth Monitor", theme=gr.themes.Soft()) as app:
    gr.Markdown("""
    # 🏔️ NIWA Snow Depth Data Fetcher

    This app fetches real-time snow depth data from New Zealand's National Institute of Water and Atmospheric Research (NIWA) public servers.

    **Data Sources:**
    - Snow depth charts from NIWA's Electronic Weather Stations
    - Updated weekly with quality-controlled data
    - Covers key South Island alpine monitoring sites
    """)

    with gr.Tab("📊 Individual Station Data"):
        with gr.Row():
            station_dropdown = gr.Dropdown(
                choices=list(SNOW_STATIONS.keys()),
                value="Mahanga EWS",
                label="Select Snow Monitoring Station",
                info="Choose a NIWA snow monitoring station",
            )
            fetch_btn = gr.Button("🔄 Fetch Snow Data", variant="primary")

        with gr.Row():
            with gr.Column(scale=2):
                snow_image = gr.Image(label="Snow Depth Chart", height=400)
            with gr.Column(scale=1):
                station_info = gr.Markdown(label="Station Information")
                fetch_status = gr.Textbox(label="Status", interactive=False)

    with gr.Tab("🗺️ All Stations Overview"):
        with gr.Row():
            fetch_all_btn = gr.Button("📡 Fetch All Station Data", variant="primary")

        all_stations_gallery = gr.Gallery(
            label="All Snow Monitoring Stations",
            columns=2,
            height=400,
        )
        all_stations_status = gr.Textbox(label="Fetch Results", interactive=False)

    with gr.Tab("🔍 API Discovery"):
        gr.Markdown("""
        ### API Endpoint Discovery
        This section attempts to find additional JSON/API endpoints for accessing raw snow data.
        """)

        with gr.Row():
            search_json_btn = gr.Button("🔍 Search for JSON APIs", variant="secondary")
            search_endpoints_btn = gr.Button("🌐 Search Additional Endpoints", variant="secondary")

        json_results = gr.Textbox(label="JSON API Search Results", lines=8, interactive=False)
        endpoint_results = gr.Textbox(label="Additional Endpoint Search", lines=6, interactive=False)

    with gr.Tab("ℹ️ About"):
        gr.Markdown("""
        ### About This App

        **Data Source:** New Zealand's National Institute of Water and Atmospheric Research (NIWA)

        **Monitoring Stations:**
        - **Mahanga EWS**: Mount Mahanga, Tasman (1940m elevation)
        - **Mueller Hut**: Aoraki/Mount Cook National Park (1800m elevation)
        - **Rose Ridge**: Canterbury (1580m elevation)

        **Data Notes:**
        - Charts show current snow depth relative to historical records
        - Data is updated weekly from raw measurements
        - Single monitoring point may not represent entire surrounding area
        - Inherent uncertainty in snow depth measurements

        **Technical Details:**
        - Fetches PNG chart images from NIWA's public web servers
        - Attempts to discover JSON API endpoints for raw data access
        - No authentication required for public data

        **Use Cases:**
        - Alpine weather monitoring
        - Skiing and mountaineering planning
        - Climate research and analysis
        - Avalanche risk assessment support

        **Limitations:**
        - Visual charts only (numerical data requires NIWA DataHub registration)
        - Weekly update frequency
        - Limited to available monitoring stations

        For raw numerical data access, visit: https://data.niwa.co.nz/
        """)

    # Event handlers — wire buttons to the fetch/probe functions above.
    fetch_btn.click(
        fn=fetch_snow_depth_image,
        inputs=[station_dropdown],
        outputs=[snow_image, station_info, fetch_status],
    )

    fetch_all_btn.click(
        fn=get_all_stations_data,
        outputs=[all_stations_gallery, all_stations_status],
    )

    search_json_btn.click(
        fn=try_fetch_json_data,
        outputs=[json_results],
    )

    search_endpoints_btn.click(
        fn=search_additional_endpoints,
        outputs=[endpoint_results],
    )


# HuggingFace Spaces entry point.
if __name__ == "__main__":
    app.launch()

# Requirements for HuggingFace Spaces (requirements.txt):
#   gradio>=4.0.0
#   requests>=2.25.0
#   pandas>=1.3.0
#   matplotlib>=3.5.0
#   Pillow>=8.0.0
#
# App file structure for HuggingFace Spaces:
#   app.py (this file)
#   requirements.txt (with the above dependencies)
#   README.md (with app description)