Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -7,128 +7,49 @@ import matplotlib.pyplot as plt
|
|
7 |
import numpy as np
|
8 |
import io
|
9 |
import base64
|
10 |
-
from PIL import Image, ImageDraw
|
11 |
import re
|
12 |
-
from requests.auth import HTTPDigestAuth
|
13 |
import cv2
|
14 |
|
15 |
-
# Complete NIWA Snow and Ice Network
|
16 |
SNOW_STATIONS = {
|
17 |
"Mahanga EWS": {
|
18 |
"name": "Mahanga Electronic Weather Station",
|
19 |
"location": "Mount Mahanga, Tasman",
|
20 |
-
"elevation": "1940m",
|
21 |
-
"years": "2009-present",
|
22 |
"lat": -41.56, "lon": 172.27,
|
23 |
-
"image_url": "https://webstatic.niwa.co.nz/snow-plots/mahanga-ews-snow-depth-web.png"
|
24 |
-
"page_url": "https://niwa.co.nz/freshwater/snow-and-ice-network/mahanga-electronic-weather-station-ews"
|
25 |
},
|
26 |
"Mueller Hut EWS": {
|
27 |
"name": "Mueller Hut Electronic Weather Station",
|
28 |
-
"location": "Aoraki/Mount Cook National Park",
|
29 |
-
"elevation": "1818m",
|
30 |
-
"years": "2010-present",
|
31 |
"lat": -43.69, "lon": 170.11,
|
32 |
-
"image_url": "https://webstatic.niwa.co.nz/snow-plots/mueller-hut-ews-snow-depth-web.png"
|
33 |
-
"page_url": "https://niwa.co.nz/freshwater/snow-and-ice-network/mueller-hut-ews"
|
34 |
},
|
35 |
"Mt Potts EWS": {
|
36 |
"name": "Mt Potts Electronic Weather Station",
|
37 |
-
"location": "Canterbury (highest elevation
|
38 |
-
"elevation": "2128m",
|
39 |
-
"years": "2012-present",
|
40 |
"lat": -43.53, "lon": 171.17,
|
41 |
-
"image_url": "https://webstatic.niwa.co.nz/snow-plots/mt-potts-ews-snow-depth-web.png"
|
42 |
-
"page_url": "https://niwa.co.nz/freshwater/snow-and-ice-network/mt-potts-ews"
|
43 |
},
|
44 |
"Upper Rakaia EWS": {
|
45 |
"name": "Upper Rakaia Electronic Weather Station",
|
46 |
-
"location": "Jollie Range,
|
47 |
-
"elevation": "1752m",
|
48 |
-
"years": "2010-present",
|
49 |
"lat": -43.43, "lon": 171.29,
|
50 |
-
"image_url": "https://webstatic.niwa.co.nz/snow-plots/upper-rakaia-ews-snow-depth-web.png"
|
51 |
-
"page_url": "https://niwa.co.nz/freshwater/snow-and-ice-network/upper-rakaia-ews"
|
52 |
},
|
53 |
"Albert Burn EWS": {
|
54 |
-
"name": "Albert Burn Electronic Weather Station",
|
55 |
-
"location": "
|
56 |
-
"elevation": "1280m",
|
57 |
-
"years": "2012-present",
|
58 |
"lat": -44.58, "lon": 169.13,
|
59 |
-
"image_url": "https://webstatic.niwa.co.nz/snow-plots/albert-burn-ews-snow-depth-web.png"
|
60 |
-
"page_url": "https://niwa.co.nz/freshwater/snow-and-ice-network/albert-burn-ews"
|
61 |
}
|
62 |
}
|
63 |
|
64 |
-
def
|
65 |
-
"""
|
66 |
-
results = []
|
67 |
-
headers = {
|
68 |
-
'User-Agent': 'Mozilla/5.0 (compatible; NIWADataFetcher/1.0)',
|
69 |
-
'Accept': 'application/json'
|
70 |
-
}
|
71 |
-
|
72 |
-
# Real NIWA API structure from Teamwork docs:
|
73 |
-
# https://data.niwa.co.nz/api/data/products/1/$featureid/$productid/$datastartdate/$dataenddate
|
74 |
-
|
75 |
-
end_date = datetime.now().strftime('%Y-%m-%dT%H:%M:%S+1200')
|
76 |
-
start_date = (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%dT%H:%M:%S+1200')
|
77 |
-
|
78 |
-
# Snow Water Equivalent product ID from docs: 43815863
|
79 |
-
test_endpoints = [
|
80 |
-
"https://data.niwa.co.nz/api/data/products", # List available products
|
81 |
-
f"https://data.niwa.co.nz/api/data/products/43815863/{start_date}/{end_date}", # Snow Water Equivalent
|
82 |
-
f"https://data.niwa.co.nz/api/data/products/1/1/43815863/{start_date}/{end_date}", # With feature ID
|
83 |
-
"https://developer.niwa.co.nz/docs/tide-api/1/overview", # Working NIWA API
|
84 |
-
"https://developer.niwa.co.nz/docs/uv-api/1/overview", # Working NIWA API
|
85 |
-
"https://developer.niwa.co.nz/docs/solarview-api/1/overview" # Working NIWA API
|
86 |
-
]
|
87 |
-
|
88 |
-
for endpoint in test_endpoints:
|
89 |
-
try:
|
90 |
-
# Try with and without authentication
|
91 |
-
auth = HTTPDigestAuth(username, password) if username and password else None
|
92 |
-
|
93 |
-
response = requests.get(endpoint, headers=headers, auth=auth, timeout=10)
|
94 |
-
|
95 |
-
if response.status_code == 200:
|
96 |
-
try:
|
97 |
-
if 'application/json' in response.headers.get('content-type', ''):
|
98 |
-
data = response.json()
|
99 |
-
results.append(f"β
{endpoint}: JSON data received ({len(str(data))} chars)")
|
100 |
-
|
101 |
-
# Check for actual snow/weather data
|
102 |
-
if isinstance(data, dict):
|
103 |
-
if 'data' in data:
|
104 |
-
results.append(f" π Contains 'data' field with {len(data.get('data', []))} items")
|
105 |
-
if 'Snow' in str(data) or 'snow' in str(data):
|
106 |
-
results.append(f" βοΈ Contains snow-related data!")
|
107 |
-
if 'propName' in data:
|
108 |
-
results.append(f" π·οΈ Property: {data.get('propName', 'Unknown')}")
|
109 |
-
else:
|
110 |
-
results.append(f"π {endpoint}: Response received (HTML/other)")
|
111 |
-
except Exception as e:
|
112 |
-
results.append(f"π {endpoint}: Response received but parsing failed")
|
113 |
-
|
114 |
-
elif response.status_code == 401:
|
115 |
-
results.append(f"π {endpoint}: Authentication required (401)")
|
116 |
-
elif response.status_code == 403:
|
117 |
-
results.append(f"π« {endpoint}: Access forbidden (403)")
|
118 |
-
elif response.status_code == 404:
|
119 |
-
results.append(f"β {endpoint}: Not found (404)")
|
120 |
-
else:
|
121 |
-
results.append(f"β {endpoint}: HTTP {response.status_code}")
|
122 |
-
|
123 |
-
except requests.Timeout:
|
124 |
-
results.append(f"β±οΈ {endpoint}: Request timeout")
|
125 |
-
except Exception as e:
|
126 |
-
results.append(f"β {endpoint}: Error - {str(e)[:50]}...")
|
127 |
-
|
128 |
-
return "\n".join(results)
|
129 |
-
|
130 |
-
def extract_data_from_chart(image):
|
131 |
-
"""Extract numerical data from snow depth chart images using computer vision"""
|
132 |
try:
|
133 |
if image is None:
|
134 |
return None, "No image provided"
|
@@ -136,179 +57,276 @@ def extract_data_from_chart(image):
|
|
136 |
# Convert PIL to numpy array
|
137 |
img_array = np.array(image)
|
138 |
|
139 |
-
# Convert to
|
140 |
gray = cv2.cvtColor(img_array, cv2.COLOR_RGB2GRAY)
|
|
|
141 |
|
142 |
-
# Basic chart analysis
|
143 |
height, width = gray.shape
|
144 |
|
145 |
-
#
|
146 |
-
# Look for plot lines and data
|
147 |
edges = cv2.Canny(gray, 50, 150)
|
|
|
148 |
|
149 |
-
|
150 |
-
|
151 |
-
|
152 |
-
|
153 |
-
|
154 |
-
|
155 |
-
|
156 |
-
|
157 |
-
|
158 |
-
|
159 |
-
|
160 |
-
|
161 |
-
|
162 |
-
|
163 |
-
|
164 |
-
|
165 |
-
|
166 |
-
|
167 |
-
|
168 |
-
|
169 |
-
|
170 |
-
|
171 |
-
|
172 |
-
|
173 |
-
|
174 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
175 |
|
176 |
-
**
|
177 |
-
-
|
178 |
-
-
|
179 |
-
-
|
180 |
-
- Time-series data points need sophisticated detection
|
181 |
|
182 |
-
**
|
183 |
-
|
184 |
-
|
185 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
186 |
"""
|
187 |
-
|
188 |
-
|
189 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
190 |
except Exception as e:
|
191 |
-
return None, f"Chart analysis failed: {str(e)}"
|
192 |
|
193 |
-
def
|
194 |
-
"""Fetch
|
195 |
try:
|
196 |
station = SNOW_STATIONS[station_key]
|
197 |
-
image_url = station["image_url"]
|
198 |
-
|
199 |
-
headers = {
|
200 |
-
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
|
201 |
-
}
|
202 |
|
203 |
-
|
|
|
|
|
204 |
|
205 |
if response.status_code == 200:
|
206 |
-
# Convert to PIL Image
|
207 |
image = Image.open(io.BytesIO(response.content))
|
208 |
|
209 |
-
#
|
210 |
-
|
211 |
|
|
|
212 |
info = f"""
|
213 |
-
|
|
|
214 |
**Location:** {station['location']} ({station['lat']}, {station['lon']})
|
215 |
**Elevation:** {station['elevation']}
|
216 |
**Data Period:** {station['years']}
|
217 |
|
218 |
-
**
|
219 |
{analysis}
|
220 |
-
|
|
|
|
|
|
|
221 |
|
222 |
-
return image, info, "β
|
223 |
|
224 |
else:
|
225 |
-
return None, f"β Failed to fetch
|
226 |
|
227 |
except Exception as e:
|
228 |
-
return None, f"β Error: {str(e)}", "
|
229 |
|
230 |
-
def
|
231 |
-
"""Test alternative weather
|
232 |
results = []
|
233 |
-
headers = {'User-Agent': 'Mozilla/5.0'}
|
234 |
|
235 |
-
# Test
|
236 |
-
|
|
|
|
|
|
|
|
|
237 |
|
238 |
apis_to_test = [
|
239 |
-
|
240 |
-
|
241 |
-
|
242 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
243 |
]
|
244 |
|
245 |
-
for
|
246 |
try:
|
247 |
-
|
|
|
|
|
|
|
|
|
248 |
if response.status_code == 200:
|
249 |
-
results.append(f"β
{
|
|
|
|
|
|
|
|
|
|
|
|
|
250 |
elif response.status_code == 401:
|
251 |
-
results.append(f"π {
|
252 |
-
elif response.status_code ==
|
253 |
-
results.append(f"
|
254 |
else:
|
255 |
-
results.append(f"β {
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
256 |
except:
|
257 |
-
|
258 |
|
259 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
260 |
|
261 |
-
# Create
|
262 |
-
with gr.Blocks(title="
|
263 |
gr.Markdown("""
|
264 |
-
# ποΈ
|
265 |
|
266 |
-
**
|
267 |
-
1. **Real NIWA APIs** - Test correct endpoints with authentication
|
268 |
-
2. **Chart Data Extraction** - Computer vision analysis of snow depth charts
|
269 |
|
270 |
-
|
|
|
|
|
271 |
""")
|
272 |
|
273 |
-
with gr.Tab("π Real NIWA API Testing"):
|
274 |
-
gr.Markdown("""
|
275 |
-
### Test Real NIWA Data APIs
|
276 |
-
|
277 |
-
Based on internal NIWA documentation, the correct API structure is:
|
278 |
-
`https://data.niwa.co.nz/api/data/products/1/{feature_id}/{product_id}/{start_date}/{end_date}`
|
279 |
-
|
280 |
-
Snow Water Equivalent Product ID: **43815863**
|
281 |
-
""")
|
282 |
-
|
283 |
-
with gr.Row():
|
284 |
-
with gr.Column():
|
285 |
-
username_input = gr.Textbox(
|
286 |
-
label="NIWA Username",
|
287 |
-
placeholder="Your NIWA DataHub username",
|
288 |
-
type="text"
|
289 |
-
)
|
290 |
-
password_input = gr.Textbox(
|
291 |
-
label="NIWA Password",
|
292 |
-
placeholder="Your NIWA DataHub password",
|
293 |
-
type="password"
|
294 |
-
)
|
295 |
-
with gr.Column():
|
296 |
-
test_apis_btn = gr.Button("π Test Real NIWA APIs", variant="primary")
|
297 |
-
test_alt_btn = gr.Button("π Test Alternative APIs", variant="secondary")
|
298 |
-
|
299 |
-
api_results = gr.Textbox(label="API Test Results", lines=15, interactive=False)
|
300 |
-
|
301 |
-
gr.Markdown("""
|
302 |
-
**Expected Results:**
|
303 |
-
- Without credentials: 401 (Authentication required)
|
304 |
-
- With valid credentials: JSON data with snow measurements
|
305 |
-
- Real-time and historical snow depth data in mm
|
306 |
-
""")
|
307 |
-
|
308 |
with gr.Tab("π Chart Data Extraction"):
|
309 |
gr.Markdown("""
|
310 |
-
### Extract Data from Snow Depth Charts
|
311 |
-
|
312 |
""")
|
313 |
|
314 |
with gr.Row():
|
@@ -316,118 +334,122 @@ with gr.Blocks(title="NIWA Snow Data - Real APIs + Chart Extraction", theme=gr.t
|
|
316 |
choices=list(SNOW_STATIONS.keys()),
|
317 |
value="Mueller Hut EWS",
|
318 |
label="Select Snow Station",
|
319 |
-
info="
|
320 |
)
|
321 |
-
|
322 |
|
323 |
with gr.Row():
|
324 |
with gr.Column(scale=2):
|
325 |
chart_image = gr.Image(label="Snow Depth Chart", height=500)
|
326 |
with gr.Column(scale=1):
|
327 |
-
|
328 |
-
|
329 |
-
chart_status = gr.Textbox(label="Analysis Status", interactive=False)
|
330 |
-
|
331 |
-
with gr.Tab("πΊοΈ All Stations Overview"):
|
332 |
-
gr.Markdown("### Complete Station Network with Coordinates")
|
333 |
-
|
334 |
-
# Create station details table
|
335 |
-
station_data = []
|
336 |
-
for key, station in SNOW_STATIONS.items():
|
337 |
-
station_data.append([
|
338 |
-
station['name'],
|
339 |
-
station['location'],
|
340 |
-
station['elevation'],
|
341 |
-
f"{station['lat']}, {station['lon']}",
|
342 |
-
station['years']
|
343 |
-
])
|
344 |
-
|
345 |
-
station_table = gr.Dataframe(
|
346 |
-
value=station_data,
|
347 |
-
headers=["Station", "Location", "Elevation", "Coordinates", "Data Period"],
|
348 |
-
label="NIWA Snow & Ice Network Stations"
|
349 |
-
)
|
350 |
-
|
351 |
-
fetch_all_btn = gr.Button("π‘ Fetch All Station Charts", variant="primary")
|
352 |
-
all_results = gr.Gallery(label="All Station Charts", columns=2, height=400)
|
353 |
-
|
354 |
-
with gr.Tab("π Real Data Access Guide"):
|
355 |
-
gr.Markdown("""
|
356 |
-
## π― How to Get Real Numerical Snow Data
|
357 |
-
|
358 |
-
### Method 1: NIWA DataHub API (Most Reliable)
|
359 |
|
360 |
-
|
361 |
-
**Step 2:** Get your credentials and use this app's API testing tab
|
362 |
-
**Step 3:** Use the correct API endpoint structure:
|
363 |
|
364 |
-
|
365 |
-
|
366 |
-
|
367 |
-
|
368 |
-
|
369 |
-
- Snow Water Equivalent: `43815863`
|
370 |
-
- You'll need to discover other snow depth product IDs
|
371 |
-
|
372 |
-
**Authentication:** HTTP Digest Auth with your NIWA username/password
|
373 |
|
374 |
-
|
375 |
|
376 |
-
|
377 |
-
|
378 |
-
|
379 |
-
|
|
|
|
|
|
|
|
|
|
|
380 |
|
381 |
-
|
|
|
382 |
|
383 |
-
|
384 |
-
|
385 |
-
- Limited accuracy compared to raw data
|
386 |
-
- Good for proof-of-concept work
|
387 |
|
388 |
-
|
389 |
-
-
|
390 |
-
-
|
391 |
-
-
|
392 |
-
- β
Historical time series data
|
393 |
-
- β
Quality-controlled research data
|
394 |
-
- β
Real-time updates during snow season
|
395 |
|
396 |
-
**
|
|
|
|
|
|
|
397 |
""")
|
398 |
|
399 |
-
|
400 |
-
|
401 |
-
|
402 |
-
|
403 |
-
|
404 |
-
|
405 |
-
|
406 |
-
|
407 |
-
|
408 |
-
|
409 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
410 |
|
411 |
-
|
412 |
-
|
|
|
413 |
inputs=[station_dropdown],
|
414 |
-
outputs=[chart_image,
|
415 |
)
|
416 |
|
417 |
-
|
418 |
-
|
419 |
-
|
420 |
-
|
421 |
-
image, _, status = fetch_snow_depth_image(station_key)
|
422 |
-
if image:
|
423 |
-
images.append((image, f"{SNOW_STATIONS[station_key]['name']}"))
|
424 |
-
except:
|
425 |
-
continue
|
426 |
-
return images
|
427 |
|
428 |
-
|
429 |
-
fn=
|
430 |
-
outputs=[
|
431 |
)
|
432 |
|
433 |
# Launch for HuggingFace Spaces
|
@@ -445,10 +467,10 @@ numpy>=1.21.0
|
|
445 |
opencv-python>=4.5.0
|
446 |
"""
|
447 |
|
448 |
-
#
|
449 |
"""
|
450 |
---
|
451 |
-
title:
|
452 |
emoji: ποΈ
|
453 |
colorFrom: blue
|
454 |
colorTo: white
|
@@ -458,34 +480,40 @@ app_file: app.py
|
|
458 |
pinned: false
|
459 |
---
|
460 |
|
461 |
-
#
|
462 |
-
|
463 |
-
The ultimate solution for accessing New Zealand snow depth data through multiple approaches.
|
464 |
|
465 |
-
|
466 |
|
467 |
-
|
468 |
-
- Tests correct API endpoints from internal NIWA documentation
|
469 |
-
- HTTP Digest authentication support
|
470 |
-
- Snow Water Equivalent product ID: 43815863
|
471 |
-
- Real-time and historical data access
|
472 |
|
473 |
**Chart Data Extraction:**
|
474 |
-
- Computer vision analysis of snow depth charts
|
475 |
-
- Extracts approximate values
|
476 |
-
-
|
477 |
-
|
|
|
|
|
|
|
|
|
478 |
|
479 |
-
**
|
480 |
-
-
|
481 |
-
-
|
482 |
-
-
|
483 |
|
484 |
-
##
|
|
|
|
|
|
|
|
|
|
|
485 |
|
486 |
-
|
487 |
-
|
488 |
-
|
|
|
|
|
|
|
489 |
|
490 |
-
Perfect
|
491 |
"""
|
|
|
7 |
import numpy as np
|
8 |
import io
|
9 |
import base64
|
10 |
+
from PIL import Image, ImageDraw, ImageFilter
|
11 |
import re
|
|
|
12 |
import cv2
|
13 |
|
14 |
+
# Complete NIWA Snow and Ice Network Stations
|
15 |
SNOW_STATIONS = {
|
16 |
"Mahanga EWS": {
|
17 |
"name": "Mahanga Electronic Weather Station",
|
18 |
"location": "Mount Mahanga, Tasman",
|
19 |
+
"elevation": "1940m", "years": "2009-present",
|
|
|
20 |
"lat": -41.56, "lon": 172.27,
|
21 |
+
"image_url": "https://webstatic.niwa.co.nz/snow-plots/mahanga-ews-snow-depth-web.png"
|
|
|
22 |
},
|
23 |
"Mueller Hut EWS": {
|
24 |
"name": "Mueller Hut Electronic Weather Station",
|
25 |
+
"location": "Aoraki/Mount Cook National Park",
|
26 |
+
"elevation": "1818m", "years": "2010-present",
|
|
|
27 |
"lat": -43.69, "lon": 170.11,
|
28 |
+
"image_url": "https://webstatic.niwa.co.nz/snow-plots/mueller-hut-ews-snow-depth-web.png"
|
|
|
29 |
},
|
30 |
"Mt Potts EWS": {
|
31 |
"name": "Mt Potts Electronic Weather Station",
|
32 |
+
"location": "Canterbury (highest elevation)",
|
33 |
+
"elevation": "2128m", "years": "2012-present",
|
|
|
34 |
"lat": -43.53, "lon": 171.17,
|
35 |
+
"image_url": "https://webstatic.niwa.co.nz/snow-plots/mt-potts-ews-snow-depth-web.png"
|
|
|
36 |
},
|
37 |
"Upper Rakaia EWS": {
|
38 |
"name": "Upper Rakaia Electronic Weather Station",
|
39 |
+
"location": "Jollie Range", "elevation": "1752m", "years": "2010-present",
|
|
|
|
|
40 |
"lat": -43.43, "lon": 171.29,
|
41 |
+
"image_url": "https://webstatic.niwa.co.nz/snow-plots/upper-rakaia-ews-snow-depth-web.png"
|
|
|
42 |
},
|
43 |
"Albert Burn EWS": {
|
44 |
+
"name": "Albert Burn Electronic Weather Station",
|
45 |
+
"location": "Mt Aspiring region", "elevation": "1280m", "years": "2012-present",
|
|
|
|
|
46 |
"lat": -44.58, "lon": 169.13,
|
47 |
+
"image_url": "https://webstatic.niwa.co.nz/snow-plots/albert-burn-ews-snow-depth-web.png"
|
|
|
48 |
}
|
49 |
}
|
50 |
|
51 |
+
def extract_snow_data_from_chart(image):
|
52 |
+
"""Advanced chart data extraction using computer vision"""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
53 |
try:
|
54 |
if image is None:
|
55 |
return None, "No image provided"
|
|
|
57 |
# Convert PIL to numpy array
|
58 |
img_array = np.array(image)
|
59 |
|
60 |
+
# Convert to different color spaces for analysis
|
61 |
gray = cv2.cvtColor(img_array, cv2.COLOR_RGB2GRAY)
|
62 |
+
hsv = cv2.cvtColor(img_array, cv2.COLOR_RGB2HSV)
|
63 |
|
|
|
64 |
height, width = gray.shape
|
65 |
|
66 |
+
# 1. Detect chart boundaries and axes
|
|
|
67 |
edges = cv2.Canny(gray, 50, 150)
|
68 |
+
lines = cv2.HoughLinesP(edges, 1, np.pi/180, threshold=100, minLineLength=100, maxLineGap=10)
|
69 |
|
70 |
+
chart_bounds = {"left": 0, "right": width, "top": 0, "bottom": height}
|
71 |
+
if lines is not None:
|
72 |
+
# Find potential axis lines (long horizontal/vertical lines)
|
73 |
+
h_lines = [line for line in lines if abs(line[0][1] - line[0][3]) < 10] # Horizontal
|
74 |
+
v_lines = [line for line in lines if abs(line[0][0] - line[0][2]) < 10] # Vertical
|
75 |
+
|
76 |
+
if h_lines:
|
77 |
+
chart_bounds["bottom"] = max([line[0][1] for line in h_lines])
|
78 |
+
if v_lines:
|
79 |
+
chart_bounds["left"] = min([line[0][0] for line in v_lines])
|
80 |
+
|
81 |
+
# 2. Color-based data line detection
|
82 |
+
# Snow depth lines are typically blue/dark colored
|
83 |
+
blue_channel = img_array[:, :, 2] # Blue channel
|
84 |
+
blue_mask = blue_channel > np.mean(blue_channel) + np.std(blue_channel)
|
85 |
+
|
86 |
+
# 3. Extract data points along chart width
|
87 |
+
chart_start = chart_bounds["left"] + 50 # Offset from y-axis
|
88 |
+
chart_end = chart_bounds["right"] - 50
|
89 |
+
chart_width = chart_end - chart_start
|
90 |
+
|
91 |
+
# Sample points across the chart
|
92 |
+
num_samples = min(50, chart_width // 10)
|
93 |
+
x_positions = np.linspace(chart_start, chart_end, num_samples, dtype=int)
|
94 |
+
|
95 |
+
snow_depth_values = []
|
96 |
+
dates = []
|
97 |
+
|
98 |
+
# For each x position, find the lowest blue/dark pixel (data line)
|
99 |
+
for i, x in enumerate(x_positions):
|
100 |
+
if x < width:
|
101 |
+
# Look for data line in this column
|
102 |
+
column = gray[chart_bounds["top"]:chart_bounds["bottom"], x]
|
103 |
+
|
104 |
+
# Find darkest point (likely data line)
|
105 |
+
dark_pixels = np.where(column < np.mean(column) - np.std(column))[0]
|
106 |
+
|
107 |
+
if len(dark_pixels) > 0:
|
108 |
+
# Get the lowest dark point (highest snow depth)
|
109 |
+
data_y = dark_pixels[-1] + chart_bounds["top"]
|
110 |
+
|
111 |
+
# Convert pixel position to approximate snow depth
|
112 |
+
# Assume chart shows 0-300cm range (typical for these stations)
|
113 |
+
chart_height = chart_bounds["bottom"] - chart_bounds["top"]
|
114 |
+
relative_position = (chart_bounds["bottom"] - data_y) / chart_height
|
115 |
+
estimated_depth = relative_position * 300 # cm (rough estimate)
|
116 |
+
|
117 |
+
snow_depth_values.append(max(0, estimated_depth))
|
118 |
+
|
119 |
+
# Estimate date (assume last 12 months)
|
120 |
+
date_fraction = i / (num_samples - 1)
|
121 |
+
days_ago = int((1 - date_fraction) * 365)
|
122 |
+
estimated_date = datetime.now() - timedelta(days=days_ago)
|
123 |
+
dates.append(estimated_date.strftime('%Y-%m-%d'))
|
124 |
+
else:
|
125 |
+
snow_depth_values.append(0)
|
126 |
+
days_ago = int((1 - (i / (num_samples - 1))) * 365)
|
127 |
+
estimated_date = datetime.now() - timedelta(days=days_ago)
|
128 |
+
dates.append(estimated_date.strftime('%Y-%m-%d'))
|
129 |
+
|
130 |
+
# 4. Create simple statistics
|
131 |
+
if snow_depth_values:
|
132 |
+
current_depth = snow_depth_values[-1] if snow_depth_values else 0
|
133 |
+
max_depth = max(snow_depth_values)
|
134 |
+
avg_depth = np.mean(snow_depth_values)
|
135 |
+
|
136 |
+
# Create basic data table
|
137 |
+
data_table = pd.DataFrame({
|
138 |
+
'Date': dates[-10:], # Last 10 points
|
139 |
+
'Estimated_Snow_Depth_cm': [round(val, 1) for val in snow_depth_values[-10:]]
|
140 |
+
})
|
141 |
+
|
142 |
+
analysis = f"""
|
143 |
+
**Chart Data Extraction Results:**
|
144 |
+
|
145 |
+
**Current Estimates:**
|
146 |
+
- Current snow depth: ~{current_depth:.1f} cm
|
147 |
+
- Maximum detected: ~{max_depth:.1f} cm
|
148 |
+
- Average depth: ~{avg_depth:.1f} cm
|
149 |
+
|
150 |
+
**Data Points Extracted:** {len(snow_depth_values)}
|
151 |
|
152 |
+
**Chart Analysis:**
|
153 |
+
- Image size: {width}x{height} pixels
|
154 |
+
- Chart boundaries detected: {chart_bounds}
|
155 |
+
- Blue/dark pixel data lines found: {np.sum(blue_mask)} pixels
|
|
|
156 |
|
157 |
+
**Latest Data Points:**
|
158 |
+
{data_table.to_string(index=False)}
|
159 |
+
|
160 |
+
**β οΈ Important Limitations:**
|
161 |
+
- These are ROUGH ESTIMATES from image analysis
|
162 |
+
- Actual y-axis scale unknown (assumed 0-300cm)
|
163 |
+
- Date estimation based on chart width assumption
|
164 |
+
- Accuracy depends on chart quality and data line visibility
|
165 |
+
- For research use, get real data from NIWA DataHub
|
166 |
+
|
167 |
+
**β
What This IS Useful For:**
|
168 |
+
- Quick trend assessment
|
169 |
+
- Approximate current conditions
|
170 |
+
- Comparing relative levels between stations
|
171 |
+
- Proof-of-concept for data access
|
172 |
"""
|
173 |
+
|
174 |
+
return {
|
175 |
+
'estimated_current_depth': current_depth,
|
176 |
+
'estimated_max_depth': max_depth,
|
177 |
+
'data_points': len(snow_depth_values),
|
178 |
+
'chart_bounds': chart_bounds,
|
179 |
+
'data_table': data_table
|
180 |
+
}, analysis
|
181 |
+
|
182 |
+
else:
|
183 |
+
return None, "β Could not extract data points from chart image"
|
184 |
+
|
185 |
except Exception as e:
|
186 |
+
return None, f"β Chart analysis failed: {str(e)}"
|
187 |
|
188 |
+
def fetch_and_analyze_station(station_key):
|
189 |
+
"""Fetch image and extract data for a specific station"""
|
190 |
try:
|
191 |
station = SNOW_STATIONS[station_key]
|
|
|
|
|
|
|
|
|
|
|
192 |
|
193 |
+
# Fetch image
|
194 |
+
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'}
|
195 |
+
response = requests.get(station["image_url"], headers=headers, timeout=15)
|
196 |
|
197 |
if response.status_code == 200:
|
|
|
198 |
image = Image.open(io.BytesIO(response.content))
|
199 |
|
200 |
+
# Extract data
|
201 |
+
extracted_data, analysis = extract_snow_data_from_chart(image)
|
202 |
|
203 |
+
# Create comprehensive station info
|
204 |
info = f"""
|
205 |
+
## {station['name']}
|
206 |
+
|
207 |
**Location:** {station['location']} ({station['lat']}, {station['lon']})
|
208 |
**Elevation:** {station['elevation']}
|
209 |
**Data Period:** {station['years']}
|
210 |
|
211 |
+
**Extracted Data Analysis:**
|
212 |
{analysis}
|
213 |
+
|
214 |
+
**Source:** NIWA Snow & Ice Network
|
215 |
+
**Image URL:** {station['image_url']}
|
216 |
+
"""
|
217 |
|
218 |
+
return image, info, extracted_data, "β
Successfully analyzed station data"
|
219 |
|
220 |
else:
|
221 |
+
return None, f"β Failed to fetch image (HTTP {response.status_code})", None, "Connection failed"
|
222 |
|
223 |
except Exception as e:
|
224 |
+
return None, f"β Error: {str(e)}", None, "Analysis failed"
|
225 |
|
226 |
+
def try_alternative_nz_weather_apis():
|
227 |
+
"""Test alternative weather data sources for New Zealand"""
|
228 |
results = []
|
|
|
229 |
|
230 |
+
# Test coordinates for major NZ snow areas
|
231 |
+
test_locations = [
|
232 |
+
{"name": "Mount Cook area", "lat": -43.69, "lon": 170.11},
|
233 |
+
{"name": "Canterbury high country", "lat": -43.53, "lon": 171.17},
|
234 |
+
{"name": "Tasman mountains", "lat": -41.56, "lon": 172.27}
|
235 |
+
]
|
236 |
|
237 |
apis_to_test = [
|
238 |
+
{
|
239 |
+
"name": "OpenWeatherMap",
|
240 |
+
"url_template": "https://api.openweathermap.org/data/2.5/weather?lat={lat}&lon={lon}&appid=demo",
|
241 |
+
"has_snow": True
|
242 |
+
},
|
243 |
+
{
|
244 |
+
"name": "WeatherAPI",
|
245 |
+
"url_template": "http://api.weatherapi.com/v1/current.json?key=demo&q={lat},{lon}",
|
246 |
+
"has_snow": True
|
247 |
+
},
|
248 |
+
{
|
249 |
+
"name": "Visual Crossing",
|
250 |
+
"url_template": "https://weather.visualcrossing.com/VisualCrossingWebServices/rest/services/timeline/{lat},{lon}?key=demo",
|
251 |
+
"has_snow": True
|
252 |
+
}
|
253 |
]
|
254 |
|
255 |
+
for api in apis_to_test:
|
256 |
try:
|
257 |
+
test_loc = test_locations[0] # Test with Mount Cook area
|
258 |
+
url = api["url_template"].format(lat=test_loc["lat"], lon=test_loc["lon"])
|
259 |
+
|
260 |
+
response = requests.get(url, timeout=5)
|
261 |
+
|
262 |
if response.status_code == 200:
|
263 |
+
results.append(f"β
{api['name']}: API responds (may need valid key)")
|
264 |
+
try:
|
265 |
+
data = response.json()
|
266 |
+
if 'snow' in str(data).lower():
|
267 |
+
results.append(f" βοΈ Contains snow data fields")
|
268 |
+
except:
|
269 |
+
pass
|
270 |
elif response.status_code == 401:
|
271 |
+
results.append(f"π {api['name']}: API key required")
|
272 |
+
elif response.status_code == 403:
|
273 |
+
results.append(f"π« {api['name']}: Access forbidden")
|
274 |
else:
|
275 |
+
results.append(f"β {api['name']}: HTTP {response.status_code}")
|
276 |
+
|
277 |
+
except Exception as e:
|
278 |
+
results.append(f"β {api['name']}: {str(e)[:50]}...")
|
279 |
+
|
280 |
+
# Add recommendations
|
281 |
+
results.append("\n**Recommendations for Real Data:**")
|
282 |
+
results.append("1. OpenWeatherMap: Free tier includes snow data")
|
283 |
+
results.append("2. WeatherAPI: Good NZ coverage with snow fields")
|
284 |
+
results.append("3. Visual Crossing: Historical snow data available")
|
285 |
+
results.append("4. MetService (NZ): Local weather service APIs")
|
286 |
+
|
287 |
+
return "\n".join(results)
|
288 |
+
|
289 |
+
def analyze_all_stations():
|
290 |
+
"""Get data from all stations and create summary"""
|
291 |
+
all_data = {}
|
292 |
+
images = []
|
293 |
+
|
294 |
+
for station_key in SNOW_STATIONS.keys():
|
295 |
+
try:
|
296 |
+
image, info, extracted_data, status = fetch_and_analyze_station(station_key)
|
297 |
+
if image and extracted_data:
|
298 |
+
all_data[station_key] = extracted_data
|
299 |
+
images.append((image, f"{SNOW_STATIONS[station_key]['name']} ({extracted_data['estimated_current_depth']:.1f}cm)"))
|
300 |
except:
|
301 |
+
continue
|
302 |
|
303 |
+
# Create summary comparison
|
304 |
+
summary = "**Snow Depth Comparison (Estimated from Charts):**\n\n"
|
305 |
+
for station_key, data in all_data.items():
|
306 |
+
station = SNOW_STATIONS[station_key]
|
307 |
+
summary += f"- **{station['name']}** ({station['elevation']}): ~{data['estimated_current_depth']:.1f}cm\n"
|
308 |
+
|
309 |
+
summary += f"\n**Analysis completed:** {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
|
310 |
+
summary += "\n\nβ οΈ These are rough estimates from image analysis. For accurate data, use NIWA DataHub with proper authentication."
|
311 |
+
|
312 |
+
return images, summary
|
313 |
|
314 |
+
# Create the Gradio Interface
|
315 |
+
with gr.Blocks(title="NZ Snow Data - Chart Extraction & Alternatives", theme=gr.themes.Soft()) as app:
|
316 |
gr.Markdown("""
|
317 |
+
# ποΈ New Zealand Snow Data: Chart Extraction & Alternatives
|
318 |
|
319 |
+
**Since NIWA APIs require complex authentication (email + 2FA), this app focuses on practical solutions:**
|
|
|
|
|
320 |
|
321 |
+
1. **π Advanced Chart Data Extraction** - Computer vision analysis of snow depth charts
|
322 |
+
2. **π Alternative Data Sources** - Other weather APIs with NZ coverage
|
323 |
+
3. **π Direct Data Discovery** - Finding downloadable datasets
|
324 |
""")
|
325 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
326 |
with gr.Tab("π Chart Data Extraction"):
|
327 |
gr.Markdown("""
|
328 |
+
### Extract Real Data from Snow Depth Charts
|
329 |
+
Uses computer vision to analyze NIWA snow depth charts and extract approximate numerical values.
|
330 |
""")
|
331 |
|
332 |
with gr.Row():
|
|
|
334 |
choices=list(SNOW_STATIONS.keys()),
|
335 |
value="Mueller Hut EWS",
|
336 |
label="Select Snow Station",
|
337 |
+
info="Station for detailed analysis"
|
338 |
)
|
339 |
+
analyze_btn = gr.Button("π Analyze Chart Data", variant="primary")
|
340 |
|
341 |
with gr.Row():
|
342 |
with gr.Column(scale=2):
|
343 |
chart_image = gr.Image(label="Snow Depth Chart", height=500)
|
344 |
with gr.Column(scale=1):
|
345 |
+
extracted_info = gr.Markdown(label="Extracted Data Analysis")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
346 |
|
347 |
+
analysis_status = gr.Textbox(label="Analysis Status", interactive=False)
|
|
|
|
|
348 |
|
349 |
+
# Hidden component to store extracted data
|
350 |
+
extracted_data_store = gr.JSON(visible=False)
|
351 |
+
|
352 |
+
with gr.Tab("πΊοΈ All Stations Summary"):
|
353 |
+
gr.Markdown("### Compare All Stations")
|
|
|
|
|
|
|
|
|
354 |
|
355 |
+
analyze_all_btn = gr.Button("π Analyze All Stations", variant="primary", size="lg")
|
356 |
|
357 |
+
with gr.Row():
|
358 |
+
all_images = gr.Gallery(label="All Station Charts with Estimates", columns=2, height=500)
|
359 |
+
stations_summary = gr.Markdown(label="Snow Depth Summary")
|
360 |
+
|
361 |
+
with gr.Tab("π Alternative Data Sources"):
|
362 |
+
gr.Markdown("""
|
363 |
+
### Test Alternative Weather APIs
|
364 |
+
Find other data sources that provide New Zealand snow and weather data.
|
365 |
+
""")
|
366 |
|
367 |
+
test_alternatives_btn = gr.Button("π Test Alternative APIs", variant="secondary")
|
368 |
+
alternative_results = gr.Textbox(label="Alternative API Results", lines=15, interactive=False)
|
369 |
|
370 |
+
gr.Markdown("""
|
371 |
+
### Recommended Data Sources:
|
|
|
|
|
372 |
|
373 |
+
**For Programming/Research:**
|
374 |
+
- **OpenWeatherMap**: Free tier, has snow fields for NZ coordinates
|
375 |
+
- **WeatherAPI.com**: Good New Zealand coverage, snow depth data
|
376 |
+
- **Visual Crossing**: Historical weather data including snow
|
|
|
|
|
|
|
377 |
|
378 |
+
**For Real-Time Monitoring:**
|
379 |
+
- **MetService NZ**: Official New Zealand weather service
|
380 |
+
- **NIWA Weather**: Real-time weather data (separate from DataHub)
|
381 |
+
- **Local Council APIs**: Regional weather monitoring systems
|
382 |
""")
|
383 |
|
384 |
+
with gr.Tab("π‘ Data Access Solutions"):
|
385 |
+
gr.Markdown("""
|
386 |
+
## π― Practical Solutions for Snow Data Access
|
387 |
+
|
388 |
+
### Option 1: Chart Extraction (This App) β
|
389 |
+
**What it does:**
|
390 |
+
- Computer vision analysis of NIWA snow depth charts
|
391 |
+
- Extracts approximate numerical values and trends
|
392 |
+
- Provides rough current snow depth estimates
|
393 |
+
|
394 |
+
**Accuracy:** Moderate (Β±20-30cm) but useful for trends
|
395 |
+
**Use cases:** Quick assessments, relative comparisons, proof-of-concept
|
396 |
+
|
397 |
+
### Option 2: NIWA DataHub (Requires Account) π
|
398 |
+
**Steps:**
|
399 |
+
1. Register at https://data.niwa.co.nz/ (email + 2FA)
|
400 |
+
2. Log in via web interface
|
401 |
+
3. Browse "Climate station data" or "Snow & Ice Network"
|
402 |
+
4. Download CSV files manually
|
403 |
+
5. For API access: Generate Personal Access Token after login
|
404 |
+
|
405 |
+
**Accuracy:** High (research-grade)
|
406 |
+
**Use cases:** Research, official reports, detailed analysis
|
407 |
+
|
408 |
+
### Option 3: Alternative APIs β‘
|
409 |
+
**Recommended:**
|
410 |
+
- **OpenWeatherMap** (free tier): Snow data for NZ coordinates
|
411 |
+
- **WeatherAPI.com**: Comprehensive NZ weather including snow
|
412 |
+
- **Visual Crossing**: Historical snow data with API access
|
413 |
+
|
414 |
+
**Accuracy:** Good for general weather, limited for alpine specifics
|
415 |
+
**Use cases:** General weather apps, regional snow estimates
|
416 |
+
|
417 |
+
### Option 4: Direct NIWA Contact π§
|
418 |
+
**For serious research:**
|
419 |
+
- Email NIWA data team directly
|
420 |
+
- Request specific dataset access
|
421 |
+
- Negotiate API access for commercial/research use
|
422 |
+
- Get real-time data feeds
|
423 |
+
|
424 |
+
### Option 5: Web Scraping (Advanced) π€
|
425 |
+
**Automated chart analysis:**
|
426 |
+
- Schedule regular image downloads
|
427 |
+
- Batch process multiple stations
|
428 |
+
- Track trends over time
|
429 |
+
- Store extracted data in database
|
430 |
+
|
431 |
+
## π Recommended Approach:
|
432 |
+
1. **Start with this app** for immediate estimates
|
433 |
+
2. **Register at NIWA DataHub** for accurate historical data
|
434 |
+
3. **Use alternative APIs** for general weather context
|
435 |
+
4. **Contact NIWA directly** for research-grade real-time access
|
436 |
+
""")
|
437 |
|
438 |
+
# Event handlers
|
439 |
+
analyze_btn.click(
|
440 |
+
fn=fetch_and_analyze_station,
|
441 |
inputs=[station_dropdown],
|
442 |
+
outputs=[chart_image, extracted_info, extracted_data_store, analysis_status]
|
443 |
)
|
444 |
|
445 |
+
analyze_all_btn.click(
|
446 |
+
fn=analyze_all_stations,
|
447 |
+
outputs=[all_images, stations_summary]
|
448 |
+
)
|
|
|
|
|
|
|
|
|
|
|
|
|
449 |
|
450 |
+
test_alternatives_btn.click(
|
451 |
+
fn=try_alternative_nz_weather_apis,
|
452 |
+
outputs=[alternative_results]
|
453 |
)
|
454 |
|
455 |
# Launch for HuggingFace Spaces
|
|
|
467 |
opencv-python>=4.5.0
|
468 |
"""
|
469 |
|
470 |
+
# Practical README.md:
|
471 |
"""
|
472 |
---
|
473 |
+
title: NZ Snow Data - Chart Extraction & Alternatives
|
474 |
emoji: ποΈ
|
475 |
colorFrom: blue
|
476 |
colorTo: white
|
|
|
480 |
pinned: false
|
481 |
---
|
482 |
|
483 |
+
# New Zealand Snow Data: Practical Solutions
|
|
|
|
|
484 |
|
485 |
+
**Real solutions for accessing NZ alpine snow depth data when APIs require complex authentication.**
|
486 |
|
487 |
+
## π― What This App Does
|
|
|
|
|
|
|
|
|
488 |
|
489 |
**Chart Data Extraction:**
|
490 |
+
- Computer vision analysis of NIWA snow depth charts
|
491 |
+
- Extracts approximate numerical values (Β±20-30cm accuracy)
|
492 |
+
- Provides trends and current estimates for 5 major stations
|
493 |
+
|
494 |
+
**Alternative Data Sources:**
|
495 |
+
- Tests other weather APIs with New Zealand coverage
|
496 |
+
- Identifies services that provide snow data for NZ coordinates
|
497 |
+
- Recommends practical alternatives to NIWA DataHub
|
498 |
|
499 |
+
**Practical Access Guide:**
|
500 |
+
- Multiple approaches from quick estimates to research-grade data
|
501 |
+
- Clear instructions for each data source type
|
502 |
+
- Realistic expectations about accuracy and access
|
503 |
|
504 |
+
## ποΈ Stations Covered
|
505 |
+
- Mueller Hut EWS (1818m) - Mount Cook National Park
|
506 |
+
- Mt Potts EWS (2128m) - Highest elevation station
|
507 |
+
- Mahanga EWS (1940m) - Tasman region
|
508 |
+
- Upper Rakaia EWS (1752m) - Canterbury
|
509 |
+
- Albert Burn EWS (1280m) - Mt Aspiring region
|
510 |
|
511 |
+
## π§ Use Cases
|
512 |
+
- Avalanche safety planning
|
513 |
+
- Alpine recreation planning
|
514 |
+
- Research proof-of-concept
|
515 |
+
- Climate monitoring
|
516 |
+
- Water resource assessment
|
517 |
|
518 |
+
Perfect when you need NZ snow data but can't navigate complex authentication systems!
|
519 |
"""
|