weather-geo-era5 / weather_geo_utils.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Weather Geo ERA5 Dataset Utilities
Helper functions for working with the geographically partitioned ERA5 weather dataset.
This module provides convenient access patterns for the Hugging Face-hosted dataset.
Dataset: https://huggingface.co/datasets/NaaVrug/weather-geo-era5
License: CC-BY-4.0
Attribution: Contains modified Copernicus Climate Change Service information 2024
"""
import pandas as pd
from pathlib import Path
from typing import List, Tuple, Optional, Union
from datetime import datetime, date
import warnings
try:
from huggingface_hub import hf_hub_download
HF_AVAILABLE = True
except ImportError:
HF_AVAILABLE = False
warnings.warn("huggingface_hub not available. Install with: pip install huggingface-hub")
# Dataset configuration
REPO_ID = "NaaVrug/weather-geo-era5"
REPO_TYPE = "dataset"
# Geographic constants (30° lat × 45° lon tiles)
LAT_BANDS = [
("m90_m60", -90, -60), # Antarctic
("m60_m30", -60, -30), # Southern mid-latitudes
("m30_p00", -30, 0), # Southern tropics
("p00_p30", 0, 30), # Northern tropics
("p30_p60", 30, 60), # Northern mid-latitudes
("p60_p90", 60, 90), # Arctic
]
LON_BANDS = [
("000_045", 0, 45), # Europe/Africa West
("045_090", 45, 90), # Middle East/Central Asia
("090_135", 90, 135), # East Asia
("135_180", 135, 180), # Western Pacific
("180_225", 180, 225), # Central Pacific
("225_270", 225, 270), # Eastern Pacific
("270_315", 270, 315), # Americas West
("315_360", 315, 360), # Americas East/Atlantic
]
def get_tile_name(lat: float, lon: float) -> str:
"""
Get the tile filename for given coordinates.
Args:
lat: Latitude in degrees (-90 to 90)
lon: Longitude in degrees (-180 to 180 or 0 to 360)
Returns:
Tile filename (e.g., "lat_p30_p60__lon_000_045.parquet")
Examples:
>>> get_tile_name(48.8566, 2.3522) # Paris
'lat_p30_p60__lon_000_045.parquet'
>>> get_tile_name(40.7128, -74.0060) # NYC
'lat_p30_p60__lon_270_315.parquet'
>>> get_tile_name(-33.8688, 151.2093) # Sydney
'lat_m60_m30__lon_135_180.parquet'
"""
# Validate inputs
if not -90 <= lat <= 90:
raise ValueError(f"Latitude must be between -90 and 90, got {lat}")
# Normalize longitude to 0-360 range
lon = lon % 360
# Find latitude band
lat_band = None
for band_name, lat_min, lat_max in LAT_BANDS:
if lat_min <= lat < lat_max or (lat == 90 and lat_max == 90):
lat_band = band_name
break
if lat_band is None:
raise ValueError(f"Could not determine latitude band for {lat}")
# Find longitude band
lon_band = None
for band_name, lon_min, lon_max in LON_BANDS:
if lon_min <= lon < lon_max or (lon == 360 and lon_max == 360):
lon_band = band_name
break
if lon_band is None:
raise ValueError(f"Could not determine longitude band for {lon}")
return f"lat_{lat_band}__lon_{lon_band}.parquet"
def get_tiles_for_region(lat_min: float, lat_max: float,
lon_min: float, lon_max: float) -> List[str]:
"""
Get all tile names that overlap with a rectangular region.
Args:
lat_min, lat_max: Latitude bounds
lon_min, lon_max: Longitude bounds
Returns:
List of tile filenames
Example:
>>> # Europe region
>>> tiles = get_tiles_for_region(35, 70, -10, 40)
>>> print(len(tiles)) # Multiple tiles covering Europe
"""
    # Normalize longitude to the 0-360 convention used by the tile grid;
    # keep an explicit upper bound of 360 from collapsing to 0
    lon_min = lon_min % 360
    lon_max = lon_max % 360 or 360
tiles = set()
# Handle longitude wrap-around
if lon_min > lon_max:
# Region crosses 0° longitude
for lat_band_name, lat_band_min, lat_band_max in LAT_BANDS:
if not (lat_max < lat_band_min or lat_min >= lat_band_max):
for lon_band_name, lon_band_min, lon_band_max in LON_BANDS:
                    # Band overlaps either [lon_min, 360) or [0, lon_max)
                    if lon_band_max > lon_min or lon_band_min < lon_max:
tiles.add(f"lat_{lat_band_name}__lon_{lon_band_name}.parquet")
else:
# Normal case
for lat_band_name, lat_band_min, lat_band_max in LAT_BANDS:
if not (lat_max < lat_band_min or lat_min >= lat_band_max):
for lon_band_name, lon_band_min, lon_band_max in LON_BANDS:
if not (lon_max < lon_band_min or lon_min >= lon_band_max):
tiles.add(f"lat_{lat_band_name}__lon_{lon_band_name}.parquet")
return sorted(list(tiles))
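# Usage sketch: resolving which tiles a bounding box touches is a purely local
# computation (no downloads). The Europe box matches the docstring example and
# is only illustrative.
def _example_region_tiles() -> None:
    europe_tiles = get_tiles_for_region(35, 70, -10, 40)
    print(f"Europe (35N-70N, 10W-40E) spans {len(europe_tiles)} tiles:")
    for name in europe_tiles:
        print(f"  {name}")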
def load_point_data(lat: float, lon: float,
start_date: Optional[Union[str, date, datetime]] = None,
end_date: Optional[Union[str, date, datetime]] = None,
cache_dir: Optional[str] = None) -> pd.DataFrame:
"""
Load weather data for a specific point.
Args:
lat: Latitude in degrees
lon: Longitude in degrees
start_date: Start date (YYYY-MM-DD or datetime object)
end_date: End date (YYYY-MM-DD or datetime object)
cache_dir: Local cache directory for downloaded files
Returns:
DataFrame with weather data for the point
Example:
>>> # Load 2020 data for Paris
>>> df = load_point_data(48.8566, 2.3522, "2020-01-01", "2020-12-31")
>>> print(f"Loaded {len(df)} records")
"""
if not HF_AVAILABLE:
raise ImportError("huggingface_hub required. Install with: pip install huggingface-hub")
# Get tile name
tile_name = get_tile_name(lat, lon)
# Download tile
file_path = hf_hub_download(
repo_id=REPO_ID,
filename=f"tiles/{tile_name}",
repo_type=REPO_TYPE,
cache_dir=cache_dir
)
# Load data
df = pd.read_parquet(file_path)
    # Filter for approximate coordinates (within the 0.25° grid). Accept either
    # longitude convention: the tiles are named on a 0-360 grid, but callers may
    # pass signed longitudes (e.g. -74.0 for New York).
    lat_tolerance = 0.125  # Half grid resolution
    lon_tolerance = 0.125
    lon_0360 = lon % 360
    lon_signed = ((lon + 180) % 360) - 180
    df = df[
        df['latitude'].between(lat - lat_tolerance, lat + lat_tolerance) &
        (df['longitude'].between(lon_0360 - lon_tolerance, lon_0360 + lon_tolerance) |
         df['longitude'].between(lon_signed - lon_tolerance, lon_signed + lon_tolerance))
    ]
# Filter by date range if specified
if start_date is not None:
df = df[df['time'] >= pd.to_datetime(start_date)]
if end_date is not None:
df = df[df['time'] <= pd.to_datetime(end_date)]
return df.sort_values('time').reset_index(drop=True)
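# Illustrative post-processing sketch: monthly-mean 2 m temperature for a
# point, converted to °C. Assumes the 't2m' column is present and stored in
# Kelvin (the usual ERA5 convention) and that 'time' parses as a datetime.
def monthly_mean_t2m(lat: float, lon: float, year: int,
                     cache_dir: Optional[str] = None) -> pd.Series:
    df = load_point_data(lat, lon, f"{year}-01-01", f"{year}-12-31", cache_dir)
    df = convert_temperature_units(df, from_unit='K', to_unit='C')
    df['time'] = pd.to_datetime(df['time'])
    # Average over all matched grid points, then resample to month starts
    return df.set_index('time')['t2m'].resample('MS').mean()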
def load_region_data(lat_min: float, lat_max: float,
lon_min: float, lon_max: float,
start_date: Optional[Union[str, date, datetime]] = None,
end_date: Optional[Union[str, date, datetime]] = None,
cache_dir: Optional[str] = None) -> pd.DataFrame:
"""
Load weather data for a rectangular region.
Args:
lat_min, lat_max: Latitude bounds
lon_min, lon_max: Longitude bounds
start_date: Start date filter
end_date: End date filter
cache_dir: Local cache directory
Returns:
Combined DataFrame for the region
Example:
>>> # Load data for a region around Paris
>>> df = load_region_data(48, 49, 2, 3, "2023-01-01", "2023-12-31")
"""
if not HF_AVAILABLE:
raise ImportError("huggingface_hub required. Install with: pip install huggingface-hub")
# Get all tiles covering the region
tiles = get_tiles_for_region(lat_min, lat_max, lon_min, lon_max)
if not tiles:
raise ValueError(f"No tiles found for region: lat[{lat_min}, {lat_max}], lon[{lon_min}, {lon_max}]")
print(f"Loading {len(tiles)} tiles for region...")
dfs = []
for tile_name in tiles:
print(f" Loading {tile_name}...")
# Download tile
file_path = hf_hub_download(
repo_id=REPO_ID,
filename=f"tiles/{tile_name}",
repo_type=REPO_TYPE,
cache_dir=cache_dir
)
# Load and filter data
df = pd.read_parquet(file_path)
        # Filter by geographic bounds, accepting either longitude convention
        # (tiles are named on a 0-360 grid; callers may pass signed longitudes)
        lon_signed = ((df['longitude'] + 180) % 360) - 180
        lon_0360 = df['longitude'] % 360
        df = df[
            df['latitude'].between(lat_min, lat_max) &
            (lon_signed.between(lon_min, lon_max) |
             lon_0360.between(lon_min, lon_max))
        ]
# Filter by date range if specified
if start_date is not None:
df = df[df['time'] >= pd.to_datetime(start_date)]
if end_date is not None:
df = df[df['time'] <= pd.to_datetime(end_date)]
if len(df) > 0:
dfs.append(df)
if not dfs:
raise ValueError("No data found in the specified region and time range")
# Combine all data
result = pd.concat(dfs, ignore_index=True)
return result.sort_values(['time', 'latitude', 'longitude']).reset_index(drop=True)
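# Illustrative aggregation sketch: an area-mean time series of one variable
# over a region. Note the unweighted mean; a cos(latitude) weighting would be
# more appropriate for large or high-latitude regions, but is omitted here for
# brevity.
def region_mean_series(lat_min: float, lat_max: float,
                       lon_min: float, lon_max: float,
                       variable: str = 't2m',
                       start_date: Optional[Union[str, date, datetime]] = None,
                       end_date: Optional[Union[str, date, datetime]] = None,
                       cache_dir: Optional[str] = None) -> pd.Series:
    df = load_region_data(lat_min, lat_max, lon_min, lon_max,
                          start_date, end_date, cache_dir)
    return df.groupby('time')[variable].mean()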
def load_cities_data(cities: List[Tuple[float, float, str]],
start_date: Optional[Union[str, date, datetime]] = None,
end_date: Optional[Union[str, date, datetime]] = None,
cache_dir: Optional[str] = None) -> pd.DataFrame:
"""
Load weather data for multiple cities.
Args:
cities: List of (lat, lon, name) tuples
start_date: Start date filter
end_date: End date filter
cache_dir: Local cache directory
Returns:
DataFrame with data for all cities, including 'city' column
Example:
>>> cities = [
... (48.8566, 2.3522, "Paris"),
... (51.5074, -0.1278, "London"),
... (52.5200, 13.4050, "Berlin"),
... ]
>>> df = load_cities_data(cities, "2023-01-01", "2023-12-31")
"""
if not HF_AVAILABLE:
raise ImportError("huggingface_hub required. Install with: pip install huggingface-hub")
dfs = []
for lat, lon, city_name in cities:
print(f"Loading data for {city_name}...")
try:
df = load_point_data(lat, lon, start_date, end_date, cache_dir)
df['city'] = city_name
dfs.append(df)
except Exception as e:
print(f" Warning: Failed to load data for {city_name}: {e}")
if not dfs:
raise ValueError("No data loaded for any cities")
return pd.concat(dfs, ignore_index=True)
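# Illustrative reshaping sketch: one daily-mean temperature column per city,
# convenient for plotting or correlation analysis. Assumes 't2m' (in Kelvin)
# is present in every tile, as listed in get_dataset_info().
def cities_daily_t2m(cities: List[Tuple[float, float, str]],
                     start_date: Optional[Union[str, date, datetime]] = None,
                     end_date: Optional[Union[str, date, datetime]] = None,
                     cache_dir: Optional[str] = None) -> pd.DataFrame:
    df = load_cities_data(cities, start_date, end_date, cache_dir)
    df = convert_temperature_units(df, from_unit='K', to_unit='C')
    df['date'] = pd.to_datetime(df['time']).dt.date
    return df.pivot_table(index='date', columns='city',
                          values='t2m', aggfunc='mean')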
def convert_temperature_units(df: pd.DataFrame,
from_unit: str = 'K',
to_unit: str = 'C') -> pd.DataFrame:
"""
Convert temperature units in the dataset.
Args:
df: DataFrame with temperature columns
from_unit: Source unit ('K', 'C', 'F')
to_unit: Target unit ('K', 'C', 'F')
Returns:
DataFrame with converted temperatures
"""
df = df.copy()
temp_columns = ['t2m', 'd2m']
for col in temp_columns:
if col in df.columns:
# Convert to Kelvin first
if from_unit == 'C':
temps_k = df[col] + 273.15
elif from_unit == 'F':
temps_k = (df[col] - 32) * 5/9 + 273.15
else: # Assume Kelvin
temps_k = df[col]
# Convert from Kelvin to target
if to_unit == 'C':
df[col] = temps_k - 273.15
elif to_unit == 'F':
df[col] = (temps_k - 273.15) * 9/5 + 32
else: # Keep Kelvin
df[col] = temps_k
return df
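# Companion sketch for the wind components listed in get_dataset_info():
# derive 10 m wind speed from 'u10'/'v10', assumed to be in m/s as in ERA5.
# The frame is returned unchanged if the wind columns are absent.
def add_wind_speed(df: pd.DataFrame, out_col: str = 'wind_speed_10m') -> pd.DataFrame:
    df = df.copy()
    if 'u10' in df.columns and 'v10' in df.columns:
        df[out_col] = (df['u10'] ** 2 + df['v10'] ** 2) ** 0.5
    return df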
def get_dataset_info() -> dict:
"""Get information about the dataset."""
return {
"name": "Weather Geo ERA5 Dataset",
"repo_id": REPO_ID,
"url": f"https://huggingface.co/datasets/{REPO_ID}",
"license": "CC-BY-4.0",
"time_range": "1940-2024",
"resolution": "0.25° x 0.25°",
"total_tiles": len(LAT_BANDS) * len(LON_BANDS),
"variables": ["t2m", "tp", "d2m", "msl", "u10", "v10"],
"attribution": "Contains modified Copernicus Climate Change Service information 2024"
}
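# Planning sketch: deduplicating tile names for a set of cities shows how many
# distinct files a load_cities_data() call would need to fetch (before any
# local caching). Purely local computation, no network access.
def tiles_for_cities(cities: List[Tuple[float, float, str]]) -> List[str]:
    return sorted({get_tile_name(lat, lon) for lat, lon, _ in cities})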
# Example usage
if __name__ == "__main__":
# Print dataset info
info = get_dataset_info()
print("Weather Geo ERA5 Dataset")
print("=" * 40)
for key, value in info.items():
print(f"{key}: {value}")
print("\nExample tile names:")
cities = [
(48.8566, 2.3522, "Paris"),
(40.7128, -74.0060, "New York"),
(-33.8688, 151.2093, "Sydney"),
(35.6762, 139.6503, "Tokyo"),
]
for lat, lon, name in cities:
tile = get_tile_name(lat, lon)
print(f" {name} ({lat:.4f}, {lon:.4f}): {tile}")