diff --git a/Dockerfile b/Dockerfile index 06231e5426..835e2b7346 100644 --- a/Dockerfile +++ b/Dockerfile @@ -32,6 +32,8 @@ COPY ./backend/uv.lock /app/ COPY ./backend/packages/wps-api/pyproject.toml /app/packages/wps-api/ COPY ./backend/packages/wps-shared/pyproject.toml /app/packages/wps-shared/ COPY ./backend/packages/wps-shared/src /app/packages/wps-shared/src +COPY ./backend/packages/wps-wf1/pyproject.toml /app/packages/wps-wf1/ +COPY ./backend/packages/wps-wf1/src /app/packages/wps-wf1/src # Switch to root to set file permissions USER 0 @@ -39,8 +41,10 @@ USER 0 # Set configuration files to read-only for security RUN chmod 444 /app/pyproject.toml /app/uv.lock \ /app/packages/wps-api/pyproject.toml \ - /app/packages/wps-shared/pyproject.toml -RUN chmod -R a-w /app/packages/wps-shared/src + /app/packages/wps-shared/pyproject.toml \ + /app/packages/wps-wf1/pyproject.toml +RUN chmod -R a-w /app/packages/wps-shared/src \ + /app/packages/wps-wf1/src # Switch back to non-root user USER $USERNAME @@ -77,6 +81,7 @@ WORKDIR /app COPY --from=builder /app/pyproject.toml /app/ COPY --from=builder /app/packages/wps-api/pyproject.toml /app/packages/wps-api/ COPY --from=builder /app/packages/wps-shared/pyproject.toml /app/packages/wps-shared/ +COPY --from=builder /app/packages/wps-wf1/pyproject.toml /app/packages/wps-wf1/ # Switch back to our non-root user USER $USERNAME @@ -96,8 +101,9 @@ COPY ./backend/packages/wps-api/alembic.ini /app COPY ./backend/packages/wps-api/prestart.sh /app COPY ./backend/packages/wps-api/start.sh /app -# Make uv happy by copying wps_shared +# Make uv happy by copying wps_shared and wps_wf1 COPY ./backend/packages/wps-shared/src /app/packages/wps-shared/src +COPY ./backend/packages/wps-wf1/src /app/packages/wps-wf1/src # Copy installed Python packages COPY --from=builder /app/.venv /app/.venv @@ -115,7 +121,9 @@ ENV VIRTUAL_ENV="/app/.venv" # root user please USER 0 # Remove write permissions from copied configuration and source files for 
security -RUN chmod -R a-w /app/pyproject.toml /app/packages/wps-api/pyproject.toml /app/advisory /app/libs /app/alembic /app/alembic.ini /app/prestart.sh /app/start.sh /app/packages/wps-shared/src +RUN chmod -R a-w /app/pyproject.toml /app/packages/wps-api/pyproject.toml /app/advisory /app/libs \ + /app/alembic /app/alembic.ini /app/prestart.sh /app/start.sh /app/packages/wps-shared/src \ + /app/packages/wps-wf1/src # We don't know what user uv is going to run as, so we give everyone write access directories # in the app folder. We need write access for .pyc files to be created. .pyc files are good, # they speed up python. diff --git a/Dockerfile.jobs b/Dockerfile.jobs index 67a404052e..95e0683010 100644 --- a/Dockerfile.jobs +++ b/Dockerfile.jobs @@ -30,6 +30,8 @@ COPY ./backend/uv.lock /app/ COPY ./backend/packages/wps-jobs/pyproject.toml /app/packages/wps-jobs/ COPY ./backend/packages/wps-shared/pyproject.toml /app/packages/wps-shared/ COPY ./backend/packages/wps-shared/src /app/packages/wps-shared/src +COPY ./backend/packages/wps-wf1/pyproject.toml /app/packages/wps-wf1/ +COPY ./backend/packages/wps-wf1/src /app/packages/wps-wf1/src # Switch to root to set file permissions USER 0 @@ -37,8 +39,10 @@ USER 0 # Set configuration files to read-only for security RUN chmod 444 /app/pyproject.toml /app/uv.lock \ /app/packages/wps-jobs/pyproject.toml \ - /app/packages/wps-shared/pyproject.toml -RUN chmod -R a-w /app/packages/wps-shared/src + /app/packages/wps-shared/pyproject.toml \ + /app/packages/wps-wf1/pyproject.toml +RUN chmod -R a-w /app/packages/wps-shared/src \ + /app/packages/wps-wf1/src # Switch back to non-root user USER $USERNAME @@ -75,6 +79,7 @@ WORKDIR /app COPY --from=builder /app/pyproject.toml /app/ COPY --from=builder /app/packages/wps-jobs/pyproject.toml /app/packages/wps-jobs/ COPY --from=builder /app/packages/wps-shared/pyproject.toml /app/packages/wps-shared/ +COPY --from=builder /app/packages/wps-wf1/pyproject.toml /app/packages/wps-wf1/ # 
Switch back to our non-root user USER $USERNAME @@ -82,6 +87,7 @@ USER $USERNAME # Copy the jobs from src layout: COPY ./backend/packages/wps-jobs/src /app COPY ./backend/packages/wps-shared/src /app/packages/wps-shared/src +COPY ./backend/packages/wps-wf1/src /app/packages/wps-wf1/src # Copy installed Python packages COPY --from=builder /app/.venv /app/.venv @@ -96,7 +102,8 @@ USER 0 # Create writable data directory for library caches (e.g., herbie BallTree) RUN mkdir -p /data && chmod 777 /data # Remove write permissions from copied configuration and source files for security -RUN chmod -R a-w /app/pyproject.toml /app/packages/wps-jobs/pyproject.toml /app/weather_model_jobs /app/packages/wps-shared/src +RUN chmod -R a-w /app/pyproject.toml /app/packages/wps-jobs/pyproject.toml /app/weather_model_jobs \ + /app/packages/wps-shared/src /app/packages/wps-wf1/src # We don't know what user uv is going to run as, so we give everyone write access directories # in the app folder. We need write access for .pyc files to be created. .pyc files are good, # they speed up python. 
diff --git a/backend/packages/wps-api/pyproject.toml b/backend/packages/wps-api/pyproject.toml index 5b73f2ed46..3e46c7b873 100644 --- a/backend/packages/wps-api/pyproject.toml +++ b/backend/packages/wps-api/pyproject.toml @@ -47,6 +47,7 @@ dependencies = [ "geopandas>=1.0.1,<2", "shapely>=2.0.5,<3", "gdal==3.9.2", + "wps-wf1" ] [project.optional-dependencies] @@ -62,6 +63,7 @@ dev = [ [tool.uv.sources] wps-shared = { workspace = true } +wps-wf1 = { workspace = true } cffdrs = { git = "https://github.com/cffdrs/cffdrs_py.git", rev = "c760307" } [tool.hatch.metadata] diff --git a/backend/packages/wps-api/src/app/auto_spatial_advisory/critical_hours.py b/backend/packages/wps-api/src/app/auto_spatial_advisory/critical_hours.py index cd0017aaa7..d5e2ad6c2b 100644 --- a/backend/packages/wps-api/src/app/auto_spatial_advisory/critical_hours.py +++ b/backend/packages/wps-api/src/app/auto_spatial_advisory/critical_hours.py @@ -16,12 +16,6 @@ from pydantic_core import to_jsonable_python from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession - -from app.auto_spatial_advisory.debug_critical_hours import get_critical_hours_json_from_s3 -from app.auto_spatial_advisory.fuel_type_layer import get_fuel_type_raster_by_year -from app.fire_behaviour import cffdrs -from app.fire_behaviour.prediction import build_hourly_rh_dict, calculate_cfb, get_critical_hours -from app.hourlies import get_hourly_readings_in_time_interval from wps_shared.db.crud.auto_spatial_advisory import ( get_containing_zone, get_fuel_type_stats_in_advisory_area, @@ -30,6 +24,7 @@ get_run_parameters_id, save_all_critical_hours, ) +from wps_shared.db.crud.hfi_calc import get_fire_centre_station_codes from wps_shared.db.database import get_async_write_session_scope from wps_shared.db.models.auto_spatial_advisory import ( AdvisoryFuelStats, @@ -41,13 +36,19 @@ from wps_shared.geospatial.geospatial import PointTransformer from wps_shared.run_type import RunType from wps_shared.schemas.fba_calc 
import AdjustedFWIResult, CriticalHoursHFI -from wps_shared.schemas.observations import WeatherStationHourlyReadings from wps_shared.stations import get_stations_asynchronously -from wps_shared.utils.s3 import apply_retention_policy_on_date_folders, get_client +from wps_shared.utils.s3 import get_client from wps_shared.utils.time import get_hour_20_from_date, get_julian_date -from wps_shared.wildfire_one import wfwx_api -from wps_shared.wildfire_one.schema_parsers import WFWXWeatherStation +from wps_shared.wildfire_one.wfwx_api import create_wfwx_api from wps_shared.wps_logging import configure_logging +from wps_wf1.models import WFWXWeatherStation, WeatherStationHourlyReadings +from wps_wf1.wfwx_api import WfwxApi + +from app.auto_spatial_advisory.debug_critical_hours import get_critical_hours_json_from_s3 +from app.auto_spatial_advisory.fuel_type_layer import get_fuel_type_raster_by_year +from app.fire_behaviour import cffdrs +from app.fire_behaviour.prediction import build_hourly_rh_dict, calculate_cfb, get_critical_hours +from app.hourlies import get_hourly_readings_in_time_interval logger = logging.getLogger(__name__) @@ -371,8 +372,7 @@ async def get_hourly_observations( async def get_dailies_by_station_id( - client_session: ClientSession, - header: dict, + wfwx_api: WfwxApi, wfwx_stations: List[WFWXWeatherStation], time_of_interest: datetime, ): @@ -386,7 +386,7 @@ async def get_dailies_by_station_id( :return: Daily observations or forecasts from WF1. 
""" dailies = await wfwx_api.get_dailies_generator( - client_session, header, wfwx_stations, time_of_interest, time_of_interest + wfwx_stations, time_of_interest, time_of_interest ) # turn it into a dictionary so we can easily get at data using a station id dailies_by_station_id = {raw_daily.get("stationId"): raw_daily async for raw_daily in dailies} @@ -417,7 +417,7 @@ def get_fuel_types_by_area( async def get_inputs_for_critical_hours( - for_date: date, header: dict, wfwx_stations: List[WFWXWeatherStation] + for_date: date, wfwx_api: WfwxApi, wfwx_stations: List[WFWXWeatherStation] ) -> CriticalHoursInputs: """ Retrieves the inputs required for computing critical hours based on the station list and for date @@ -427,35 +427,34 @@ async def get_inputs_for_critical_hours( :param wfwx_stations: list of stations to compute critical hours for :return: critical hours inputs """ - unique_station_codes = list(set(station.code for station in wfwx_stations)) + unique_station_codes = list({station.code for station in wfwx_stations}) time_of_interest = get_hour_20_from_date(for_date) # get the dailies for all the stations - async with ClientSession() as client_session: - dailies_by_station_id = await get_dailies_by_station_id( - client_session, header, wfwx_stations, time_of_interest - ) - # must retrieve the previous day's observed/forecasted FFMC value from WFWX - prev_day = time_of_interest - timedelta(days=1) - # get the "daily" data for the station for the previous day - yesterday_dailies_by_station_id = await get_dailies_by_station_id( - client_session, header, wfwx_stations, prev_day - ) - # get hourly observation history from our API (used for calculating morning diurnal FFMC) - hourly_observations_by_station_code = await get_hourly_observations( - unique_station_codes, time_of_interest - timedelta(days=4), time_of_interest - ) + dailies_by_station_id = await get_dailies_by_station_id( + wfwx_api, wfwx_stations, time_of_interest + ) + # must retrieve the previous 
day's observed/forecasted FFMC value from WFWX + prev_day = time_of_interest - timedelta(days=1) + # get the "daily" data for the station for the previous day + yesterday_dailies_by_station_id = await get_dailies_by_station_id( + wfwx_api, wfwx_stations, prev_day + ) + # get hourly observation history from our API (used for calculating morning diurnal FFMC) + hourly_observations_by_station_code = await get_hourly_observations( + unique_station_codes, time_of_interest - timedelta(days=4), time_of_interest + ) - return CriticalHoursInputs( - dailies_by_station_id=dailies_by_station_id, - yesterday_dailies_by_station_id=yesterday_dailies_by_station_id, - hourly_observations_by_station_code=hourly_observations_by_station_code, - ) + return CriticalHoursInputs( + dailies_by_station_id=dailies_by_station_id, + yesterday_dailies_by_station_id=yesterday_dailies_by_station_id, + hourly_observations_by_station_code=hourly_observations_by_station_code, + ) async def calculate_critical_hours_by_zone( db_session: AsyncSession, - header: dict, + wfwx_api: WfwxApi, stations_by_zone: Dict[int, List[WFWXWeatherStation]], run_parameters_id: int, for_date: date, @@ -479,7 +478,9 @@ async def calculate_critical_hours_by_zone( fuel_types_by_area = get_fuel_types_by_area(advisory_fuel_stats) wfwx_stations = stations_by_zone[zone_key] - critical_hours_inputs = await get_inputs_for_critical_hours(for_date, header, wfwx_stations) + critical_hours_inputs = await get_inputs_for_critical_hours( + for_date, wfwx_api, wfwx_stations + ) critical_hours_by_fuel_type = calculate_critical_hours_by_fuel_type( wfwx_stations, critical_hours_inputs, @@ -562,11 +563,12 @@ async def calculate_critical_hours(run_type: RunType, run_datetime: datetime, fo fuel_type_raster = await get_fuel_type_raster_by_year(db_session, for_date.year) async with ClientSession() as client_session: - header = await wfwx_api.get_auth_header(client_session) all_stations = await get_stations_asynchronously() - station_codes = 
list(station.code for station in all_stations) + station_codes = [station.code for station in all_stations] + fire_centre_station_codes = get_fire_centre_station_codes() + wfwx_api = create_wfwx_api(client_session) stations = await wfwx_api.get_wfwx_stations_from_station_codes( - client_session, header, station_codes + station_codes, fire_centre_station_codes ) stations_by_zone: Dict[int, List[WFWXWeatherStation]] = defaultdict(list) transformer = PointTransformer(4326, 3005) @@ -578,7 +580,7 @@ async def calculate_critical_hours(run_type: RunType, run_datetime: datetime, fo await calculate_critical_hours_by_zone( db_session, - header, + wfwx_api, stations_by_zone, run_parameters_id, for_date, diff --git a/backend/packages/wps-api/src/app/fire_behaviour/fwi_adjust.py b/backend/packages/wps-api/src/app/fire_behaviour/fwi_adjust.py index 06e83c74ab..14a2ec5c67 100644 --- a/backend/packages/wps-api/src/app/fire_behaviour/fwi_adjust.py +++ b/backend/packages/wps-api/src/app/fire_behaviour/fwi_adjust.py @@ -1,6 +1,7 @@ from datetime import datetime + +from wps_wf1.models import WFWXWeatherStation from app.fire_behaviour import cffdrs -from wps_shared.wildfire_one.schema_parsers import WFWXWeatherStation from wps_shared.schemas.fba_calc import StationRequest, AdjustedFWIResult """ diff --git a/backend/packages/wps-api/src/app/fire_behaviour/prediction.py b/backend/packages/wps-api/src/app/fire_behaviour/prediction.py index 21ccd3cd13..4a5d5f4e1b 100644 --- a/backend/packages/wps-api/src/app/fire_behaviour/prediction.py +++ b/backend/packages/wps-api/src/app/fire_behaviour/prediction.py @@ -1,18 +1,19 @@ """Fire Behaviour Analysis Calculator Tool""" -from datetime import datetime -from enum import Enum +import logging import math import os +from datetime import datetime +from enum import Enum from typing import List -import logging + import pandas as pd +from app.fire_behaviour import c7b, cffdrs +from app.utils.singleton import Singleton from wps_shared.fuel_types import 
FuelTypeEnum, is_grass_fuel_type -from wps_shared.schemas.observations import WeatherReading from wps_shared.schemas.fba_calc import CriticalHoursHFI -from app.utils.singleton import Singleton -from app.fire_behaviour import cffdrs, c7b from wps_shared.utils.time import convert_utc_to_pdt, get_julian_date, get_julian_date_now +from wps_wf1.models import WeatherReading logger = logging.getLogger(__name__) @@ -42,13 +43,17 @@ class DiurnalFFMCLookupTable: """ def __init__(self): - afternoon_filename = os.path.join(os.path.dirname(__file__), "../data/diurnal_ffmc_lookups/afternoon_overnight.csv") + afternoon_filename = os.path.join( + os.path.dirname(__file__), "../data/diurnal_ffmc_lookups/afternoon_overnight.csv" + ) with open(afternoon_filename, "rb") as afternoon_file: afternoon_df = pd.read_csv(afternoon_file) afternoon_df.columns = afternoon_df.columns.astype(int) afternoon_df.set_index(17, inplace=True) - morning_filename = os.path.join(os.path.dirname(__file__), "../data/diurnal_ffmc_lookups/morning.csv") + morning_filename = os.path.join( + os.path.dirname(__file__), "../data/diurnal_ffmc_lookups/morning.csv" + ) with open(morning_filename, "rb") as morning_file: morning_df = pd.read_csv(morning_file, header=[0, 1]) prev_days_daily_ffmc_keys = morning_df.iloc[:, 0].values @@ -65,7 +70,9 @@ def __init__(self): rh_lookup_keys += [level_2] morning_df.set_index(prev_days_daily_ffmc_keys, inplace=True) - header = pd.MultiIndex.from_tuples(list(zip(hour_lookup_keys, rh_lookup_keys)), names=["hour", "RH"]) + header = pd.MultiIndex.from_tuples( + list(zip(hour_lookup_keys, rh_lookup_keys)), names=["hour", "RH"] + ) morning_df.columns = header morning_df.drop(columns=[("", "")], inplace=True) @@ -75,7 +82,14 @@ def __init__(self): def calculate_cfb(fuel_type: FuelTypeEnum, fmc: float, sfc: float, ros: float, cbh: float): """Calculate the crown fraction burned (returning 0 for fuel types without crowns to burn)""" - if fuel_type in [FuelTypeEnum.D1, FuelTypeEnum.O1A, 
FuelTypeEnum.O1B, FuelTypeEnum.S1, FuelTypeEnum.S2, FuelTypeEnum.S3]: + if fuel_type in [ + FuelTypeEnum.D1, + FuelTypeEnum.O1A, + FuelTypeEnum.O1B, + FuelTypeEnum.S1, + FuelTypeEnum.S2, + FuelTypeEnum.S3, + ]: # These fuel types don't have a crown fraction burnt. But CFB is needed for other calculations, # so we go with 0. cfb = 0 @@ -87,7 +101,14 @@ def calculate_cfb(fuel_type: FuelTypeEnum, fmc: float, sfc: float, ros: float, c return cfb -def get_fire_size(fuel_type: FuelTypeEnum, ros: float, bros: float, elapsed_minutes: int, cfb: float, lb_ratio: float): +def get_fire_size( + fuel_type: FuelTypeEnum, + ros: float, + bros: float, + elapsed_minutes: int, + cfb: float, + lb_ratio: float, +): """ Fire size based on Eq. 8 (Alexander, M.E. 1985. Estimating the length-to-breadth ratio of elliptical forest fire patterns.). @@ -96,7 +117,9 @@ def get_fire_size(fuel_type: FuelTypeEnum, ros: float, bros: float, elapsed_minu raise cffdrs.CFFDRSException() # Using acceleration: fire_spread_distance = cffdrs.fire_distance(fuel_type, ros + bros, elapsed_minutes, cfb) - length_to_breadth_at_time = cffdrs.length_to_breadth_ratio_t(fuel_type, lb_ratio, elapsed_minutes, cfb) + length_to_breadth_at_time = cffdrs.length_to_breadth_ratio_t( + fuel_type, lb_ratio, elapsed_minutes, cfb + ) # Not using acceleration: # fros = cffdrs.flank_rate_of_spread(ros, bros, lb_ratio) # # Flank Fire Spread Distance a.k.a. DF in R/FBPcalc.r @@ -106,7 +129,9 @@ def get_fire_size(fuel_type: FuelTypeEnum, ros: float, bros: float, elapsed_minu # Essentially using Eq. 8 (Alexander, M.E. 1985. Estimating the length-to-breadth ratio of elliptical # forest fire patterns.) - but feeding it L/B and ROS from CFFDRS. 
- return math.pi / (4.0 * length_to_breadth_at_time) * math.pow(fire_spread_distance, 2.0) / 10000.0 + return ( + math.pi / (4.0 * length_to_breadth_at_time) * math.pow(fire_spread_distance, 2.0) / 10000.0 + ) def get_fire_type(fuel_type: FuelTypeEnum, crown_fraction_burned: float) -> FireTypeEnum: @@ -182,7 +207,12 @@ def get_morning_diurnal_ffmc(hour_of_interest: int, prev_day_daily_ffmc: float, return None -def get_critical_hours_start(critical_ffmc: float, daily_ffmc: float, prev_day_daily_ffmc: float, last_observed_morning_rh_values: dict): +def get_critical_hours_start( + critical_ffmc: float, + daily_ffmc: float, + prev_day_daily_ffmc: float, + last_observed_morning_rh_values: dict, +): """Returns the hour of day (on 24H clock) at which the hourly FFMC crosses the threshold of critical_ffmc. Returns None if the hourly FFMC never reaches critical_ffmc. @@ -217,7 +247,9 @@ def get_critical_hours_start(critical_ffmc: float, daily_ffmc: float, prev_day_d return clock_time -def get_critical_hours_end(critical_ffmc: float, solar_noon_ffmc: float, critical_hour_start: float): +def get_critical_hours_end( + critical_ffmc: float, solar_noon_ffmc: float, critical_hour_start: float +): """Returns the hour of day (on 24H clock) at which the hourly FFMC drops below the threshold of critical_ffmc. Should only be called if critical_hour_start is not None. @@ -264,25 +296,53 @@ def get_critical_hours( that cause HFI >= target_hfi. 
""" critical_ffmc, resulting_hfi = cffdrs.get_ffmc_for_target_hfi( - fuel_type, percentage_conifer, percentage_dead_balsam_fir, bui, wind_speed, grass_cure, crown_base_height, daily_ffmc, fmc, cfb, cfl, target_hfi + fuel_type, + percentage_conifer, + percentage_dead_balsam_fir, + bui, + wind_speed, + grass_cure, + crown_base_height, + daily_ffmc, + fmc, + cfb, + cfl, + target_hfi, + ) + logger.debug( + "Critical FFMC %s, resulting HFI %s; target HFI %s", + critical_ffmc, + resulting_hfi, + target_hfi, ) - logger.debug("Critical FFMC %s, resulting HFI %s; target HFI %s", critical_ffmc, resulting_hfi, target_hfi) # Scenario 1 (resulting_hfi < target_hfi) - will happen when it's impossible to get # a HFI value large enough to >= target_hfi, because FFMC influences the HFI value, # and FFMC has an upper bound of 101. So basically, in this scenario the resulting_hfi # would equal the resulting HFI when FFMC is set to 101. if critical_ffmc >= 100.9 and resulting_hfi < target_hfi: - logger.debug("No critical hours for HFI %s. Critical FFMC %s has HFI %s", target_hfi, critical_ffmc, resulting_hfi) + logger.debug( + "No critical hours for HFI %s. Critical FFMC %s has HFI %s", + target_hfi, + critical_ffmc, + resulting_hfi, + ) return None # Scenario 2: the HFI is always >= target_hfi, even when FFMC = 0. In this case, all hours # of the day will be critical hours. if critical_ffmc == 0.0 and resulting_hfi >= target_hfi: - logger.info("All hours critical for HFI %s. FFMC %s has HFI %s", target_hfi, critical_ffmc, resulting_hfi) + logger.info( + "All hours critical for HFI %s. FFMC %s has HFI %s", + target_hfi, + critical_ffmc, + resulting_hfi, + ) return CriticalHoursHFI(start=13.0, end=7.0) # Scenario 3: there is a critical_ffmc between (0, 101) that corresponds to # resulting_hfi >= target_hfi. Now have to determine what hours of the day (if any) # will see hourly FFMC (adjusted according to diurnal curve) >= critical_ffmc. 
- critical_hours_start = get_critical_hours_start(critical_ffmc, daily_ffmc, prev_daily_ffmc, last_observed_morning_rh_values) + critical_hours_start = get_critical_hours_start( + critical_ffmc, daily_ffmc, prev_daily_ffmc, last_observed_morning_rh_values + ) if critical_hours_start is None: return None critical_hours_end = get_critical_hours_end(critical_ffmc, daily_ffmc, critical_hours_start) @@ -311,7 +371,9 @@ def build_hourly_rh_dict(hourly_observations: List[WeatherReading]): class FireBehaviourPrediction: """Structure for storing fire behaviour prediction data.""" - def __init__(self, ros: float, hfi: float, intensity_group, sixty_minute_fire_size: float, fire_type) -> None: + def __init__( + self, ros: float, hfi: float, intensity_group, sixty_minute_fire_size: float, fire_type + ) -> None: self.ros = ros self.hfi = hfi self.intensity_group = intensity_group @@ -362,7 +424,9 @@ def calculate_fire_behaviour_prediction_using_cffdrs( fmc = cffdrs.foliar_moisture_content(latitude, longitude, elevation, julian_date) sfc = cffdrs.surface_fuel_consumption(fuel_type, bui, ffmc, pc) - ros = cffdrs.rate_of_spread(FuelTypeEnum[fuel_type], isi, bui, fmc, sfc, pc=pc, cc=cc, pdf=pdf, cbh=cbh) + ros = cffdrs.rate_of_spread( + FuelTypeEnum[fuel_type], isi, bui, fmc, sfc, pc=pc, cc=cc, pdf=pdf, cbh=cbh + ) if sfc is not None: cfb = calculate_cfb(FuelTypeEnum[fuel_type], fmc, sfc, ros, cbh) @@ -443,7 +507,9 @@ def calculate_fire_behaviour_prediction_using_c7b( fmc = cffdrs.foliar_moisture_content(latitude, longitude, elevation, julian_date) sfc = cffdrs.surface_fuel_consumption(fuel_type=FuelTypeEnum.C7, bui=bui, ffmc=ffmc, pc=None) - cfb = cffdrs.crown_fraction_burned(fuel_type=FuelTypeEnum.C7, fmc=fmc, sfc=sfc, ros=ros, cbh=cbh) + cfb = cffdrs.crown_fraction_burned( + fuel_type=FuelTypeEnum.C7, fmc=fmc, sfc=sfc, ros=ros, cbh=cbh + ) hfi = cffdrs.head_fire_intensity( fuel_type=FuelTypeEnum.C7, @@ -490,7 +556,9 @@ def calculate_fire_behaviour_prediction( 
calculation_datetime: datetime = None, ): """Calculate the fire behaviour prediction.""" - julian_date = get_julian_date(calculation_datetime) if calculation_datetime else get_julian_date_now() + julian_date = ( + get_julian_date(calculation_datetime) if calculation_datetime else get_julian_date_now() + ) if wind_speed is None: raise FireBehaviourPredictionInputError("Wind speed must be specified") diff --git a/backend/packages/wps-api/src/app/fire_watch/calculate_weather.py b/backend/packages/wps-api/src/app/fire_watch/calculate_weather.py index b0a5f9c3c2..af7087bc72 100644 --- a/backend/packages/wps-api/src/app/fire_watch/calculate_weather.py +++ b/backend/packages/wps-api/src/app/fire_watch/calculate_weather.py @@ -8,12 +8,12 @@ ) from app.morecast_v2.forecasts import calculate_fwi_from_seed_indeterminates from sqlalchemy.ext.asyncio import AsyncSession - from wps_shared.db.crud.fire_watch import ( get_all_prescription_status, get_fire_watch_weather_by_fire_watch_id_and_model_run, get_fire_watches_missing_weather_for_run, ) +from wps_shared.db.crud.hfi_calc import get_fire_centre_station_codes from wps_shared.db.crud.weather_models import ( get_latest_daily_model_prediction_for_stations, get_latest_prediction_timestamp_id_for_model, @@ -21,16 +21,11 @@ from wps_shared.db.database import get_async_write_session_scope from wps_shared.db.models.fire_watch import FireWatch, FireWatchWeather from wps_shared.fuel_types import FUEL_TYPE_DEFAULTS -from wps_shared.schemas.morecast_v2 import WeatherDeterminate, WeatherIndeterminate from wps_shared.schemas.weather_models import ModelPredictionDetails from wps_shared.utils.time import assert_all_utc, get_utc_now from wps_shared.weather_models import ModelEnum -from wps_shared.wildfire_one.schema_parsers import WFWXWeatherStation -from wps_shared.wildfire_one.wfwx_api import ( - get_auth_header, - get_daily_determinates_for_stations_and_date, - get_wfwx_stations_from_station_codes, -) +from wps_shared.wildfire_one.wfwx_api 
import create_wfwx_api +from wps_wf1.models import WFWXWeatherStation, WeatherDeterminate, WeatherIndeterminate logger = logging.getLogger(__name__) @@ -320,8 +315,11 @@ def check_prescription_status( async def get_station_metadata(station_ids: list[int]) -> dict[int, WFWXWeatherStation]: """Fetch station metadata from the WFWX API.""" async with ClientSession() as session: - header = await get_auth_header(session) - wfwx_stations = await get_wfwx_stations_from_station_codes(session, header, station_ids) + fire_centre_station_codes = get_fire_centre_station_codes() + wfwx_api = create_wfwx_api(session) + wfwx_stations = await wfwx_api.get_wfwx_stations_from_station_codes( + station_ids, fire_centre_station_codes + ) return {station.code: station for station in wfwx_stations} @@ -330,9 +328,10 @@ async def get_actuals_and_forecasts( ) -> tuple[list[WeatherIndeterminate], list[WeatherIndeterminate]]: """Fetch actuals and forecasts from the WFWX API.""" async with ClientSession() as session: - header = await get_auth_header(session) - wf1_actuals, wf1_forecasts = await get_daily_determinates_for_stations_and_date( - session, header, start_date, end_date, station_ids + fire_centre_station_codes = get_fire_centre_station_codes() + wfwx_api = create_wfwx_api(session) + wf1_actuals, wf1_forecasts = await wfwx_api.get_daily_determinates_for_stations_and_date( + start_date, end_date, station_ids, fire_centre_station_codes ) return wf1_actuals, wf1_forecasts @@ -479,7 +478,7 @@ async def process_all_fire_watch_weather(): ) return - station_ids = set(fire_watch.station_code for fire_watch in fire_watches_to_process) + station_ids = {fire_watch.station_code for fire_watch in fire_watches_to_process} wfwx_station_map = await get_station_metadata(list(station_ids)) status_id_dict = await get_all_prescription_status(session) diff --git a/backend/packages/wps-api/src/app/forecasts/noon_forecasts.py b/backend/packages/wps-api/src/app/forecasts/noon_forecasts.py index 
8fc2a5b591..d581003554 100644 --- a/backend/packages/wps-api/src/app/forecasts/noon_forecasts.py +++ b/backend/packages/wps-api/src/app/forecasts/noon_forecasts.py @@ -1,27 +1,30 @@ -""" This module is used to fetch noon forecasts for weather stations from +"""This module is used to fetch noon forecasts for weather stations from the noon_forecasts table in our database. """ + import logging +import math from collections import defaultdict -from typing import List from datetime import datetime, timezone -import math -from wps_shared.schemas.forecasts import NoonForecast, NoonForecastResponse, NoonForecastValue -from wps_shared.schemas.stations import StationCodeList +from typing import List + import wps_shared.db.database -from wps_shared.db.crud.forecasts import query_noon_forecast_records import wps_shared.db.models - +from wps_shared.db.crud.forecasts import query_noon_forecast_records +from wps_shared.schemas.forecasts import NoonForecast, NoonForecastResponse, NoonForecastValue +from wps_wf1.models import StationCodeList logger = logging.getLogger(__name__) class StationNotFoundException(Exception): - """ Custom exception for when a station cannot be found """ + """Custom exception for when a station cannot be found""" -def parse_table_records_to_noon_forecast_response(data: List[wps_shared.db.models.forecasts.NoonForecast]): - """ Given a list of table records from the database, parse each record +def parse_table_records_to_noon_forecast_response( + data: List[wps_shared.db.models.forecasts.NoonForecast], +): + """Given a list of table records from the database, parse each record (which is a NoonForecast object) and structure it as a NoonForecast object, then return the list of NoonForecast objects as a NoonForecastResponse """ @@ -35,14 +38,16 @@ def parse_table_records_to_noon_forecast_response(data: List[wps_shared.db.model month=record.weather_date.month, day=record.weather_date.day, hour=record.weather_date.hour, - tzinfo=timezone.utc).isoformat(), + 
tzinfo=timezone.utc, + ).isoformat(), temp_valid=record.temp_valid, temperature=None if math.isnan(record.temperature) else record.temperature, rh_valid=record.rh_valid, - relative_humidity=None if math.isnan(record.relative_humidity) else record.relative_humidity, + relative_humidity=None + if math.isnan(record.relative_humidity) + else record.relative_humidity, wdir_valid=record.wdir_valid, - wind_direction=None if math.isnan( - record.wind_direction) else record.wind_direction, + wind_direction=None if math.isnan(record.wind_direction) else record.wind_direction, wspeed_valid=record.wspeed_valid, wind_speed=record.wind_speed, precip_valid=record.precip_valid, @@ -55,30 +60,27 @@ def parse_table_records_to_noon_forecast_response(data: List[wps_shared.db.model bui=None if math.isnan(record.bui) else record.bui, fwi=None if math.isnan(record.fwi) else record.fwi, danger_rating=None, - created_at=record.created_at + created_at=record.created_at, ) noon_forecasts[station_code].append(noon_forecast_value) values = [] for key, value in noon_forecasts.items(): - noon_forecast = NoonForecast( - station_code=key, - values=value - ) + noon_forecast = NoonForecast(station_code=key, values=value) values.append(noon_forecast) return NoonForecastResponse(noon_forecasts=values) -def fetch_noon_forecasts(stations: StationCodeList, - start_date: datetime, - end_date: datetime) -> NoonForecastResponse: - """ Query all noon forecasts between start_date and end_date for the specified weather station. Note that +def fetch_noon_forecasts( + stations: StationCodeList, start_date: datetime, end_date: datetime +) -> NoonForecastResponse: + """Query all noon forecasts between start_date and end_date for the specified weather station. Note that there may be multiple records for the same weather station and same weather_date, as noon forecasts - are updated twice daily. 
""" - logger.debug('Querying noon forecasts for stations %s from %s to %s', - stations, start_date, end_date) + are updated twice daily.""" + logger.debug( + "Querying noon forecasts for stations %s from %s to %s", stations, start_date, end_date + ) with wps_shared.db.database.get_read_session_scope() as session: - forecasts = query_noon_forecast_records( - session, stations, start_date, end_date) + forecasts = query_noon_forecast_records(session, stations, start_date, end_date) return parse_table_records_to_noon_forecast_response(forecasts) diff --git a/backend/packages/wps-api/src/app/forecasts/noon_forecasts_summaries.py b/backend/packages/wps-api/src/app/forecasts/noon_forecasts_summaries.py index 95f46977e3..2cb3944ff8 100644 --- a/backend/packages/wps-api/src/app/forecasts/noon_forecasts_summaries.py +++ b/backend/packages/wps-api/src/app/forecasts/noon_forecasts_summaries.py @@ -1,22 +1,27 @@ -""" This module is used to fetch noon forecasts summaries with minimum and maximum values for each day """ +"""This module is used to fetch noon forecasts summaries with minimum and maximum values for each day""" import json import logging from collections import defaultdict from datetime import datetime -import wps_shared.stations + import wps_shared.db.database +import wps_shared.stations from wps_shared.db.crud.forecasts import query_noon_forecast_records -from wps_shared.schemas.forecasts import NoonForecastSummariesResponse, NoonForecastSummary, NoonForecastSummaryValues -from wps_shared.schemas.stations import WeatherStation, StationCodeList +from wps_shared.schemas.forecasts import ( + NoonForecastSummariesResponse, + NoonForecastSummary, + NoonForecastSummaryValues, +) +from wps_wf1.models import StationCodeList, WeatherStation logger = logging.getLogger(__name__) -def create_noon_forecast_summary(station: WeatherStation, - records_by_station: dict - ) -> NoonForecastSummary: - """ Returns NoonForecastSummary with min and max for each day """ +def 
create_noon_forecast_summary( + station: WeatherStation, records_by_station: dict +) -> NoonForecastSummary: + """Returns NoonForecastSummary with min and max for each day""" summary = NoonForecastSummary(station=station) records_for_one_station = records_by_station[station.code] @@ -27,33 +32,31 @@ def create_noon_forecast_summary(station: WeatherStation, for record in records_for_one_station: date = record.weather_date.isoformat() - nested_dict[date]['temp'].append(record.temperature) - nested_dict[date]['rh'].append(record.relative_humidity) + nested_dict[date]["temp"].append(record.temperature) + nested_dict[date]["rh"].append(record.relative_humidity) logger.debug(json.dumps(nested_dict, sort_keys=True, indent=4)) for date in nested_dict: min_max_values = NoonForecastSummaryValues( datetime=date, - tmp_min=min(nested_dict[date]['temp']), - tmp_max=max(nested_dict[date]['temp']), - rh_min=min(nested_dict[date]['rh']), - rh_max=max(nested_dict[date]['rh']), + tmp_min=min(nested_dict[date]["temp"]), + tmp_max=max(nested_dict[date]["temp"]), + rh_min=min(nested_dict[date]["rh"]), + rh_max=max(nested_dict[date]["rh"]), ) summary.values.append(min_max_values) return summary -async def fetch_noon_forecasts_summaries(station_codes: StationCodeList, - start_date: datetime, - end_date: datetime - ) -> NoonForecastSummariesResponse: - """ Fetch noon forecasts from the database and parse them, - then calculate min&max and put them in NoonForecastSummariesResponse """ +async def fetch_noon_forecasts_summaries( + station_codes: StationCodeList, start_date: datetime, end_date: datetime +) -> NoonForecastSummariesResponse: + """Fetch noon forecasts from the database and parse them, + then calculate min&max and put them in NoonForecastSummariesResponse""" with wps_shared.db.database.get_read_session_scope() as session: - records = query_noon_forecast_records( - session, station_codes, start_date, end_date) + records = query_noon_forecast_records(session, station_codes, 
start_date, end_date) records_by_station = defaultdict(list) for record in records: diff --git a/backend/packages/wps-api/src/app/hfi/hfi_calc.py b/backend/packages/wps-api/src/app/hfi/hfi_calc.py index 6c9dcb892c..556272d0ca 100644 --- a/backend/packages/wps-api/src/app/hfi/hfi_calc.py +++ b/backend/packages/wps-api/src/app/hfi/hfi_calc.py @@ -1,43 +1,59 @@ """HFI calculation logic""" -import math import logging -from typing import Optional, List, Dict, Set, Tuple +import math +from collections import defaultdict from datetime import date, datetime, timedelta, timezone +from itertools import groupby from statistics import mean +from typing import Dict, List, Optional, Set, Tuple + +import wps_shared.utils.time from aiohttp.client import ClientSession -from collections import defaultdict -from itertools import groupby from sqlalchemy.orm import Session -import app +from wps_shared.db.crud.hfi_calc import ( + get_fire_centre_fire_start_ranges, + get_fire_centre_station_codes, + get_fire_start_lookup, + get_fire_weather_stations, + get_fuel_types, +) from wps_shared.db.database import get_read_session_scope -from wps_shared.db.models.hfi_calc import FuelType as FuelTypeModel, PlanningWeatherStation -from app.fire_behaviour.cffdrs import CFFDRSException -from app.fire_behaviour.prediction import FireBehaviourPredictionInputError, calculate_fire_behaviour_prediction, FireBehaviourPrediction +from wps_shared.db.models.hfi_calc import FuelType as FuelTypeModel +from wps_shared.db.models.hfi_calc import PlanningWeatherStation +from wps_shared.fuel_types import FUEL_TYPE_DEFAULTS, FuelTypeEnum from wps_shared.schemas.hfi_calc import ( DailyResult, DateRange, + FireCentre, FireStartRange, HFIResultRequest, + PlanningArea, PlanningAreaResult, StationDaily, StationInfo, ValidatedStationDaily, + WeatherStation, + WeatherStationProperties, required_daily_fields, ) -from wps_shared.schemas.hfi_calc import WeatherStationProperties, FuelType as FuelTypeSchema, FireCentre, 
PlanningArea, WeatherStation -from wps_shared.fuel_types import FUEL_TYPE_DEFAULTS, FuelTypeEnum -from wps_shared.schemas.stations import WeatherStation as WFWXWeatherStationDetails from wps_shared.utils.time import get_hour_20_from_date, get_pst_now -from wps_shared.wildfire_one.schema_parsers import WFWXWeatherStation -from wps_shared.wildfire_one.wfwx_api import get_auth_header, get_stations_by_codes, get_wfwx_stations_from_station_codes, get_raw_dailies_in_range_generator -from wps_shared.db.crud.hfi_calc import get_fire_weather_stations, get_fire_centre_fire_start_ranges, get_fire_start_lookup, get_fuel_types -import wps_shared.utils.time +from wps_shared.wildfire_one.wfwx_api import create_wfwx_api +from wps_wf1.models import WFWXWeatherStation, WeatherStation as WFWXWeatherStationDetails + +from app.fire_behaviour.cffdrs import CFFDRSException +from app.fire_behaviour.prediction import ( + FireBehaviourPrediction, + FireBehaviourPredictionInputError, + calculate_fire_behaviour_prediction, +) logger = logging.getLogger(__name__) -def generate_station_daily(raw_daily: dict, station: WFWXWeatherStation, fuel_type: FuelTypeModel) -> StationDaily: +def generate_station_daily( + raw_daily: dict, station: WFWXWeatherStation, fuel_type: FuelTypeModel +) -> StationDaily: """Transform from the raw daily json object returned by wf1, to our daily object.""" pc = fuel_type.percentage_conifer pdf = fuel_type.percentage_dead_fir @@ -67,7 +83,9 @@ def generate_station_daily(raw_daily: dict, station: WFWXWeatherStation, fuel_ty cfl=cfl, ) except (FireBehaviourPredictionInputError, CFFDRSException) as error: - logger.info("Error calculating fire behaviour prediction for station %s : %s", station.code, error) + logger.info( + "Error calculating fire behaviour prediction for station %s : %s", station.code, error + ) fire_behaviour_prediction = FireBehaviourPrediction(None, None, None, None, None) return StationDaily( @@ -102,17 +120,23 @@ def generate_station_daily(raw_daily: 
dict, station: WFWXWeatherStation, fuel_ty fire_type=fire_behaviour_prediction.fire_type, error=raw_daily.get("observationValidInd", None), error_message=raw_daily.get("observationValidComment", None), - last_updated=datetime.fromtimestamp(raw_daily["lastEntityUpdateTimestamp"] / 1000, tz=timezone.utc), + last_updated=datetime.fromtimestamp( + raw_daily["lastEntityUpdateTimestamp"] / 1000, tz=timezone.utc + ), ) -def get_prep_day_dailies(dailies_date: date, area_dailies: List[StationDaily]) -> List[StationDaily]: +def get_prep_day_dailies( + dailies_date: date, area_dailies: List[StationDaily] +) -> List[StationDaily]: """Return all the dailies (that's noon, or 20 hours UTC) for a given date""" dailies_date_time = get_hour_20_from_date(dailies_date) return list(filter(lambda daily: (daily.date == dailies_date_time), area_dailies)) -def get_hydrated_stations(stations: List[PlanningWeatherStation], stations_by_code: Dict[int, WFWXWeatherStationDetails]): +def get_hydrated_stations( + stations: List[PlanningWeatherStation], stations_by_code: Dict[int, WFWXWeatherStationDetails] +): """ Merges all details of stations from our database and the WFWX API together. 
@@ -128,7 +152,11 @@ def get_hydrated_stations(stations: List[PlanningWeatherStation], stations_by_co WeatherStation( code=wfwx_station.code, order_of_appearance_in_planning_area_list=station.order_of_appearance_in_planning_area_list, - station_props=WeatherStationProperties(name=wfwx_station.name, elevation=wfwx_station.elevation, wfwx_station_uuid=wfwx_station.wfwx_station_uuid), + station_props=WeatherStationProperties( + name=wfwx_station.name, + elevation=wfwx_station.elevation, + wfwx_station_uuid=wfwx_station.wfwx_station_uuid, + ), ) ) return hydrated_stations @@ -151,14 +179,21 @@ async def hydrate_fire_centres(): stations_by_area = groupby(sorted_rows, key=lambda row: row[0].planning_area_id) station_codes = [station.station_code for (station, _, __, ___) in rows] - wfwx_stations_data = await get_stations_by_codes(list(set(station_codes))) - stations_by_code: Dict[int, WFWXWeatherStationDetails] = {station.code: station for station in wfwx_stations_data} + # TODO: Could this use wps_shared.stations.get_stations_by_codes + async with ClientSession() as session: + wfwx_api = create_wfwx_api(session) + wfwx_stations_data = await wfwx_api.get_stations_by_codes(list(set(station_codes))) + stations_by_code: Dict[int, WFWXWeatherStationDetails] = { + station.code: station for station in wfwx_stations_data + } planning_areas_by_fire_centre_id = defaultdict(list) fire_centres_by_id = {fire_centre.id: fire_centre for (_, __, ___, fire_centre) in rows} for _, records in stations_by_area: - stations_with_planning_areas = [(station, planning_area) for (station, _, planning_area, ___) in list(records)] + stations_with_planning_areas = [ + (station, planning_area) for (station, _, planning_area, ___) in records + ] stations, planning_areas = zip(*stations_with_planning_areas) planning_area = planning_areas[0] hydrated_stations = get_hydrated_stations(stations, stations_by_code) @@ -177,39 +212,56 @@ async def hydrate_fire_centres(): for fire_centre_id, fire_centre in 
fire_centres_by_id.items(): planning_areas = planning_areas_by_fire_centre_id.get(fire_centre_id, []) - hydrated_fire_centres.append(FireCentre(id=fire_centre.id, name=fire_centre.name, planning_areas=planning_areas)) + hydrated_fire_centres.append( + FireCentre(id=fire_centre.id, name=fire_centre.name, planning_areas=planning_areas) + ) return hydrated_fire_centres async def calculate_latest_hfi_results( - orm_session: Session, request: HFIResultRequest, fire_centre_fire_start_ranges: List[FireStartRange] + orm_session: Session, + request: HFIResultRequest, + fire_centre_fire_start_ranges: List[FireStartRange], ) -> Tuple[List[PlanningAreaResult], DateRange]: """Set up time range and fire centre data for calculating HFI results""" # ensure we have valid start and end dates valid_date_range = validate_date_range(request.date_range) # wf1 talks in terms of timestamps, so we convert the dates to the correct timestamps. - start_timestamp = int(wps_shared.utils.time.get_hour_20_from_date(valid_date_range.start_date).timestamp() * 1000) - end_timestamp = int(wps_shared.utils.time.get_hour_20_from_date(valid_date_range.end_date).timestamp() * 1000) + start_timestamp = int( + wps_shared.utils.time.get_hour_20_from_date(valid_date_range.start_date).timestamp() * 1000 + ) + end_timestamp = int( + wps_shared.utils.time.get_hour_20_from_date(valid_date_range.end_date).timestamp() * 1000 + ) async with ClientSession() as session: - header = await get_auth_header(session) # Fetching dailies is an expensive operation. When a user is clicking and unclicking stations # in the front end, we'd prefer to not change the call that's going to wfwx so that we can # use cached values. So we don't actually filter out the "selected" stations, but rather go # get all the stations for this fire centre. 
- fire_centre_stations = [station for area_stations in request.planning_area_station_info.values() for station in area_stations] + fire_centre_stations = [ + station + for area_stations in request.planning_area_station_info.values() + for station in area_stations + ] fire_centre_station_code_ids = set() for station in fire_centre_stations: fire_centre_station_code_ids.add(station.station_code) fire_start_lookup = build_fire_start_prep_level_lookup(orm_session) - wfwx_stations: List[WFWXWeatherStation] = await get_wfwx_stations_from_station_codes(session, header, list(fire_centre_station_code_ids)) + fire_centre_station_codes = get_fire_centre_station_codes() + wfwx_api = create_wfwx_api(session) + wfwx_stations = await wfwx_api.get_wfwx_stations_from_station_codes( + list(fire_centre_station_code_ids), fire_centre_station_codes + ) wfwx_station_ids = [wfwx_station.wfwx_id for wfwx_station in wfwx_stations] - raw_dailies_generator = await get_raw_dailies_in_range_generator(session, header, wfwx_station_ids, start_timestamp, end_timestamp) + raw_dailies_generator = await wfwx_api.get_raw_dailies_in_range_generator( + wfwx_station_ids, start_timestamp, end_timestamp + ) raw_dailies: List[dict] = [raw_daily async for raw_daily in raw_dailies_generator] fuel_type_lookup: Dict[int, FuelTypeModel] = generate_fuel_type_lookup(orm_session) @@ -234,7 +286,9 @@ def build_fire_start_prep_level_lookup(orm_session) -> Dict[int, Dict[int, int]] for lookup in fire_start_lookup_records: if lookup.fire_start_range_id not in fire_start_lookup: fire_start_lookup[lookup.fire_start_range_id] = {} - fire_start_lookup[lookup.fire_start_range_id][lookup.mean_intensity_group] = lookup.prep_level + fire_start_lookup[lookup.fire_start_range_id][lookup.mean_intensity_group] = ( + lookup.prep_level + ) return fire_start_lookup @@ -242,13 +296,23 @@ def load_fire_start_ranges(orm_session, fire_centre_id: int) -> List[FireStartRa """Fetch the fire start ranges for a fire centre from the database, 
and return them as a list of schema objects. """ - return [FireStartRange(label=fire_start_range.label, id=fire_start_range.id) for fire_start_range in get_fire_centre_fire_start_ranges(orm_session, fire_centre_id)] + return [ + FireStartRange(label=fire_start_range.label, id=fire_start_range.id) + for fire_start_range in get_fire_centre_fire_start_ranges(orm_session, fire_centre_id) + ] -def initialize_planning_area_fire_starts(planning_area_fire_starts: Dict[int, FireStartRange], planning_area_id: int, num_prep_days: int, lowest_fire_starts: FireStartRange): +def initialize_planning_area_fire_starts( + planning_area_fire_starts: Dict[int, FireStartRange], + planning_area_id: int, + num_prep_days: int, + lowest_fire_starts: FireStartRange, +): """Load up the planning area fire start ranges with default values.""" if planning_area_id not in planning_area_fire_starts: - planning_area_fire_starts[planning_area_id] = [lowest_fire_starts for _ in range(num_prep_days)] + planning_area_fire_starts[planning_area_id] = [ + lowest_fire_starts for _ in range(num_prep_days) + ] else: # Handle edge case where the provided planning area fire starts doesn't match the number # of prep days. @@ -273,12 +337,22 @@ def calculate_daily_results( prep_day_dailies = get_prep_day_dailies(dailies_date, area_dailies) daily_fire_starts: FireStartRange = planning_area_fire_starts[area_id][index] mean_intensity_group = calculate_mean_intensity(prep_day_dailies, num_unique_station_codes) - prep_level = calculate_prep_level(mean_intensity_group, daily_fire_starts, fire_start_lookup) - validated_dailies: List[ValidatedStationDaily] = list(map(validate_station_daily, prep_day_dailies)) + prep_level = calculate_prep_level( + mean_intensity_group, daily_fire_starts, fire_start_lookup + ) + validated_dailies: List[ValidatedStationDaily] = list( + map(validate_station_daily, prep_day_dailies) + ) # check if all validated_dailies are valid. 
valids = [v.valid for v in validated_dailies] all_dailies_valid = all(valids) - daily_result = DailyResult(date=dailies_date, dailies=validated_dailies, fire_starts=daily_fire_starts, mean_intensity_group=mean_intensity_group, prep_level=prep_level) + daily_result = DailyResult( + date=dailies_date, + dailies=validated_dailies, + fire_starts=daily_fire_starts, + mean_intensity_group=mean_intensity_group, + prep_level=prep_level, + ) daily_results.append(daily_result) return daily_results, all_dailies_valid @@ -290,12 +364,18 @@ def generate_fuel_type_lookup(orm_session: Session) -> Dict[int, FuelTypeModel]: def calculate_station_dailies( - raw_dailies: List[dict], station_info_list: List[StationInfo], station_lookup: Dict[str, WFWXWeatherStation], fuel_type_lookup: Dict[int, FuelTypeModel] + raw_dailies: List[dict], + station_info_list: List[StationInfo], + station_lookup: Dict[str, WFWXWeatherStation], + fuel_type_lookup: Dict[int, FuelTypeModel], ) -> List[StationDaily]: """Build a list of dailies with results from the fire behaviour calculations.""" area_dailies: List[StationDaily] = [] - selected_station_codes = [station.station_code for station in filter(lambda station: (station.selected), station_info_list)] + selected_station_codes = [ + station.station_code + for station in filter(lambda station: (station.selected), station_info_list) + ] station_info_lookup = {station.station_code: station for station in station_info_list} for raw_daily in raw_dailies: @@ -325,29 +405,49 @@ def calculate_hfi_results( """Computes HFI results based on parameter inputs""" planning_area_to_dailies: List[PlanningAreaResult] = [] - station_lookup: Dict[str, WFWXWeatherStation] = {station.wfwx_id: station for station in wfwx_stations} + station_lookup: Dict[str, WFWXWeatherStation] = { + station.wfwx_id: station for station in wfwx_stations + } wfwx_station_codes: Set[int] = set([station.code for station in wfwx_stations]) for area_id in planning_area_station_info.keys(): - 
area_dailies = calculate_station_dailies(raw_dailies, planning_area_station_info[area_id], station_lookup, fuel_type_lookup) + area_dailies = calculate_station_dailies( + raw_dailies, planning_area_station_info[area_id], station_lookup, fuel_type_lookup + ) # Initialize with defaults if empty/wrong length # TODO: Sometimes initialize_planning_area_fire_starts is called twice. Look into this once # endpoint re-factor is complete. lowest_fire_starts = fire_start_ranges[0] - initialize_planning_area_fire_starts(planning_area_fire_starts, area_id, num_prep_days, lowest_fire_starts) + initialize_planning_area_fire_starts( + planning_area_fire_starts, area_id, num_prep_days, lowest_fire_starts + ) - selected_stations = [station.station_code for station in planning_area_station_info[area_id] if station.selected is True and station.station_code in wfwx_station_codes] + selected_stations = [ + station.station_code + for station in planning_area_station_info[area_id] + if station.selected is True and station.station_code in wfwx_station_codes + ] all_dailies_valid: bool = True num_unique_station_codes = len(set(selected_stations)) (daily_results, all_dailies_valid) = calculate_daily_results( - num_prep_days, start_date, area_dailies, planning_area_fire_starts, area_id, fire_start_lookup, num_unique_station_codes + num_prep_days, + start_date, + area_dailies, + planning_area_fire_starts, + area_id, + fire_start_lookup, + num_unique_station_codes, ) - highest_daily_intensity_group = calculate_max_intensity_group(list(map(lambda daily_result: (daily_result.mean_intensity_group), daily_results))) + highest_daily_intensity_group = calculate_max_intensity_group( + list(map(lambda daily_result: (daily_result.mean_intensity_group), daily_results)) + ) - mean_prep_level = calculate_mean_prep_level(list(map(lambda daily_result: (daily_result.prep_level), daily_results)), num_prep_days) + mean_prep_level = calculate_mean_prep_level( + list(map(lambda daily_result: 
(daily_result.prep_level), daily_results)), num_prep_days + ) planning_area_to_dailies.append( PlanningAreaResult( @@ -394,7 +494,11 @@ def calculate_mean_intensity(dailies: List[StationDaily], num_of_station_codes: return math.ceil(mean_intensity_group) -def calculate_prep_level(mean_intensity_group: Optional[float], fire_starts: FireStartRange, fire_start_lookup: Dict[int, Dict[int, int]]) -> Optional[int]: +def calculate_prep_level( + mean_intensity_group: Optional[float], + fire_starts: FireStartRange, + fire_start_lookup: Dict[int, Dict[int, int]], +) -> Optional[int]: """Returns the prep level based on the MIG and fire starts range.""" if mean_intensity_group is None: return None diff --git a/backend/packages/wps-api/src/app/hfi/pdf_generator.py b/backend/packages/wps-api/src/app/hfi/pdf_generator.py index 0c26f792ea..59690779bc 100644 --- a/backend/packages/wps-api/src/app/hfi/pdf_generator.py +++ b/backend/packages/wps-api/src/app/hfi/pdf_generator.py @@ -1,72 +1,83 @@ """Generate a daily PDF""" + import logging from datetime import date, datetime -from typing import List, Dict, Tuple +from typing import Dict, List, Tuple + import pdfkit from jinja2 import Environment from wps_shared.db.models.hfi_calc import FuelType -from wps_shared.schemas.hfi_calc import FireCentre, HFIResultResponse, PlanningArea, WeatherStation -from app.hfi.pdf_template import PDFTemplateName, CSS_PATH -from app.hfi.pdf_data_formatter import response_2_daily_jinja_format, response_2_prep_cycle_jinja_format +from wps_shared.schemas.hfi_calc import HFIResultResponse, PlanningArea, WeatherStation +from wps_wf1.models import FireCentre +from app.hfi.pdf_data_formatter import ( + response_2_daily_jinja_format, + response_2_prep_cycle_jinja_format, +) +from app.hfi.pdf_template import CSS_PATH, PDFTemplateName logger = logging.getLogger(__name__) -def generate_html(result: HFIResultResponse, - fire_centres: List[FireCentre], - idir: str, - datetime_generated: datetime, - jinja_env: 
Environment, - fuel_types: Dict[int, FuelType]) -> Tuple[str, str]: +def generate_html( + result: HFIResultResponse, + fire_centres: List[FireCentre], + idir: str, + datetime_generated: datetime, + jinja_env: Environment, + fuel_types: Dict[int, FuelType], +) -> Tuple[str, str]: """Generates the full HTML based on the HFIResultResponse""" fire_centre_dict, planning_area_dict, station_dict = build_mappings(fire_centres) fire_centre_name = fire_centre_dict[result.selected_fire_center_id].name - rendered_output = generate_prep(result, - idir, - datetime_generated, - planning_area_dict, - station_dict, - fire_centre_name, - jinja_env, - fuel_types) - rendered_output += generate_daily(result, - idir, - datetime_generated, - planning_area_dict, - station_dict, - fire_centre_name, - jinja_env, - fuel_types) + rendered_output = generate_prep( + result, + idir, + datetime_generated, + planning_area_dict, + station_dict, + fire_centre_name, + jinja_env, + fuel_types, + ) + rendered_output += generate_daily( + result, + idir, + datetime_generated, + planning_area_dict, + station_dict, + fire_centre_name, + jinja_env, + fuel_types, + ) return rendered_output, fire_centre_name -def generate_pdf(result: HFIResultResponse, - fire_centres: List[FireCentre], - idir: str, - datetime_generated: datetime, - jinja_env: Environment, - fuel_types: Dict[int, FuelType]) -> Tuple[bytes, str]: +def generate_pdf( + result: HFIResultResponse, + fire_centres: List[FireCentre], + idir: str, + datetime_generated: datetime, + jinja_env: Environment, + fuel_types: Dict[int, FuelType], +) -> Tuple[bytes, str]: """Generates the full PDF based on the HFIResultResponse""" - rendered_output, fire_centre_name = generate_html(result, - fire_centres, - idir, - datetime_generated, - jinja_env, - fuel_types) - - left_footer = f'Exported on {datetime_generated.isoformat()} by {idir} | https://psu.nrs.gov.bc.ca/hfi-calculator' + rendered_output, fire_centre_name = generate_html( + result, fire_centres, idir, 
datetime_generated, jinja_env, fuel_types + ) + + left_footer = f"Exported on {datetime_generated.isoformat()} by {idir} | https://psu.nrs.gov.bc.ca/hfi-calculator" options = { - 'page-size': 'Letter', - 'orientation': 'Landscape', - 'margin-left': '7mm', - 'margin-right': '7mm', - 'footer-left': left_footer, - 'footer-right': '[page] of [topage]', - 'footer-font-name': 'BCSans', - 'footer-font-size': '6' + "page-size": "Letter", + "orientation": "Landscape", + "margin-left": "7mm", + "margin-right": "7mm", + "footer-left": left_footer, + "footer-right": "[page] of [topage]", + "footer-font-name": "BCSans", + "footer-font-size": "6", } pdf_bytes: bytes = pdfkit.from_string(input=rendered_output, options=options, css=CSS_PATH) @@ -75,20 +86,20 @@ def generate_pdf(result: HFIResultResponse, return pdf_bytes, pdf_filename -def generate_prep(result: HFIResultResponse, - idir: str, - datetime_generated: datetime, - planning_area_dict: Dict[int, PlanningArea], - station_dict: Dict[int, WeatherStation], - fire_centre_name: str, - jinja_env: Environment, - fuel_types: Dict[int, FuelType]): +def generate_prep( + result: HFIResultResponse, + idir: str, + datetime_generated: datetime, + planning_area_dict: Dict[int, PlanningArea], + station_dict: Dict[int, WeatherStation], + fire_centre_name: str, + jinja_env: Environment, + fuel_types: Dict[int, FuelType], +): """Generates the prep cycle portion of the PDF""" prep_pdf_data, dates, date_range = response_2_prep_cycle_jinja_format( - result, - planning_area_dict, - station_dict, - fuel_types) + result, planning_area_dict, station_dict, fuel_types + ) template = jinja_env.get_template(PDFTemplateName.PREP.value) return template.render( @@ -97,33 +108,35 @@ def generate_prep(result: HFIResultResponse, planning_areas=prep_pdf_data, prep_days=dates, fire_centre_name=fire_centre_name, - date_range=date_range) - - -def generate_daily(result: HFIResultResponse, - idir: str, - datetime_generated: datetime, - planning_area_dict: 
Dict[int, PlanningArea], - station_dict: Dict[int, WeatherStation], - fire_centre_name: str, - jinja_env: Environment, - fuel_types: Dict[int, FuelType]) -> str: + date_range=date_range, + ) + + +def generate_daily( + result: HFIResultResponse, + idir: str, + datetime_generated: datetime, + planning_area_dict: Dict[int, PlanningArea], + station_dict: Dict[int, WeatherStation], + fire_centre_name: str, + jinja_env: Environment, + fuel_types: Dict[int, FuelType], +) -> str: """Generates the daily portion of the PDF""" template = jinja_env.get_template(PDFTemplateName.DAILY.value) daily_pdf_data_by_date = response_2_daily_jinja_format( - result, - planning_area_dict, - station_dict, - fuel_types) + result, planning_area_dict, station_dict, fuel_types + ) return template.render( idir=idir, datetime_generated=datetime_generated.isoformat(), daily_pdf_data_by_date=daily_pdf_data_by_date, - fire_centre_name=fire_centre_name) + fire_centre_name=fire_centre_name, + ) def build_mappings(fire_centres: List[FireCentre]): - """ Marshall hydrated fire centres into dicts keyed by id """ + """Marshall hydrated fire centres into dicts keyed by id""" fire_centre_dict: Dict[int, FireCentre] = {} planning_area_dict: Dict[int, PlanningArea] = {} station_dict: Dict[int, WeatherStation] = {} @@ -137,10 +150,12 @@ def build_mappings(fire_centres: List[FireCentre]): def get_pdf_filename(fire_centre_name: str, date_generated: date, idir: str) -> str: - """ Returns the formatted pdf filename """ - return fire_centre_name.replace(" ", "") + \ - "_HFICalculator_" + \ - date_generated.isoformat() + \ - "_" + \ - idir.upper() + \ - ".pdf" + """Returns the formatted pdf filename""" + return ( + fire_centre_name.replace(" ", "") + + "_HFICalculator_" + + date_generated.isoformat() + + "_" + + idir.upper() + + ".pdf" + ) diff --git a/backend/packages/wps-api/src/app/hourlies.py b/backend/packages/wps-api/src/app/hourlies.py index 7809b26245..508842b6ca 100644 --- 
a/backend/packages/wps-api/src/app/hourlies.py +++ b/backend/packages/wps-api/src/app/hourlies.py @@ -1,21 +1,17 @@ -""" Hourly reading from weather stations ("actuals") -""" +"""Hourly reading from weather stations ("actuals")""" + import math -from typing import List from datetime import datetime, timedelta -from aiohttp.client import ClientSession +from typing import List +from aiohttp.client import ClientSession from aiohttp.connector import TCPConnector -import wps_shared.db.database -from wps_shared.db.crud.observations import get_hourly_actuals -import wps_shared.stations -from wps_shared.schemas.observations import WeatherStationHourlyReadings, WeatherReading -from wps_shared.utils.dewpoint import compute_dewpoint -from wps_shared.wildfire_one import wfwx_api +from wps_shared.wildfire_one.wfwx_api import create_wfwx_api +from wps_wf1.models import WeatherStationHourlyReadings def get(value: object, condition: bool = True): - """ If the condition is False, or the value is not a number, return None, otherwise + """If the condition is False, or the value is not a number, return None, otherwise return the value. """ if not condition or value is None or math.isnan(value): @@ -23,43 +19,8 @@ def get(value: object, condition: bool = True): return value -async def fetch_hourly_readings_from_db( - station_codes: List[int], - date_from: datetime, - date_to: datetime) -> List[WeatherStationHourlyReadings]: - """ Fetch the hourly readings from the database. 
- """ - stations = await wfwx_api.get_stations_by_codes(station_codes) - with wps_shared.db.database.get_read_session_scope() as session: - readings = get_hourly_actuals(session, station_codes, date_from, date_to) - station_readings = None - result = [] - - for reading in readings: - if station_readings is None or reading.station_code != station_readings.station.code: - station = next( - station for station in stations if station.code == reading.station_code) - station_readings = WeatherStationHourlyReadings( - station=station, values=[]) - result.append(station_readings) - weather_reading = WeatherReading( - datetime=reading.weather_date, - temperature=get(reading.temperature, reading.temp_valid), - relative_humidity=get(reading.relative_humidity, reading.rh_valid), - wind_speed=get(reading.wind_speed, reading.wspeed_valid), - wind_direction=get(reading.wind_direction, reading.wdir_valid), - precipitation=get(reading.precipitation, reading.precip_valid), - dewpoint=compute_dewpoint(get(reading.temperature), get(reading.relative_humidity)), - ffmc=get(reading.ffmc), - isi=get(reading.isi), - fwi=get(reading.fwi) - ) - station_readings.values.append(weather_reading) - return result - - def _get_time_interval(time_of_interest: datetime): - """ Returns the start and end datetimes for hourly readings based on given time of interest """ + """Returns the start and end datetimes for hourly readings based on given time of interest""" # by default, we want the past 5 days, and if available the next 10 days. start_time_stamp = time_of_interest - timedelta(days=5) # the UI is interested in hourly reading before and after the time of interest. @@ -69,9 +30,9 @@ def _get_time_interval(time_of_interest: datetime): async def get_hourly_readings( - station_codes: List[int], - time_of_interest: datetime) -> List[WeatherStationHourlyReadings]: - """ Get the hourly readings for the list of station codes provided. 
+ station_codes: List[int], time_of_interest: datetime +) -> List[WeatherStationHourlyReadings]: + """Get the hourly readings for the list of station codes provided. Reading 5 days before, and 10 days after the time of interest are returned. Depending on configuration, will read from WF1 or from local database. """ @@ -79,19 +40,21 @@ async def get_hourly_readings( # Limit the number of concurrent connections. async with ClientSession(connector=TCPConnector(limit=10)) as session: - header = await wfwx_api.get_auth_header(session) - return await wfwx_api.get_hourly_readings(session, header, station_codes, start_time_stamp, end_time_stamp) + wfwx_api = create_wfwx_api(session) + return await wfwx_api.get_hourly_readings(station_codes, start_time_stamp, end_time_stamp) async def get_hourly_readings_in_time_interval( - station_codes: List[int], - start_time_stamp: datetime, - end_time_stamp: datetime, - use_cache: bool = True) -> List[WeatherStationHourlyReadings]: - """ Fetch the hourly observations from WFWX API for the list of station codes provided, + station_codes: List[int], + start_time_stamp: datetime, + end_time_stamp: datetime, + use_cache: bool = True, +) -> List[WeatherStationHourlyReadings]: + """Fetch the hourly observations from WFWX API for the list of station codes provided, between the start_time_stamp and end_time_stamp specified. 
""" async with ClientSession(connector=TCPConnector(limit=10)) as session: - header = await wfwx_api.get_auth_header(session) + wfwx_api = create_wfwx_api(session) return await wfwx_api.get_hourly_readings( - session, header, station_codes, start_time_stamp, end_time_stamp, use_cache) + station_codes, start_time_stamp, end_time_stamp, use_cache + ) diff --git a/backend/packages/wps-api/src/app/jobs/hourly_actuals.py b/backend/packages/wps-api/src/app/jobs/hourly_actuals.py index eb062cfc9c..fcf513ce20 100644 --- a/backend/packages/wps-api/src/app/jobs/hourly_actuals.py +++ b/backend/packages/wps-api/src/app/jobs/hourly_actuals.py @@ -1,30 +1,31 @@ -""" Bot for loading hourly actual values. -""" +"""Bot for loading hourly actual values.""" + import asyncio import logging import os import sys from datetime import datetime, timedelta -from aiohttp.client import ClientSession -from sqlalchemy.exc import IntegrityError + import wps_shared.db.database import wps_shared.utils.time -from wps_shared.wps_logging import configure_logging +from aiohttp.client import ClientSession +from sqlalchemy.exc import IntegrityError from wps_shared.db.crud.observations import save_hourly_actual from wps_shared.rocketchat_notifications import send_rocketchat_notification -from wps_shared.wildfire_one import wfwx_api +from wps_shared.wildfire_one.wfwx_api import create_wfwx_api +from wps_shared.wps_logging import configure_logging logger = logging.getLogger(__name__) -class HourlyActualsJob(): - """ Job that downloads the hourly actuals from the wildfire website and stores it in a database. """ +class HourlyActualsJob: + """Job that downloads the hourly actuals from the wildfire website and stores it in a database.""" def __init__(self): self.now = wps_shared.utils.time.get_pst_now() def _get_start_date(self) -> datetime: - """ Return time N hour ago. E.g. if it's 17h15 now, we'd get YYYYMMDD16. The intention is that + """Return time N hour ago. E.g. if it's 17h15 now, we'd get YYYYMMDD16. 
The intention is that this bot runs every hour, so if we ask for everything from an hour back, we should be fine. However, just to be on the safe side, we're asking for the last three hours - just in case there was a station that came in late, or if for whatever reason we missed a run. @@ -33,38 +34,39 @@ def _get_start_date(self) -> datetime: return hour_ago def _get_end_date(self) -> datetime: - """ Return now. E.g. if it's 17h15 now, we'd get YYYYMMDD17 """ + """Return now. E.g. if it's 17h15 now, we'd get YYYYMMDD17""" return self.now async def run_wfwx(self): - """ Entry point for running the bot """ - async with ClientSession() as session: - header = await wfwx_api.get_auth_header(session) + """Entry point for running the bot""" - start_date = self._get_start_date() - end_date = self._get_end_date() - - hourly_actuals = await wfwx_api.get_hourly_actuals_all_stations( - session, header, start_date, end_date) + start_date = self._get_start_date() + end_date = self._get_end_date() + async with ClientSession() as session: + wfwx_api = create_wfwx_api(session) + hourly_actuals = await wfwx_api.get_hourly_actuals_all_stations(start_date, end_date) - logger.info('Retrieved %s hourly actuals', len(hourly_actuals)) + logger.info("Retrieved %s hourly actuals", len(hourly_actuals)) with wps_shared.db.database.get_write_session_scope() as session: for hourly_actual in hourly_actuals: try: save_hourly_actual(session, hourly_actual) except IntegrityError: - logger.info('Skipping duplicate record for %s @ %s', - hourly_actual.station_code, hourly_actual.weather_date) + logger.info( + "Skipping duplicate record for %s @ %s", + hourly_actual.station_code, + hourly_actual.weather_date, + ) session.rollback() def main(): - """ Makes the appropriate method calls in order to submit + """Makes the appropriate method calls in order to submit asynchronous queries to the Wildfire 1 API to get hourly values for all weather stations. 
""" try: - logger.debug('Retrieving hourly actuals...') + logger.debug("Retrieving hourly actuals...") bot = HourlyActualsJob() loop = asyncio.new_event_loop() @@ -75,12 +77,12 @@ def main(): sys.exit(os.EX_OK) except Exception as exception: # Exit non 0 - failure. - logger.error('Failed to retrieve hourly actuals.', exc_info=exception) - rc_message = ':scream: Encountered error retrieving hourly actuals' + logger.error("Failed to retrieve hourly actuals.", exc_info=exception) + rc_message = ":scream: Encountered error retrieving hourly actuals" send_rocketchat_notification(rc_message, exception) sys.exit(os.EX_SOFTWARE) -if __name__ == '__main__': +if __name__ == "__main__": configure_logging() main() diff --git a/backend/packages/wps-api/src/app/jobs/noon_forecasts.py b/backend/packages/wps-api/src/app/jobs/noon_forecasts.py index 31faa03c69..1c3e8a709a 100644 --- a/backend/packages/wps-api/src/app/jobs/noon_forecasts.py +++ b/backend/packages/wps-api/src/app/jobs/noon_forecasts.py @@ -11,7 +11,7 @@ from wps_shared.wps_logging import configure_logging import wps_shared.db.database from wps_shared.db.crud.forecasts import save_noon_forecast -from wps_shared.wildfire_one import wfwx_api +from wps_shared.wildfire_one.wfwx_api import create_wfwx_api import wps_shared.utils.time from wps_shared.rocketchat_notifications import send_rocketchat_notification @@ -27,10 +27,8 @@ def __init__(self): async def run_wfwx(self): """ Entry point for running the bot """ async with ClientSession() as session: - header = await wfwx_api.get_auth_header(session) - - noon_forecasts = await wfwx_api.get_noon_forecasts_all_stations( - session, header, self.now) + wfwx_api = create_wfwx_api(session) + noon_forecasts = await wfwx_api.get_noon_forecasts_all_stations(self.now) logger.info('Retrieved %s noon forecasts', len(noon_forecasts)) with wps_shared.db.database.get_write_session_scope() as session: diff --git a/backend/packages/wps-api/src/app/main.py 
b/backend/packages/wps-api/src/app/main.py index 9f93c608a1..21b90f63be 100644 --- a/backend/packages/wps-api/src/app/main.py +++ b/backend/packages/wps-api/src/app/main.py @@ -10,7 +10,9 @@ from fastapi.middleware.cors import CORSMiddleware import sentry_sdk from starlette.applications import Starlette -from wps_shared import schemas +from wps_shared.schemas.observations import WeatherStationHourlyReadingsResponse +from wps_shared.schemas.percentiles import CalculatedResponse, PercentileRequest +from wps_shared.schemas.shared import WeatherDataRequest from wps_shared.wps_logging import configure_logging from app.percentile import get_precalculated_percentiles from wps_shared.auth import authentication_required, audit @@ -171,11 +173,9 @@ async def get_health(): raise -@api.post( - "/observations/", response_model=schemas.observations.WeatherStationHourlyReadingsResponse -) +@api.post("/observations/", response_model=WeatherStationHourlyReadingsResponse) async def get_hourlies( - request: schemas.shared.WeatherDataRequest, + request: WeatherDataRequest, _=Depends(authentication_required), __=Depends(audit), ): @@ -186,14 +186,14 @@ async def get_hourlies( readings = await hourlies.get_hourly_readings(request.stations, request.time_of_interest) - return schemas.observations.WeatherStationHourlyReadingsResponse(hourlies=readings) + return WeatherStationHourlyReadingsResponse(hourlies=readings) except Exception as exception: logger.critical(exception, exc_info=True) raise -@api.post("/percentiles/", response_model=schemas.percentiles.CalculatedResponse) -async def get_percentiles(request: schemas.percentiles.PercentileRequest): +@api.post("/percentiles/", response_model=CalculatedResponse) +async def get_percentiles(request: PercentileRequest): """Return 90% FFMC, 90% ISI, 90% BUI etc. 
for a given set of fire stations for a given period of time.""" try: logger.info("/percentiles/") diff --git a/backend/packages/wps-api/src/app/morecast_v2/forecasts.py b/backend/packages/wps-api/src/app/morecast_v2/forecasts.py index fca331c676..ecdc45f2de 100644 --- a/backend/packages/wps-api/src/app/morecast_v2/forecasts.py +++ b/backend/packages/wps-api/src/app/morecast_v2/forecasts.py @@ -1,31 +1,35 @@ +from collections import defaultdict from datetime import datetime, time, timedelta, timezone +from typing import List, Optional, Tuple from urllib.parse import urljoin -from wps_shared import config -from cffdrs import ffmc, dc, dmc, isi, bui, fwi - -from aiohttp import ClientSession -from collections import defaultdict - -from wps_shared.utils.time import vancouver_tz -from typing import List, Optional, Tuple +from cffdrs import bui, dc, dmc, ffmc, fwi, isi from sqlalchemy.orm import Session +from wps_shared import config +from wps_shared.db.crud.hfi_calc import get_fire_centre_station_codes from wps_shared.db.crud.morecast_v2 import get_forecasts_in_range from wps_shared.schemas.morecast_v2 import ( - MoreCastForecastOutput, MoreCastForecastInput, + MoreCastForecastOutput, +) +from wps_shared.utils.time import vancouver_tz +from wps_wf1.models import ( StationDailyFromWF1, WF1ForecastRecordType, WF1PostForecast, - WeatherIndeterminate, + WFWXWeatherStation, WeatherDeterminate, + WeatherIndeterminate, ) -from wps_shared.wildfire_one.schema_parsers import WFWXWeatherStation -from wps_shared.wildfire_one.wfwx_api import get_forecasts_for_stations_by_date_range, get_no_cache_auth_header, get_wfwx_stations_from_station_codes -from app.fire_behaviour import cffdrs +from wps_wf1.wfwx_api import WfwxApi -def get_forecasts(db_session: Session, start_time: Optional[datetime], end_time: Optional[datetime], station_codes: List[int]) -> List[MoreCastForecastOutput]: +def get_forecasts( + db_session: Session, + start_time: Optional[datetime], + end_time: 
Optional[datetime], + station_codes: List[int], +) -> List[MoreCastForecastOutput]: if start_time is None or end_time is None: return [] @@ -48,10 +52,17 @@ def get_forecasts(db_session: Session, start_time: Optional[datetime], end_time: return forecasts -def construct_wf1_forecast(forecast: MoreCastForecastInput, stations: List[WFWXWeatherStation], forecast_id: Optional[str], created_by: Optional[str]) -> WF1PostForecast: +def construct_wf1_forecast( + forecast: MoreCastForecastInput, + stations: List[WFWXWeatherStation], + forecast_id: Optional[str], + created_by: Optional[str], +) -> WF1PostForecast: station = next(filter(lambda obj: obj.code == forecast.station_code, stations)) station_id = station.wfwx_id - station_url = urljoin(config.get("WFWX_BASE_URL"), f"wfwx-fireweather-api/v1/stations/{station_id}") + station_url = urljoin( + config.get("WFWX_BASE_URL"), f"wfwx-fireweather-api/v1/stations/{station_id}" + ) wf1_post_forecast = WF1PostForecast( createdBy=created_by, id=forecast_id, @@ -69,17 +80,29 @@ def construct_wf1_forecast(forecast: MoreCastForecastInput, stations: List[WFWXW return wf1_post_forecast -async def construct_wf1_forecasts(session: ClientSession, forecast_records: List[MoreCastForecastInput], stations: List[WFWXWeatherStation], username: str) -> List[WF1PostForecast]: +async def construct_wf1_forecasts( + wfwx_api: WfwxApi, + forecast_records: List[MoreCastForecastInput], + stations: List[WFWXWeatherStation], + username: str, +) -> List[WF1PostForecast]: # Fetch existing forecasts from WF1 for the stations and date range in the forecast records - header = await get_no_cache_auth_header(session) - forecast_dates = [datetime.fromtimestamp(f.for_date / 1000, timezone.utc) for f in forecast_records] + forecast_dates = [ + datetime.fromtimestamp(f.for_date / 1000, timezone.utc) for f in forecast_records + ] min_forecast_date = min(forecast_dates) max_forecast_date = max(forecast_dates) start_time = datetime.combine(min_forecast_date, time.min, 
tzinfo=vancouver_tz) end_time = datetime.combine(max_forecast_date, time.max, tzinfo=vancouver_tz) unique_station_codes = list(set([f.station_code for f in forecast_records])) - dailies = await get_forecasts_for_stations_by_date_range( - session=session, header=header, start_time_of_interest=start_time, end_time_of_interest=end_time, unique_station_codes=unique_station_codes, check_cache=False + fire_centre_station_codes = get_fire_centre_station_codes() + dailies = await wfwx_api.get_forecasts_for_stations_by_date_range( + start_time_of_interest=start_time, + end_time_of_interest=end_time, + unique_station_codes=unique_station_codes, + fire_centre_station_codes=fire_centre_station_codes, + check_cache=False, + use_no_cache_header=True, ) # Shape the WF1 dailies into a dictionary keyed by station codes for easier consumption @@ -92,31 +115,55 @@ async def construct_wf1_forecasts(session: ClientSession, forecast_records: List for forecast in forecast_records: forecast_timestamp = datetime.fromtimestamp(forecast.for_date / 1000, timezone.utc) # Check if an existing daily was retrieved from WF1 and use id and createdBy attributes if present - observed_daily = next((daily for daily in grouped_dailies[forecast.station_code] if daily.utcTimestamp == forecast_timestamp), None) + observed_daily = next( + ( + daily + for daily in grouped_dailies[forecast.station_code] + if daily.utcTimestamp == forecast_timestamp + ), + None, + ) forecast_id = observed_daily.forecast_id if observed_daily is not None else None created_by = observed_daily.created_by if observed_daily is not None else username wf1_forecasts.append(construct_wf1_forecast(forecast, stations, forecast_id, created_by)) return wf1_forecasts -async def format_as_wf1_post_forecasts(session: ClientSession, forecast_records: List[MoreCastForecastInput], username: str, headers: dict) -> List[WF1PostForecast]: +async def format_as_wf1_post_forecasts( + wfwx_api: WfwxApi, + forecast_records: List[MoreCastForecastInput], + 
username: str, +) -> List[WF1PostForecast]: """Returns list of forecast records re-formatted in the data structure WF1 API expects""" station_codes = [record.station_code for record in forecast_records] - stations = await get_wfwx_stations_from_station_codes(session, headers, station_codes) + fire_centre_station_codes = get_fire_centre_station_codes() + stations = await wfwx_api.get_wfwx_stations_from_station_codes( + list(station_codes), fire_centre_station_codes + ) unique_stations = list(set(stations)) - wf1_post_forecasts = await construct_wf1_forecasts(session, forecast_records, unique_stations, username) + wf1_post_forecasts = await construct_wf1_forecasts( + wfwx_api, forecast_records, unique_stations, username + ) return wf1_post_forecasts def actual_exists(forecast: WeatherIndeterminate, actuals: List[WeatherIndeterminate]): """Returns True if the actuals contain a WeatherIndeterminate with station_code and utc_timestamp that matches those of the forecast; otherwise, returns False.""" - station_code_matches = [actual for actual in actuals if actual.station_code == forecast.station_code] - utc_timestamp_matches = [station_code_match for station_code_match in station_code_matches if station_code_match.utc_timestamp == forecast.utc_timestamp] + station_code_matches = [ + actual for actual in actuals if actual.station_code == forecast.station_code + ] + utc_timestamp_matches = [ + station_code_match + for station_code_match in station_code_matches + if station_code_match.utc_timestamp == forecast.utc_timestamp + ] return len(utc_timestamp_matches) > 0 -def filter_for_api_forecasts(forecasts: List[WeatherIndeterminate], actuals: List[WeatherIndeterminate]): +def filter_for_api_forecasts( + forecasts: List[WeatherIndeterminate], actuals: List[WeatherIndeterminate] +): """Returns a list of forecasts where each forecast has a corresponding WeatherIndeterminate in the actuals with a matching station_code and utc_timestamp.""" filtered_forecasts = [] @@ -126,7 
+173,9 @@ def filter_for_api_forecasts(forecasts: List[WeatherIndeterminate], actuals: Lis return filtered_forecasts -def get_fwi_values(actuals: List[WeatherIndeterminate], forecasts: List[WeatherIndeterminate]) -> Tuple[List[WeatherIndeterminate], List[WeatherIndeterminate]]: +def get_fwi_values( + actuals: List[WeatherIndeterminate], forecasts: List[WeatherIndeterminate] +) -> Tuple[List[WeatherIndeterminate], List[WeatherIndeterminate]]: """ Calculates actuals and forecasts with Fire Weather Index System values by calculating based off previous actuals and subsequent forecasts. @@ -142,16 +191,28 @@ def get_fwi_values(actuals: List[WeatherIndeterminate], forecasts: List[WeatherI # Shape indeterminates into nested dicts for quick and easy look ups by station code and date for indeterminate in all_indeterminates: - indeterminates_dict[indeterminate.station_code][indeterminate.utc_timestamp.date()] = indeterminate + indeterminates_dict[indeterminate.station_code][indeterminate.utc_timestamp.date()] = ( + indeterminate + ) for idx, indeterminate in enumerate(all_indeterminates): - last_indeterminate = indeterminates_dict[indeterminate.station_code].get(indeterminate.utc_timestamp.date() - timedelta(days=1), None) + last_indeterminate = indeterminates_dict[indeterminate.station_code].get( + indeterminate.utc_timestamp.date() - timedelta(days=1), None + ) if last_indeterminate is not None: updated_forecast = calculate_fwi_values(last_indeterminate, indeterminate) all_indeterminates[idx] = updated_forecast - updated_forecasts = [indeterminate for indeterminate in all_indeterminates if indeterminate.determinate == WeatherDeterminate.FORECAST] - updated_actuals = [indeterminate for indeterminate in all_indeterminates if indeterminate.determinate == WeatherDeterminate.ACTUAL] + updated_forecasts = [ + indeterminate + for indeterminate in all_indeterminates + if indeterminate.determinate == WeatherDeterminate.FORECAST + ] + updated_actuals = [ + indeterminate + for 
indeterminate in all_indeterminates + if indeterminate.determinate == WeatherDeterminate.ACTUAL + ] return updated_actuals, updated_forecasts @@ -166,7 +227,10 @@ def indeterminate_missing_fwi(indeterminate: WeatherIndeterminate): ) -def calculate_fwi_from_seed_indeterminates(seed_indeterminates: List[WeatherIndeterminate], target_indeterminates: List[WeatherIndeterminate]) -> List[WeatherIndeterminate]: +def calculate_fwi_from_seed_indeterminates( + seed_indeterminates: List[WeatherIndeterminate], + target_indeterminates: List[WeatherIndeterminate], +) -> List[WeatherIndeterminate]: """ Calculates FWI values for a list of target indeterminates based on seed indeterminates. @@ -180,12 +244,16 @@ def calculate_fwi_from_seed_indeterminates(seed_indeterminates: List[WeatherInde # Shape indeterminates into nested dicts for quick lookups by station code and date for indeterminate in all_indeterminates: - indeterminates_dict[indeterminate.station_code][indeterminate.utc_timestamp.date()] = indeterminate + indeterminates_dict[indeterminate.station_code][indeterminate.utc_timestamp.date()] = ( + indeterminate + ) # Calculate FWI values for target indeterminates for idx, indeterminate in enumerate(target_indeterminates): previous_date = indeterminate.utc_timestamp.date() - timedelta(days=1) - last_indeterminate = indeterminates_dict[indeterminate.station_code].get(previous_date, None) + last_indeterminate = indeterminates_dict[indeterminate.station_code].get( + previous_date, None + ) if last_indeterminate is not None and indeterminate_missing_fwi(indeterminate): # If the target indeterminate already has FWI values, skip calculation target_indeterminates[idx] = calculate_fwi_values(last_indeterminate, indeterminate) @@ -193,7 +261,9 @@ def calculate_fwi_from_seed_indeterminates(seed_indeterminates: List[WeatherInde return target_indeterminates -def calculate_fwi_values(yesterday: WeatherIndeterminate, today: WeatherIndeterminate) -> WeatherIndeterminate: +def 
calculate_fwi_values( + yesterday: WeatherIndeterminate, today: WeatherIndeterminate +) -> WeatherIndeterminate: """ Uses CFFDRS library to calculate Fire Weather Index System values @@ -225,11 +295,29 @@ def calculate_fwi_values(yesterday: WeatherIndeterminate, today: WeatherIndeterm return today if yesterday.fine_fuel_moisture_code is not None: - today.fine_fuel_moisture_code = ffmc(ffmc_yda=yesterday.fine_fuel_moisture_code, temp=temp, rh=rh, prec=precip, ws=wind_spd) + today.fine_fuel_moisture_code = ffmc( + ffmc_yda=yesterday.fine_fuel_moisture_code, temp=temp, rh=rh, prec=precip, ws=wind_spd + ) if yesterday.duff_moisture_code is not None: - today.duff_moisture_code = dmc(dmc_yda=yesterday.duff_moisture_code, temp=temp, rh=rh, prec=precip, lat=latitude, mon=month_to_calculate_for, lat_adjust=True) + today.duff_moisture_code = dmc( + dmc_yda=yesterday.duff_moisture_code, + temp=temp, + rh=rh, + prec=precip, + lat=latitude, + mon=month_to_calculate_for, + lat_adjust=True, + ) if yesterday.drought_code is not None: - today.drought_code = dc(dc_yda=yesterday.drought_code, temp=temp, rh=rh, prec=precip, lat=latitude, mon=month_to_calculate_for, lat_adjust=True) + today.drought_code = dc( + dc_yda=yesterday.drought_code, + temp=temp, + rh=rh, + prec=precip, + lat=latitude, + mon=month_to_calculate_for, + lat_adjust=True, + ) if today.fine_fuel_moisture_code is not None: today.initial_spread_index = isi(ffmc=today.fine_fuel_moisture_code, ws=today.wind_speed) if today.duff_moisture_code is not None and today.drought_code is not None: @@ -238,3 +326,30 @@ def calculate_fwi_values(yesterday: WeatherIndeterminate, today: WeatherIndeterm today.fire_weather_index = fwi(isi=today.initial_spread_index, bui=today.build_up_index) return today + + +def transform_morecastforecastoutput_to_weatherindeterminate( + forecast_outputs: List[MoreCastForecastOutput], wfwx_stations: List[WFWXWeatherStation] +) -> List[WeatherIndeterminate]: + """Helper function to convert list of 
MoreCastForecastOutput objects (taken from our database) + into list of WeatherIndeterminate objects to match the structure of the forecasts pulled from WFWX. + wfwx_stations list (station data from WFWX) is used to populate station_name data. + """ + weather_indeterminates: List[WeatherIndeterminate] = [] + for output in forecast_outputs: + station = next(s for s in wfwx_stations if s.code == output.station_code) + + weather_indeterminates.append( + WeatherIndeterminate( + station_code=output.station_code, + station_name=station.name if station else "", + utc_timestamp=output.for_date, + determinate=WeatherDeterminate.FORECAST, + temperature=output.temp, + relative_humidity=output.rh, + precipitation=output.precip, + wind_direction=output.wind_direction, + wind_speed=output.wind_speed, + ) + ) + return weather_indeterminates diff --git a/backend/packages/wps-api/src/app/routers/fba.py b/backend/packages/wps-api/src/app/routers/fba.py index 6cd41324d6..15ff1be6ce 100644 --- a/backend/packages/wps-api/src/app/routers/fba.py +++ b/backend/packages/wps-api/src/app/routers/fba.py @@ -9,15 +9,6 @@ from aiohttp.client import ClientSession from fastapi import APIRouter, Depends from sqlalchemy.ext.asyncio import AsyncSession - -from app.auto_spatial_advisory.fuel_type_layer import ( - get_fuel_type_raster_by_year, -) -from app.auto_spatial_advisory.process_hfi import RunType -from app.auto_spatial_advisory.zone_stats import ( - get_fuel_type_area_stats, - get_zone_wind_stats_for_source_id, -) from wps_shared.auth import asa_authentication_required, audit_asa from wps_shared.db.crud.auto_spatial_advisory import ( get_all_hfi_thresholds_by_id, @@ -55,7 +46,16 @@ SFMSRunParameter, TPIResponse, ) -from wps_shared.wildfire_one.wfwx_api import get_auth_header, get_fire_centers +from wps_shared.wildfire_one.wfwx_api import create_wfwx_api + +from app.auto_spatial_advisory.fuel_type_layer import ( + get_fuel_type_raster_by_year, +) +from app.auto_spatial_advisory.process_hfi 
import RunType +from app.auto_spatial_advisory.zone_stats import ( + get_fuel_type_area_stats, + get_zone_wind_stats_for_source_id, +) logger = logging.getLogger(__name__) @@ -148,8 +148,8 @@ async def get_all_fire_centers(_=Depends(asa_authentication_required)): """Returns fire centers for all active stations.""" logger.info("/fba/fire-centers/") async with ClientSession() as session: - header = await get_auth_header(session) - fire_centers = await get_fire_centers(session, header) + wfwx_api = create_wfwx_api(session) + fire_centers = await wfwx_api.get_fire_centers() return FireCenterListResponse(fire_centers=fire_centers) diff --git a/backend/packages/wps-api/src/app/routers/fba_calc.py b/backend/packages/wps-api/src/app/routers/fba_calc.py index aabec9833a..24610394fb 100644 --- a/backend/packages/wps-api/src/app/routers/fba_calc.py +++ b/backend/packages/wps-api/src/app/routers/fba_calc.py @@ -2,31 +2,29 @@ import logging from datetime import date, datetime, timedelta + from aiohttp.client import ClientSession from fastapi import APIRouter, Depends -from wps_shared.auth import authentication_required, audit -from app.fire_behaviour.advisory import ( - FBACalculatorWeatherStation, - FireBehaviourAdvisory, - calculate_fire_behaviour_advisory, -) -from app.fire_behaviour.fwi_adjust import calculate_adjusted_fwi_result -from app.hourlies import get_hourly_readings_in_time_interval +from wps_shared.auth import audit, authentication_required +from wps_shared.db.crud.hfi_calc import get_fire_centre_station_codes from wps_shared.schemas.fba_calc import ( StationListRequest, StationRequest, - StationsListResponse, StationResponse, + StationsListResponse, ) from wps_shared.utils.time import get_hour_20_from_date -from wps_shared.wildfire_one.schema_parsers import WFWXWeatherStation -from wps_shared.wildfire_one.wfwx_api import ( - get_auth_header, - get_dailies_generator, - get_wfwx_stations_from_station_codes, +from wps_shared.wildfire_one.wfwx_api import 
create_wfwx_api +from wps_wf1.models import WFWXWeatherStation + +from app.fire_behaviour.advisory import ( + FBACalculatorWeatherStation, + FireBehaviourAdvisory, + calculate_fire_behaviour_advisory, ) +from app.fire_behaviour.fwi_adjust import calculate_adjusted_fwi_result from app.fire_behaviour.prediction import build_hourly_rh_dict - +from app.hourlies import get_hourly_readings_in_time_interval router = APIRouter( prefix="/fba-calc", dependencies=[Depends(authentication_required), Depends(audit)] @@ -199,15 +197,15 @@ async def get_stations_data(request: StationListRequest, _=Depends(authenticatio time_of_interest = get_hour_20_from_date(request.date) async with ClientSession() as session: - # authenticate against wfwx api - header = await get_auth_header(session) + fire_centre_station_codes = get_fire_centre_station_codes() # get station information from the wfwx api - wfwx_stations = await get_wfwx_stations_from_station_codes( - session, header, unique_station_codes + wfwx_api = create_wfwx_api(session) + wfwx_stations = await wfwx_api.get_wfwx_stations_from_station_codes( + unique_station_codes, fire_centre_station_codes ) # get the dailies for all the stations - dailies = await get_dailies_generator( - session, header, wfwx_stations, time_of_interest, time_of_interest + dailies = await wfwx_api.get_dailies_generator( + wfwx_stations, time_of_interest, time_of_interest ) # turn it into a dictionary so we can easily get at data using a station id dailies_by_station_id = { @@ -216,8 +214,8 @@ async def get_stations_data(request: StationListRequest, _=Depends(authenticatio # must retrieve the previous day's observed/forecasted FFMC value from WFWX prev_day = time_of_interest - timedelta(days=1) # get the "daily" data for the station for the previous day - yesterday_response = await get_dailies_generator( - session, header, wfwx_stations, prev_day, prev_day + yesterday_response = await wfwx_api.get_dailies_generator( + wfwx_stations, prev_day, prev_day ) # 
turn it into a dictionary so we can easily get at data yesterday_dailies_by_station_id = { diff --git a/backend/packages/wps-api/src/app/routers/fire_watch.py b/backend/packages/wps-api/src/app/routers/fire_watch.py index d2d4bdab3c..f88d5bccf0 100644 --- a/backend/packages/wps-api/src/app/routers/fire_watch.py +++ b/backend/packages/wps-api/src/app/routers/fire_watch.py @@ -9,6 +9,7 @@ from geoalchemy2.shape import to_shape from shapely import from_wkt from sqlalchemy import Row +from wps_wf1.models import FireCentre from app.fire_watch.calculate_weather import ( MissingWeatherDataError, @@ -44,7 +45,6 @@ FireWatchResponse, FireWatchStation, ) -from wps_shared.schemas.hfi_calc import FireCentre from wps_shared.schemas.stations import GeoJsonWeatherStation from wps_shared.stations import get_stations_as_geojson from wps_shared.utils.time import get_utc_now diff --git a/backend/packages/wps-api/src/app/routers/morecast_v2.py b/backend/packages/wps-api/src/app/routers/morecast_v2.py index df1acc8023..fd2777ff1c 100644 --- a/backend/packages/wps-api/src/app/routers/morecast_v2.py +++ b/backend/packages/wps-api/src/app/routers/morecast_v2.py @@ -1,19 +1,22 @@ """Routes for Morecast v2""" import logging -from aiohttp.client import ClientSession -from app.morecast_v2.forecasts import format_as_wf1_post_forecasts -from wps_shared.utils.time import vancouver_tz -from typing import List from datetime import date, datetime, time, timedelta, timezone -from fastapi import APIRouter, Response, Depends, status +from typing import List + +from aiohttp.client import ClientSession +from fastapi import APIRouter, Depends, HTTPException, Response, status from fastapi.responses import ORJSONResponse -from wps_shared.auth import auth_with_forecaster_role_required, audit, authentication_required +from wps_shared.auth import audit, auth_with_forecaster_role_required, authentication_required from wps_shared.db.crud.grass_curing import get_percent_grass_curing_by_station_for_date_range 
-from wps_shared.db.crud.morecast_v2 import get_forecasts_in_range, get_user_forecasts_for_date, save_all_forecasts +from wps_shared.db.crud.hfi_calc import get_fire_centre_station_codes +from wps_shared.db.crud.morecast_v2 import ( + get_forecasts_in_range, + get_user_forecasts_for_date, + save_all_forecasts, +) from wps_shared.db.database import get_read_session_scope, get_write_session_scope from wps_shared.db.models.morecast_v2 import MorecastForecastRecord -from app.morecast_v2.forecasts import filter_for_api_forecasts, get_forecasts, get_fwi_values from wps_shared.schemas.morecast_v2 import ( IndeterminateDailiesResponse, MoreCastForecastOutput, @@ -21,27 +24,47 @@ MorecastForecastResponse, ObservedDailiesForStations, StationDailiesResponse, - WeatherIndeterminate, - WeatherDeterminate, ) from wps_shared.schemas.shared import StationsRequest -from wps_shared.wildfire_one.schema_parsers import transform_morecastforecastoutput_to_weatherindeterminate -from wps_shared.utils.time import get_hour_20_from_date, get_utc_now -from wps_shared.weather_models.fetch.predictions import fetch_latest_model_run_predictions_by_station_code_and_date_range -from wps_shared.wildfire_one.wfwx_api import get_auth_header, get_dailies_for_stations_and_date, get_daily_determinates_for_stations_and_date, get_wfwx_stations_from_station_codes -from wps_shared.wildfire_one.wfwx_post_api import WF1_HTTP_ERROR, post_forecasts from wps_shared.utils.redis import clear_cache_matching +from wps_shared.utils.time import get_hour_20_from_date, get_utc_now, vancouver_tz +from wps_shared.weather_models.fetch.predictions import ( + fetch_latest_model_run_predictions_by_station_code_and_date_range, +) + +from wps_shared.wildfire_one.wfwx_api import create_wfwx_api +from wps_wf1.models import WeatherDeterminate, WeatherIndeterminate +from app.morecast_v2.forecasts import ( + filter_for_api_forecasts, + format_as_wf1_post_forecasts, + get_forecasts, + get_fwi_values, + 
transform_morecastforecastoutput_to_weatherindeterminate, +) logger = logging.getLogger(__name__) no_cache = "max-age=0" # don't let the browser cache this -router = APIRouter(prefix="/morecast-v2", dependencies=[Depends(authentication_required), Depends(audit)]) +router = APIRouter( + prefix="/morecast-v2", dependencies=[Depends(authentication_required), Depends(audit)] +) + +WF1_HTTP_ERROR = HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=""" + Error submitting forecasts to WF1, please retry. + All your forecast inputs have been saved as a draft on your browser and can be submitted at a later time. + If the problem persists, use the following link to verify the status of the WF1 service: https://wfapps.nrs.gov.bc.ca/pub/wfwx-fireweather-web/stations + """, +) @router.get("/forecasts/{for_date}") -async def get_forecasts_for_date_and_user(for_date: date, response: Response, token=Depends(authentication_required)) -> List[MorecastForecastResponse]: +async def get_forecasts_for_date_and_user( + for_date: date, response: Response, token=Depends(authentication_required) +) -> List[MorecastForecastResponse]: """Return forecasts""" logger.info("/forecasts/") response.headers["Cache-Control"] = no_cache @@ -53,7 +76,9 @@ async def get_forecasts_for_date_and_user(for_date: date, response: Response, to @router.post("/forecasts/{start_date}/{end_date}") -async def get_forecasts_by_date_range(start_date: date, end_date: date, request: StationsRequest, response: Response): +async def get_forecasts_by_date_range( + start_date: date, end_date: date, request: StationsRequest, response: Response +): """Return forecasts for the specified date range and stations""" logger.info(f"/forecasts/{start_date}/{end_date}") response.headers["Cache-Control"] = no_cache @@ -81,7 +106,11 @@ async def get_forecasts_by_date_range(start_date: date, end_date: date, request: @router.post("/forecast", status_code=status.HTTP_201_CREATED) -async def 
save_forecasts(forecasts: MoreCastForecastRequest, response: Response, token=Depends(auth_with_forecaster_role_required)) -> MorecastForecastResponse: +async def save_forecasts( + forecasts: MoreCastForecastRequest, + response: Response, + token=Depends(auth_with_forecaster_role_required), +) -> MorecastForecastResponse: """Persist a forecast""" logger.info("/forecast") response.headers["Cache-Control"] = no_cache @@ -111,9 +140,11 @@ async def save_forecasts(forecasts: MoreCastForecastRequest, response: Response, async with ClientSession() as client_session: try: - headers = await get_auth_header(client_session) - wf1_forecast_records = await format_as_wf1_post_forecasts(client_session, forecasts_list, username, headers) - await post_forecasts(client_session, forecasts=wf1_forecast_records) + wfwx_api = create_wfwx_api(client_session) + wf1_forecast_records = await format_as_wf1_post_forecasts( + wfwx_api, forecasts_list, username + ) + await wfwx_api.post_forecasts(forecasts=wf1_forecast_records) station_ids = [wfwx_station.stationId for wfwx_station in wf1_forecast_records] for station_id in station_ids: @@ -149,17 +180,22 @@ async def get_yesterdays_actual_dailies(today: date, request: ObservedDailiesFor unique_station_codes = list(set(request.station_codes)) time_of_interest = get_hour_20_from_date(today) - timedelta(days=1) + fire_centre_station_codes = get_fire_centre_station_codes() async with ClientSession() as session: - header = await get_auth_header(session) + wfwx_api = create_wfwx_api(session) - yesterday_dailies = await get_dailies_for_stations_and_date(session, header, time_of_interest, time_of_interest, unique_station_codes) + yesterday_dailies = await wfwx_api.get_dailies_for_stations_and_date( + time_of_interest, time_of_interest, unique_station_codes, fire_centre_station_codes + ) return StationDailiesResponse(dailies=yesterday_dailies) @router.post("/observed-dailies/{start_date}/{end_date}", response_model=StationDailiesResponse) -async def 
get_observed_dailies(start_date: date, end_date: date, request: ObservedDailiesForStations): +async def get_observed_dailies( + start_date: date, end_date: date, request: ObservedDailiesForStations +): """Returns the daily observations for the requested station codes, from the given start_date to the most recent date where daily observation data is available. """ @@ -169,16 +205,28 @@ async def get_observed_dailies(start_date: date, end_date: date, request: Observ start_date_of_interest = get_hour_20_from_date(start_date) end_date_of_interest = get_hour_20_from_date(end_date) + fire_centre_station_codes = get_fire_centre_station_codes() async with ClientSession() as session: - header = await get_auth_header(session) - observed_dailies = await get_dailies_for_stations_and_date(session, header, start_date_of_interest, end_date_of_interest, unique_station_codes) + wfwx_api = create_wfwx_api(session) + observed_dailies = await wfwx_api.get_dailies_for_stations_and_date( + start_date_of_interest, + end_date_of_interest, + unique_station_codes, + fire_centre_station_codes, + ) return StationDailiesResponse(dailies=observed_dailies) -@router.post("/determinates/{start_date}/{end_date}", response_model=IndeterminateDailiesResponse, response_class=ORJSONResponse) -async def get_determinates_for_date_range(start_date: date, end_date: date, request: StationsRequest): +@router.post( + "/determinates/{start_date}/{end_date}", + response_model=IndeterminateDailiesResponse, + response_class=ORJSONResponse, +) +async def get_determinates_for_date_range( + start_date: date, end_date: date, request: StationsRequest +): """Returns the weather values for any actuals, predictions and forecasts for the requested stations within the requested date range. 
""" @@ -193,16 +241,30 @@ async def get_determinates_for_date_range(start_date: date, end_date: date, requ start_date_for_fwi_calc = start_date_of_interest - timedelta(days=1) async with ClientSession() as session: - header = await get_auth_header(session) + fire_centre_station_codes = get_fire_centre_station_codes() # get station information from the wfwx api - wfwx_stations = await get_wfwx_stations_from_station_codes(session, header, unique_station_codes) - wf1_actuals, wf1_forecasts = await get_daily_determinates_for_stations_and_date(session, header, start_date_for_fwi_calc, end_date_of_interest, unique_station_codes) + wfwx_api = create_wfwx_api(session) + wfwx_stations = await wfwx_api.get_wfwx_stations_from_station_codes( + unique_station_codes, fire_centre_station_codes + ) + wf1_actuals, wf1_forecasts = await wfwx_api.get_daily_determinates_for_stations_and_date( + start_date_for_fwi_calc, + end_date_of_interest, + unique_station_codes, + fire_centre_station_codes, + ) wf1_actuals, wf1_forecasts = get_fwi_values(wf1_actuals, wf1_forecasts) # drop the days before the date of interest that were needed to calculate fwi values - wf1_actuals = [actual for actual in wf1_actuals if actual.utc_timestamp >= start_date_of_interest] - wf1_forecasts = [forecast for forecast in wf1_forecasts if forecast.utc_timestamp >= start_date_of_interest] + wf1_actuals = [ + actual for actual in wf1_actuals if actual.utc_timestamp >= start_date_of_interest + ] + wf1_forecasts = [ + forecast + for forecast in wf1_forecasts + if forecast.utc_timestamp >= start_date_of_interest + ] # Find the min and max dates for actuals from wf1. These define the range of dates for which # we need to retrieve forecasts from our API database. 
Note that not all stations report actuals @@ -212,15 +274,25 @@ async def get_determinates_for_date_range(start_date: date, end_date: date, requ max_wf1_actuals_date = max(wf1_actuals_dates, default=None) with get_read_session_scope() as db_session: - forecasts_from_db: List[MoreCastForecastOutput] = get_forecasts(db_session, min_wf1_actuals_date, max_wf1_actuals_date, request.stations) - predictions: List[WeatherIndeterminate] = await fetch_latest_model_run_predictions_by_station_code_and_date_range(db_session, unique_station_codes, start_time, end_time) + forecasts_from_db: List[MoreCastForecastOutput] = get_forecasts( + db_session, min_wf1_actuals_date, max_wf1_actuals_date, request.stations + ) + predictions: List[ + WeatherIndeterminate + ] = await fetch_latest_model_run_predictions_by_station_code_and_date_range( + db_session, unique_station_codes, start_time, end_time + ) station_codes = [station.code for station in wfwx_stations] - grass_curing_rows = get_percent_grass_curing_by_station_for_date_range(db_session, start_time.date(), end_time.date(), station_codes) + grass_curing_rows = get_percent_grass_curing_by_station_for_date_range( + db_session, start_time.date(), end_time.date(), station_codes + ) grass_curing = [] for gc_tuple in grass_curing_rows: gc_row = gc_tuple[0] - current_station = [station for station in wfwx_stations if station.code == gc_row.station_code][0] + current_station = [ + station for station in wfwx_stations if station.code == gc_row.station_code + ][0] gc_indeterminate = WeatherIndeterminate( determinate=WeatherDeterminate.GRASS_CURING_CWFIS, station_code=current_station.code, @@ -232,7 +304,9 @@ async def get_determinates_for_date_range(start_date: date, end_date: date, requ ) grass_curing.append(gc_indeterminate) - transformed_forecasts = transform_morecastforecastoutput_to_weatherindeterminate(forecasts_from_db, wfwx_stations) + transformed_forecasts = transform_morecastforecastoutput_to_weatherindeterminate( + 
forecasts_from_db, wfwx_stations + ) # Not all weather stations report actuals at the same time, so we can end up in a situation where # for a given date, we need to show the forecast from the wf1 API for one station, and the forecast @@ -242,4 +316,9 @@ async def get_determinates_for_date_range(start_date: date, end_date: date, requ wf1_forecasts.extend(transformed_forecasts_to_add) - return IndeterminateDailiesResponse(actuals=wf1_actuals, forecasts=wf1_forecasts, grass_curing=grass_curing, predictions=predictions) + return IndeterminateDailiesResponse( + actuals=wf1_actuals, + forecasts=wf1_forecasts, + grass_curing=grass_curing, + predictions=predictions, + ) diff --git a/backend/packages/wps-api/src/app/routers/stations.py b/backend/packages/wps-api/src/app/routers/stations.py index f596a996ea..54509087cb 100644 --- a/backend/packages/wps-api/src/app/routers/stations.py +++ b/backend/packages/wps-api/src/app/routers/stations.py @@ -1,6 +1,7 @@ """ Routers for stations """ import logging from datetime import datetime +from aiohttp import ClientSession from fastapi import APIRouter, Response, Depends from wps_shared.auth import authentication_required, audit from wps_shared.utils.time import get_utc_now, get_hour_20 @@ -12,7 +13,7 @@ WeatherStationGroupMembersResponse, ) from wps_shared.stations import get_stations_as_geojson, fetch_detailed_stations_as_geojson -from wps_shared.wildfire_one import wfwx_api +from wps_shared.wildfire_one.wfwx_api import create_wfwx_api logger = logging.getLogger(__name__) @@ -75,8 +76,10 @@ async def get_station_groups(response: Response, _=Depends(authentication_requir Groups are retrieved from an undocumented stationGroups endpoint. 
""" logger.info('/stations/groups') - groups = await wfwx_api.get_station_groups() - response.headers["Cache-Control"] = no_cache + async with ClientSession() as session: + wfwx_api = create_wfwx_api(session) + groups = await wfwx_api.get_station_groups() + response.headers["Cache-Control"] = no_cache return WeatherStationGroupsResponse(groups=groups) @@ -84,6 +87,8 @@ async def get_station_groups(response: Response, _=Depends(authentication_requir async def get_stations_by_group_ids(groups_request: WeatherStationGroupsMemberRequest, response: Response, _=Depends(authentication_required)): """ Return a list of stations that are part of the specified group(s) """ logger.info('/stations/groups/members') - stations = await wfwx_api.get_stations_by_group_ids([id for id in groups_request.group_ids]) - response.headers["Cache-Control"] = no_cache + async with ClientSession() as session: + wfwx_api = create_wfwx_api(session) + stations = await wfwx_api.get_stations_by_group_ids([id for id in groups_request.group_ids]) + response.headers["Cache-Control"] = no_cache return WeatherStationGroupMembersResponse(stations=stations) diff --git a/backend/packages/wps-api/src/app/tests/auto_spatial_advisory/test_critical_hours.py b/backend/packages/wps-api/src/app/tests/auto_spatial_advisory/test_critical_hours.py index c017acf96f..85d1df1306 100644 --- a/backend/packages/wps-api/src/app/tests/auto_spatial_advisory/test_critical_hours.py +++ b/backend/packages/wps-api/src/app/tests/auto_spatial_advisory/test_critical_hours.py @@ -3,9 +3,11 @@ import math import numpy as np import json + +from wps_wf1.models import WFWXWeatherStation from app.auto_spatial_advisory.critical_hours import CriticalHoursInputs, calculate_representative_hours, check_station_valid, determine_start_time, determine_end_time from wps_shared.schemas.fba_calc import CriticalHoursHFI -from wps_shared.wildfire_one.schema_parsers import WFWXWeatherStation + dirname = os.path.dirname(__file__) dailies_fixture = 
os.path.join(dirname, "wf1-dailies.json") diff --git a/backend/packages/wps-api/src/app/tests/conftest.py b/backend/packages/wps-api/src/app/tests/conftest.py index 50d81d2952..be95b7e7e4 100644 --- a/backend/packages/wps-api/src/app/tests/conftest.py +++ b/backend/packages/wps-api/src/app/tests/conftest.py @@ -15,6 +15,7 @@ mock_requests_session, mock_client_session, spy_access_logging, + mock_wfwx_api, ) diff --git a/backend/packages/wps-api/src/app/tests/fba/test_fba_endpoint.py b/backend/packages/wps-api/src/app/tests/fba/test_fba_endpoint.py index 108c1fcfc5..204738d8da 100644 --- a/backend/packages/wps-api/src/app/tests/fba/test_fba_endpoint.py +++ b/backend/packages/wps-api/src/app/tests/fba/test_fba_endpoint.py @@ -1,19 +1,16 @@ -import asyncio import json import math from collections import namedtuple from datetime import date, datetime, timezone -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import app.main import pytest from aiohttp import ClientSession from app.tests import get_complete_filename from fastapi.testclient import TestClient - from wps_shared.db.models.auto_spatial_advisory import ( AdvisoryHFIWindSpeed, - AdvisoryTPIStats, RunParameters, SFMSFuelType, TPIClassEnum, @@ -24,7 +21,6 @@ FireZoneHFIStats, HFIStatsResponse, HfiThreshold, - LatestSFMSRunParameterRangeResponse, SFMSRunParameter, ) from wps_shared.tests.common import default_mock_client_get @@ -285,7 +281,9 @@ def client(): @pytest.mark.parametrize( "status, expected_fire_centers", [(200, "test_fba_endpoint_fire_centers.json")] ) -def test_fba_endpoint_fire_centers(status, expected_fire_centers, monkeypatch): +def test_fba_endpoint_fire_centers( + status, expected_fire_centers, monkeypatch, mocker, mock_wfwx_api +): monkeypatch.setattr(ClientSession, "get", default_mock_client_get) client = TestClient(app.main.app) @@ -319,11 +317,10 @@ def test_get_endpoints_unauthorized(client: TestClient, endpoint: str): assert response.status_code == 401 
-@patch("app.routers.fba.get_auth_header", mock_get_auth_header) -@patch("app.routers.fba.get_fire_centers", mock_get_fire_centres) @pytest.mark.usefixtures("mock_jwt_decode") -def test_get_fire_centres_authorized(client: TestClient): +def test_get_fire_centres_authorized(client: TestClient, mocker, mock_wfwx_api): """Allowed to get fire centres when authorized""" + mocker.patch("app.routers.fba.create_wfwx_api", return_value=mock_wfwx_api) response = client.get(get_fire_centres_url) assert response.status_code == 200 @@ -367,7 +364,6 @@ async def mock_zone_ids_in_centre(*_, **__): return [1] -@patch("app.routers.fba.get_auth_header", mock_get_auth_header) @patch("app.routers.fba.get_precomputed_stats_for_shape", mock_get_fire_centre_info) @patch("app.routers.fba.get_all_hfi_thresholds_by_id", mock_hfi_thresholds) @patch("app.routers.fba.get_all_sfms_fuel_type_records", mock_sfms_fuel_types) @@ -391,7 +387,6 @@ def test_get_fire_center_info_authorized(client: TestClient): assert math.isclose(kfc_json["1"]["min_wind_stats"][0]["min_wind_speed"], 1) -@patch("app.routers.fba.get_auth_header", mock_get_auth_header) @patch("app.routers.fba.get_precomputed_stats_for_shape", mock_get_fire_centre_info) @patch( "app.routers.fba.get_fuel_type_raster_by_year", @@ -417,7 +412,6 @@ def test_get_fire_center_info_authorized_no_min_wind_speeds(client: TestClient): assert kfc_json["1"]["min_wind_stats"] == [] -@patch("app.routers.fba.get_auth_header", mock_get_auth_header) @patch("app.routers.fba.get_precomputed_stats_for_shape", mock_get_fire_centre_info_with_grass) @patch( "app.routers.fba.get_fuel_type_raster_by_year", @@ -444,7 +438,6 @@ def test_get_fire_center_info_authorized_grass_fuel(client: TestClient): assert math.isclose(kfc_json["1"]["min_wind_stats"][0]["min_wind_speed"], 1) -@patch("app.routers.fba.get_auth_header", mock_get_auth_header) @patch("app.routers.fba.get_run_datetimes", mock_get_sfms_run_datetimes) @pytest.mark.usefixtures("mock_jwt_decode") def 
test_get_sfms_run_datetimes_authorized(client: TestClient): @@ -456,7 +449,6 @@ def test_get_sfms_run_datetimes_authorized(client: TestClient): ).strftime("%Y-%m-%dT%H:%M:%SZ") -@patch("app.routers.fba.get_auth_header", mock_get_auth_header) @patch("app.routers.fba.get_centre_tpi_stats", mock_get_centre_tpi_stats) @patch("app.routers.fba.get_fire_centre_tpi_fuel_areas", mock_get_fire_centre_tpi_fuel_areas) @patch("app.routers.fba.get_fuel_type_raster_by_year", mock_get_fuel_type_raster_by_year) @@ -513,7 +505,6 @@ def test_get_fire_centre_tpi_stats_authorized(client: TestClient): @pytest.mark.usefixtures("mock_jwt_decode") -@patch("app.routers.fba.get_auth_header", mock_get_auth_header) @patch("app.routers.fba.get_tpi_stats", mock_get_tpi_stats) @patch("app.routers.fba.get_fuel_type_raster_by_year", mock_get_fuel_type_raster_by_year) @patch("app.routers.fba.get_tpi_fuel_areas", mock_get_tpi_fuel_areas) @@ -540,7 +531,6 @@ def test_get_tpi_stats_authorized(client: TestClient): @pytest.mark.usefixtures("mock_jwt_decode") -@patch("app.routers.fba.get_auth_header", mock_get_auth_header) @patch("app.routers.fba.get_sfms_bounds", mock_get_sfms_bounds) def test_get_sfms_run_bounds(client: TestClient): response = client.get(get_sfms_run_bounds_url) @@ -553,7 +543,6 @@ def test_get_sfms_run_bounds(client: TestClient): @pytest.mark.usefixtures("mock_jwt_decode") -@patch("app.routers.fba.get_auth_header", mock_get_auth_header) @patch( "app.routers.fba.get_sfms_bounds", mock_get_sfms_bounds_no_data, @@ -579,8 +568,6 @@ def test_get_sfms_run_bounds_no_bounds(client: TestClient): @pytest.mark.usefixtures("mock_test_idir_jwt_decode") @pytest.mark.parametrize("endpoint", FBA_ENDPOINTS) -@patch("app.routers.fba.get_auth_header", mock_get_auth_header) -@patch("app.routers.fba.get_fire_centers", mock_get_fire_centres) @patch("app.routers.fba.get_precomputed_stats_for_shape", mock_get_fire_centre_info) @patch("app.routers.fba.get_all_hfi_thresholds_by_id", mock_hfi_thresholds) 
@patch("app.routers.fba.get_all_sfms_fuel_type_records", mock_sfms_fuel_types) @@ -602,7 +589,8 @@ def test_get_sfms_run_bounds_no_bounds(client: TestClient): ) @patch("app.routers.fba.get_tpi_fuel_areas", mock_get_tpi_fuel_areas) @patch("app.routers.fba.get_tpi_stats", mock_get_tpi_stats) -def test_fba_endpoints_allowed_for_test_idir(client, endpoint): +def test_fba_endpoints_allowed_for_test_idir(client, endpoint, mocker, mock_wfwx_api): + mocker.patch("app.routers.fba.create_wfwx_api", return_value=mock_wfwx_api) headers = {"Authorization": "Bearer token"} response = client.get(endpoint, headers=headers) assert response.status_code == 200 diff --git a/backend/packages/wps-api/src/app/tests/fire_behavior/test_fwi_adjust.py b/backend/packages/wps-api/src/app/tests/fire_behavior/test_fwi_adjust.py index 8835f0e338..962f242282 100644 --- a/backend/packages/wps-api/src/app/tests/fire_behavior/test_fwi_adjust.py +++ b/backend/packages/wps-api/src/app/tests/fire_behavior/test_fwi_adjust.py @@ -1,9 +1,10 @@ import math from datetime import datetime -from wps_shared.fuel_types import FuelTypeEnum -from wps_shared.wildfire_one.schema_parsers import WFWXWeatherStation + from app.fire_behaviour.fwi_adjust import calculate_adjusted_fwi_result +from wps_shared.fuel_types import FuelTypeEnum from wps_shared.schemas.fba_calc import StationRequest +from wps_wf1.models import WFWXWeatherStation station_1 = WFWXWeatherStation( code=1, diff --git a/backend/packages/wps-api/src/app/tests/fire_watch/test_calculate_weather.py b/backend/packages/wps-api/src/app/tests/fire_watch/test_calculate_weather.py index 711417d4c4..5580ae6b43 100644 --- a/backend/packages/wps-api/src/app/tests/fire_watch/test_calculate_weather.py +++ b/backend/packages/wps-api/src/app/tests/fire_watch/test_calculate_weather.py @@ -3,6 +3,8 @@ from datetime import datetime, timezone from unittest.mock import AsyncMock, patch, create_autospec +from wps_wf1.models import WFWXWeatherStation, WeatherDeterminate, 
WeatherIndeterminate + from wps_shared.db.models.fire_watch import FireWatch, FireWatchWeather from wps_shared.fuel_types import FuelTypeEnum from app.fire_behaviour.prediction import FireBehaviourPrediction @@ -23,9 +25,7 @@ validate_fire_watch_inputs, validate_prediction_dates, ) -from wps_shared.schemas.morecast_v2 import WeatherDeterminate, WeatherIndeterminate from wps_shared.schemas.weather_models import ModelPredictionDetails -from wps_shared.wildfire_one.schema_parsers import WFWXWeatherStation from app.fire_watch.calculate_weather import MissingWeatherDataError @@ -201,22 +201,22 @@ async def test_map_model_prediction_to_weather_indeterminate_missing_bias_value( @pytest.mark.anyio -@patch("app.fire_watch.calculate_weather.get_auth_header", new_callable=AsyncMock) -@patch( - "app.fire_watch.calculate_weather.get_wfwx_stations_from_station_codes", new_callable=AsyncMock -) -async def test_fetch_station_metadata(mock_get_stations, mock_get_auth_header): - mock_get_stations.return_value = [ - WFWXWeatherStation( - code=1, - name="Station 1", - lat=50.0, - long=-120.0, - elevation=1, - wfwx_id="1", - zone_code=None, - ) - ] +async def test_fetch_station_metadata(mocker, mock_wfwx_api): + mock_wfwx_api.get_wfwx_stations_from_station_codes = AsyncMock( + return_value=[ + WFWXWeatherStation( + code=1, + name="Station 1", + lat=50.0, + long=-120.0, + elevation=1, + wfwx_id="1", + zone_code=None, + ) + ] + ) + mocker.patch("app.fire_watch.calculate_weather.create_wfwx_api", return_value=mock_wfwx_api) + result = await get_station_metadata([1]) assert result[1].name == "Station 1" diff --git a/backend/packages/wps-api/src/app/tests/forecasts/test_noon_forecasts.py b/backend/packages/wps-api/src/app/tests/forecasts/test_noon_forecasts.py index 05000cc7ba..3e6a83fedb 100644 --- a/backend/packages/wps-api/src/app/tests/forecasts/test_noon_forecasts.py +++ b/backend/packages/wps-api/src/app/tests/forecasts/test_noon_forecasts.py @@ -1,17 +1,20 @@ -import pytest import 
json import os from datetime import datetime + +import app.main +import pytest +from aiohttp import ClientSession from sqlalchemy.orm import Session from starlette.testclient import TestClient -from aiohttp import ClientSession -from wps_shared.schemas.stations import StationCodeList -import app.main -from wps_shared.tests.common import default_mock_client_get from wps_shared.db.models.forecasts import NoonForecast +from wps_shared.tests.common import default_mock_client_get +from wps_wf1.models import StationCodeList -def mock_query_noon_forecast_records(session: Session, station_codes: StationCodeList, start_date: datetime, end_date: datetime): +def mock_query_noon_forecast_records( + session: Session, station_codes: StationCodeList, start_date: datetime, end_date: datetime +): """Mock some noon forecasts""" forecasts = [] dirname = os.path.dirname(os.path.realpath(__file__)) @@ -32,7 +35,11 @@ def mock_query_noon_forecast_records(session: Session, station_codes: StationCod @pytest.mark.usefixtures("mock_jwt_decode") def test_noon_forecasts(codes, status, num_groups, monkeypatch): monkeypatch.setattr(ClientSession, "get", default_mock_client_get) - monkeypatch.setattr(app.forecasts.noon_forecasts, "query_noon_forecast_records", mock_query_noon_forecast_records) + monkeypatch.setattr( + app.forecasts.noon_forecasts, + "query_noon_forecast_records", + mock_query_noon_forecast_records, + ) client = TestClient(app.main.app) headers = {"Content-Type": "application/json", "Authorization": "Bearer token"} diff --git a/backend/packages/wps-api/src/app/tests/forecasts/test_noon_forecasts_summaries.py b/backend/packages/wps-api/src/app/tests/forecasts/test_noon_forecasts_summaries.py index 99c54326fe..8923cc3ce3 100644 --- a/backend/packages/wps-api/src/app/tests/forecasts/test_noon_forecasts_summaries.py +++ b/backend/packages/wps-api/src/app/tests/forecasts/test_noon_forecasts_summaries.py @@ -1,14 +1,14 @@ +from datetime import datetime, timedelta + +import app.main 
import pytest -from datetime import timedelta, datetime +import wps_shared.utils.time as time_utils +from aiohttp import ClientSession from sqlalchemy.orm import Session from starlette.testclient import TestClient -from aiohttp import ClientSession -from wps_shared.schemas.stations import StationCodeList -import app.main -from wps_shared.tests.common import default_mock_client_get from wps_shared.db.models.forecasts import NoonForecast -import wps_shared.utils.time as time_utils - +from wps_shared.tests.common import default_mock_client_get +from wps_wf1.models import StationCodeList noon = time_utils.get_utc_now().replace(hour=20, minute=0, second=0, microsecond=0) weather_date = noon - timedelta(days=2) @@ -18,7 +18,9 @@ mock_rhs = [50, 51, 52] -def mock_query_noon_forecast_records(session: Session, station_codes: StationCodeList, start_date: datetime, end_date: datetime): +def mock_query_noon_forecast_records( + session: Session, station_codes: StationCodeList, start_date: datetime, end_date: datetime +): """Mock some noon forecasts""" forecasts = [] weather_values = [] @@ -27,18 +29,36 @@ def mock_query_noon_forecast_records(session: Session, station_codes: StationCod for code in [209, 322]: for value in weather_values: - forecasts.append(NoonForecast(station_code=code, weather_date=weather_date, created_at=time_utils.get_utc_now(), temperature=value["tmp"], relative_humidity=value["rh"])) + forecasts.append( + NoonForecast( + station_code=code, + weather_date=weather_date, + created_at=time_utils.get_utc_now(), + temperature=value["tmp"], + relative_humidity=value["rh"], + ) + ) return forecasts -@pytest.mark.parametrize("codes,status,num_summaries", [([999], 200, 0), ([322], 200, 1), ([322, 838], 200, 2)]) +@pytest.mark.parametrize( + "codes,status,num_summaries", [([999], 200, 0), ([322], 200, 1), ([322, 838], 200, 2)] +) @pytest.mark.usefixtures("mock_jwt_decode") def test_noon_forecast_summaries(codes, status, num_summaries, monkeypatch): 
monkeypatch.setattr(ClientSession, "get", default_mock_client_get) - monkeypatch.setattr(app.forecasts.noon_forecasts_summaries, "query_noon_forecast_records", mock_query_noon_forecast_records) + monkeypatch.setattr( + app.forecasts.noon_forecasts_summaries, + "query_noon_forecast_records", + mock_query_noon_forecast_records, + ) client = TestClient(app.main.app) monkeypatch.setattr(ClientSession, "get", default_mock_client_get) - response = client.post("/api/forecasts/noon/summaries/", headers={"Authorization": "Bearer token"}, json={"stations": codes}) + response = client.post( + "/api/forecasts/noon/summaries/", + headers={"Authorization": "Bearer token"}, + json={"stations": codes}, + ) assert response.status_code == status assert len(response.json()["summaries"]) == num_summaries @@ -55,4 +75,12 @@ def test_noon_forecast_summaries(codes, status, num_summaries, monkeypatch): if len(result["summaries"]) == 1: summary = result["summaries"][0] assert summary["station"]["code"] == codes[0] - assert summary["values"] == [{"datetime": weather_date.isoformat().replace("+00:00", "Z"), "tmp_min": tmp_min, "tmp_max": tmp_max, "rh_min": rh_min, "rh_max": rh_max}] + assert summary["values"] == [ + { + "datetime": weather_date.isoformat().replace("+00:00", "Z"), + "tmp_min": tmp_min, + "tmp_max": tmp_max, + "rh_min": rh_min, + "rh_max": rh_max, + } + ] diff --git a/backend/packages/wps-api/src/app/tests/hfi/test_hfi.py b/backend/packages/wps-api/src/app/tests/hfi/test_hfi.py index 6bad466341..426486fd51 100644 --- a/backend/packages/wps-api/src/app/tests/hfi/test_hfi.py +++ b/backend/packages/wps-api/src/app/tests/hfi/test_hfi.py @@ -1,19 +1,13 @@ """Unit testing for hfi logic""" +import json +import os from datetime import date, datetime, timedelta + import pytest -import os -import json -from pytest_mock import MockerFixture -from app.hfi.hfi_calc import ( - calculate_hfi_results, - calculate_mean_intensity, - calculate_max_intensity_group, - calculate_prep_level, - 
validate_date_range, - validate_station_daily, -) import wps_shared.db.models.hfi_calc as hfi_calc_models +from pytest_mock import MockerFixture +from starlette.testclient import TestClient from wps_shared.schemas.hfi_calc import ( DailyResult, DateRange, @@ -30,10 +24,18 @@ ) from wps_shared.schemas.shared import FuelType from wps_shared.utils.time import get_pst_now, get_utc_now -from wps_shared.wildfire_one.schema_parsers import WFWXWeatherStation -from starlette.testclient import TestClient -from app.main import app as starlette_app +from wps_wf1.models import WFWXWeatherStation + import app.routers.hfi_calc +from app.hfi.hfi_calc import ( + calculate_hfi_results, + calculate_max_intensity_group, + calculate_mean_intensity, + calculate_prep_level, + validate_date_range, + validate_station_daily, +) +from app.main import app as starlette_app # Kamloops FC fixture kamloops_fc = FireCentre( diff --git a/backend/packages/wps-api/src/app/tests/hfi/test_hfi_endpoint_request.py b/backend/packages/wps-api/src/app/tests/hfi/test_hfi_endpoint_request.py index cfa578a428..60e36a8fff 100644 --- a/backend/packages/wps-api/src/app/tests/hfi/test_hfi_endpoint_request.py +++ b/backend/packages/wps-api/src/app/tests/hfi/test_hfi_endpoint_request.py @@ -2,28 +2,29 @@ Unit tests for hfi endpoints. 
""" +import json import os from datetime import date + import pytest -import json -from fastapi.testclient import TestClient +import wps_shared.db.crud.hfi_calc from aiohttp import ClientSession +from fastapi.testclient import TestClient from pytest_mock import MockerFixture -import app.main -import app.routers.hfi_calc -from wps_shared.tests.common import default_mock_client_get -from app.tests import load_json_file from wps_shared.db.models.hfi_calc import ( - PlanningWeatherStation, - FuelType, FireCentre, - PlanningArea, - HFIRequest, - FireStartRange, FireStartLookup, + FireStartRange, + FuelType, + HFIRequest, + PlanningArea, + PlanningWeatherStation, ) -import wps_shared.db.crud.hfi_calc +from wps_shared.tests.common import default_mock_client_get +import app.main +import app.routers.hfi_calc +from app.tests import load_json_file from app.tests.utils.mock_jwt_decode_role import MockJWTDecodeWithRole diff --git a/backend/packages/wps-api/src/app/tests/hfi/test_hfi_planning_areas.py b/backend/packages/wps-api/src/app/tests/hfi/test_hfi_planning_areas.py index 1df57f2b8c..a4b70a5509 100644 --- a/backend/packages/wps-api/src/app/tests/hfi/test_hfi_planning_areas.py +++ b/backend/packages/wps-api/src/app/tests/hfi/test_hfi_planning_areas.py @@ -1,27 +1,58 @@ import pytest -from starlette.testclient import TestClient from sqlalchemy.orm import Session +from starlette.testclient import TestClient +from wps_shared.db.models.hfi_calc import FireCentre, FuelType, PlanningArea, PlanningWeatherStation + import app.main -from wps_shared.db.models.hfi_calc import PlanningWeatherStation, FireCentre, FuelType, PlanningArea -import app.routers.hfi_calc def mock_get_fire_weather_stations(_: Session): fire_centre = FireCentre(id=1, name="Kamloops Fire Centre") planning_area_1 = PlanningArea(id=1, name="Kamloops (K2)", fire_centre_id=1) planning_area_2 = PlanningArea(id=2, name="Vernon (K4)", fire_centre_id=1) - fuel_type_1 = FuelType(id=1, abbrev="O1B", description="neigh", 
fuel_type_code="O1B", percentage_conifer=0, percentage_dead_fir=0) - fuel_type_2 = FuelType(id=2, abbrev="C7B", description="moo", fuel_type_code="C7", percentage_conifer=100, percentage_dead_fir=0) + fuel_type_1 = FuelType( + id=1, + abbrev="O1B", + description="neigh", + fuel_type_code="O1B", + percentage_conifer=0, + percentage_dead_fir=0, + ) + fuel_type_2 = FuelType( + id=2, + abbrev="C7B", + description="moo", + fuel_type_code="C7", + percentage_conifer=100, + percentage_dead_fir=0, + ) return [ - (PlanningWeatherStation(station_code=322, fuel_type_id=1, planning_area_id=1), fuel_type_1, planning_area_1, fire_centre), - (PlanningWeatherStation(station_code=346, fuel_type_id=2, planning_area_id=2), fuel_type_2, planning_area_2, fire_centre), - (PlanningWeatherStation(station_code=334, fuel_type_id=2, planning_area_id=2), fuel_type_2, planning_area_2, fire_centre), + ( + PlanningWeatherStation(station_code=322, fuel_type_id=1, planning_area_id=1), + fuel_type_1, + planning_area_1, + fire_centre, + ), + ( + PlanningWeatherStation(station_code=346, fuel_type_id=2, planning_area_id=2), + fuel_type_2, + planning_area_2, + fire_centre, + ), + ( + PlanningWeatherStation(station_code=334, fuel_type_id=2, planning_area_id=2), + fuel_type_2, + planning_area_2, + fire_centre, + ), ] @pytest.mark.usefixtures("mock_client_session", "mock_jwt_decode") def test_get_fire_weather_stations(monkeypatch: pytest.MonkeyPatch): - monkeypatch.setattr(app.hfi.hfi_calc, "get_fire_weather_stations", mock_get_fire_weather_stations) + monkeypatch.setattr( + app.hfi.hfi_calc, "get_fire_weather_stations", mock_get_fire_weather_stations + ) client = TestClient(app.main.app) headers = {"Content-Type": "application/json", "Authorization": "Bearer token"} response = client.get("/api/hfi-calc/fire-centres/", headers=headers) diff --git a/backend/packages/wps-api/src/app/tests/hfi/test_pdf_formatter.py b/backend/packages/wps-api/src/app/tests/hfi/test_pdf_formatter.py index 204ff3b8ef..2d3830fbdc 
100644 --- a/backend/packages/wps-api/src/app/tests/hfi/test_pdf_formatter.py +++ b/backend/packages/wps-api/src/app/tests/hfi/test_pdf_formatter.py @@ -2,44 +2,50 @@ import os from datetime import date, datetime, timedelta from typing import List + from jinja2 import Environment, FunctionLoader -from app.hfi.pdf_data_formatter import (get_date_range_string, - get_fire_start_labels, - get_formatted_dates, - get_mean_intensity_groups, - get_merged_station_data, - get_prep_levels, - get_sorted_dates, - response_2_daily_jinja_format, - response_2_prep_cycle_jinja_format) -from app.hfi.pdf_generator import build_mappings -from app.hfi.pdf_template import get_template from wps_shared.schemas.hfi_calc import ( DailyResult, DateRange, FireCentre, + FireStartRange, HFIResultResponse, StationDaily, StationInfo, StationPDFData, WeatherStation, WeatherStationProperties, - FireStartRange, ) from wps_shared.schemas.shared import FuelType -test_hfi_result = os.path.join(os.path.dirname(__file__), 'test_hfi_result.json') -test_fcs = os.path.join(os.path.dirname(__file__), 'test_fire_centres.json') +from app.hfi.pdf_data_formatter import ( + get_date_range_string, + get_fire_start_labels, + get_formatted_dates, + get_mean_intensity_groups, + get_merged_station_data, + get_prep_levels, + get_sorted_dates, + response_2_daily_jinja_format, + response_2_prep_cycle_jinja_format, +) +from app.hfi.pdf_generator import build_mappings +from app.hfi.pdf_template import get_template + +test_hfi_result = os.path.join(os.path.dirname(__file__), "test_hfi_result.json") +test_fcs = os.path.join(os.path.dirname(__file__), "test_fire_centres.json") jinja_env = Environment(loader=FunctionLoader(get_template), autoescape=True) -lowest_fire_starts = (FireStartRange(id=1, label='0-1', )) +lowest_fire_starts = FireStartRange( + id=1, + label="0-1", +) def test_get_sorted_dates_same(): - """ Only one date of each date is returned in order """ + """Only one date of each date is returned in order""" 
start_date = date.fromisocalendar(2022, 2, 2) end_date = start_date + timedelta(days=6) - date_range: DateRange = DateRange(start_date=start_date, - end_date=end_date) + date_range: DateRange = DateRange(start_date=start_date, end_date=end_date) result = get_sorted_dates(date_range) assert len(result) == 7 @@ -48,85 +54,87 @@ def test_get_sorted_dates_same(): def test_get_date_range_string_2(): - """ Only one date of each date is returned in order """ + """Only one date of each date is returned in order""" dates = [date.fromisocalendar(2022, 2, 2), date.fromisocalendar(2022, 3, 3)] result = get_date_range_string(dates) - assert result == '2022-01-11 to 2022-01-19' + assert result == "2022-01-11 to 2022-01-19" def test_get_date_range_string_3(): - """ Only one date of each date is returned in order """ - dates = [date.fromisocalendar(2022, 1, 1), - date.fromisocalendar(2022, 2, 2), - date.fromisocalendar(2022, 3, 3)] + """Only one date of each date is returned in order""" + dates = [ + date.fromisocalendar(2022, 1, 1), + date.fromisocalendar(2022, 2, 2), + date.fromisocalendar(2022, 3, 3), + ] result = get_date_range_string(dates) - assert result == '2022-01-03 to 2022-01-19' + assert result == "2022-01-03 to 2022-01-19" def test_get_date_range_string_empty(): - """ Only one date of each date is returned in order """ + """Only one date of each date is returned in order""" result = get_date_range_string([]) - assert result == '' + assert result == "" def test_get_date_range_string_single(): - """ Only one date of each date is returned in order """ + """Only one date of each date is returned in order""" result = get_date_range_string([date.fromisocalendar(2022, 1, 1)]) - assert result == '2022-01-03' + assert result == "2022-01-03" def test_get_fire_start_labels(): - """ Returns the fire start labels from a planning area result """ - with open(test_hfi_result, 'r') as hfi_result: + """Returns the fire start labels from a planning area result""" + with 
open(test_hfi_result, "r") as hfi_result: result_json = json.load(hfi_result) result = HFIResultResponse(**result_json) fire_labels = get_fire_start_labels(result.planning_area_hfi_results[0].daily_results) - assert fire_labels == ['0-1', '0-1', '0-1'] + assert fire_labels == ["0-1", "0-1", "0-1"] def test_get_prep_levels(): - """ Returns the prep levels from a list of daily results""" + """Returns the prep levels from a list of daily results""" daily_results: List[DailyResult] = [ DailyResult( date=datetime.fromisocalendar(2022, 2, 2), dailies=[], fire_starts=lowest_fire_starts, - prep_level=1 + prep_level=1, ), DailyResult( date=datetime.fromisocalendar(2022, 2, 2), dailies=[], fire_starts=lowest_fire_starts, - prep_level=2 - ) + prep_level=2, + ), ] result = get_prep_levels(daily_results) assert result == [1, 2] def test_get_mean_intensity_groups(): - """ Returns the prep levels from a list of daily results""" + """Returns the prep levels from a list of daily results""" daily_results: List[DailyResult] = [ DailyResult( date=datetime.fromisocalendar(2022, 2, 2), dailies=[], fire_starts=lowest_fire_starts, - mean_intensity_group=1 + mean_intensity_group=1, ), DailyResult( date=datetime.fromisocalendar(2022, 2, 2), dailies=[], fire_starts=lowest_fire_starts, - mean_intensity_group=2 - ) + mean_intensity_group=2, + ), ] result = get_mean_intensity_groups(daily_results) assert result == [1, 2] def test_all_array_functions(): - """ Per day metrics, ordered by date, shoud be the same length """ - with open(test_hfi_result, 'r') as hfi_result: + """Per day metrics, ordered by date, shoud be the same length""" + with open(test_hfi_result, "r") as hfi_result: result_json = json.load(hfi_result) result = HFIResultResponse(**result_json) @@ -147,26 +155,29 @@ def test_all_array_functions(): def test_get_merged_station_data(): - weather_station_1 = WeatherStation( code=1, - station_props=WeatherStationProperties(name='s1', - wfwx_station_uuid='1', - elevation=1)) + 
station_props=WeatherStationProperties(name="s1", wfwx_station_uuid="1", elevation=1), + ) weather_station_2 = WeatherStation( code=2, - station_props=WeatherStationProperties(name='s2', - wfwx_station_uuid='2', - elevation=1, - )) + station_props=WeatherStationProperties( + name="s2", + wfwx_station_uuid="2", + elevation=1, + ), + ) station_dict = {1: weather_station_1, 2: weather_station_2} station_daily1 = StationDaily(code=1, date=datetime.fromisocalendar(2022, 2, 2)) station_daily2 = StationDaily(code=2, date=datetime.fromisocalendar(2022, 2, 2)) - fuel_types = {1: FuelType(id=1, abbrev='A', fuel_type_code='A', description='A')} - planning_area_station_info = [StationInfo(station_code=1, selected=True, fuel_type_id=1), - StationInfo(station_code=2, selected=True, fuel_type_id=1)] + fuel_types = {1: FuelType(id=1, abbrev="A", fuel_type_code="A", description="A")} + planning_area_station_info = [ + StationInfo(station_code=1, selected=True, fuel_type_id=1), + StationInfo(station_code=2, selected=True, fuel_type_id=1), + ] merged_station_data: List[StationPDFData] = get_merged_station_data( - station_dict, [station_daily1, station_daily2], fuel_types, planning_area_station_info) + station_dict, [station_daily1, station_daily2], fuel_types, planning_area_station_info + ) assert len(merged_station_data) == 2 assert merged_station_data[0].code == 1 assert merged_station_data[0].station_props == weather_station_1.station_props @@ -177,57 +188,65 @@ def test_get_merged_station_data(): def test_response_2_prep_cycle_jinja_format(): - with open(test_hfi_result, 'r') as hfi_result, open(test_fcs, 'r') as fcs: + with open(test_hfi_result, "r") as hfi_result, open(test_fcs, "r") as fcs: result = json.load(hfi_result) fc_dict = json.load(fcs) fire_centres = [] - for fc_json in fc_dict['fire_centres']: + for fc_json in fc_dict["fire_centres"]: fc = FireCentre(**fc_json) fire_centres.append(fc) - fuel_types = {id: FuelType(id=id, abbrev='A', fuel_type_code='A', 
description='A') - for id in [22, 24, 26, 34]} + fuel_types = { + id: FuelType(id=id, abbrev="A", fuel_type_code="A", description="A") + for id in [22, 24, 26, 34] + } _, planning_area_dict, station_dict = build_mappings(fire_centres) area_pdf_data, formatted_dates, date_range = response_2_prep_cycle_jinja_format( - HFIResultResponse(**result), planning_area_dict, station_dict, fuel_types=fuel_types) + HFIResultResponse(**result), planning_area_dict, station_dict, fuel_types=fuel_types + ) # 7 planning areas in coastal assert len(area_pdf_data) == 7 # assert correct order - assert area_pdf_data[0].planning_area_name == 'Fraser Zone' - assert area_pdf_data[1].planning_area_name == 'Pemberton Zone' - assert area_pdf_data[2].planning_area_name == 'Sunshine Coast' - assert area_pdf_data[3].planning_area_name == 'South Island' - assert area_pdf_data[4].planning_area_name == 'Mid Island' - assert area_pdf_data[5].planning_area_name == 'North Island' - assert area_pdf_data[6].planning_area_name == 'Mid-Coast' - assert formatted_dates == ['Monday August 02, 2021', 'Tuesday August 03, 2021', - 'Wednesday August 04, 2021'] - assert date_range == '2021-08-02 to 2021-08-04' + assert area_pdf_data[0].planning_area_name == "Fraser Zone" + assert area_pdf_data[1].planning_area_name == "Pemberton Zone" + assert area_pdf_data[2].planning_area_name == "Sunshine Coast" + assert area_pdf_data[3].planning_area_name == "South Island" + assert area_pdf_data[4].planning_area_name == "Mid Island" + assert area_pdf_data[5].planning_area_name == "North Island" + assert area_pdf_data[6].planning_area_name == "Mid-Coast" + assert formatted_dates == [ + "Monday August 02, 2021", + "Tuesday August 03, 2021", + "Wednesday August 04, 2021", + ] + assert date_range == "2021-08-02 to 2021-08-04" def test_response_2_daily_jinja_format(): - with open(test_hfi_result, 'r') as hfi_result, open(test_fcs, 'r') as fcs: + with open(test_hfi_result, "r") as hfi_result, open(test_fcs, "r") as fcs: result = 
json.load(hfi_result) fc_dict = json.load(fcs) fire_centres = [] - for fc_json in fc_dict['fire_centres']: + for fc_json in fc_dict["fire_centres"]: fc = FireCentre(**fc_json) fire_centres.append(fc) _, planning_area_dict, station_dict = build_mappings(fire_centres) - fuel_types = {id: FuelType(id=id, abbrev='A', fuel_type_code='A', description='A') - for id in [22, 24, 26, 34]} + fuel_types = { + id: FuelType(id=id, abbrev="A", fuel_type_code="A", description="A") + for id in [22, 24, 26, 34] + } daily_pdf_data_by_date = response_2_daily_jinja_format( - HFIResultResponse(**result), planning_area_dict, station_dict, - fuel_types=fuel_types) + HFIResultResponse(**result), planning_area_dict, station_dict, fuel_types=fuel_types + ) # 4 daily results day_dates = list(daily_pdf_data_by_date.keys()) assert len(day_dates) == 3 - assert day_dates[0] == '2021-08-02' - assert day_dates[1] == '2021-08-03' - assert day_dates[2] == '2021-08-04' + assert day_dates[0] == "2021-08-02" + assert day_dates[1] == "2021-08-03" + assert day_dates[2] == "2021-08-04" for daily_planning_area_data in daily_pdf_data_by_date.values(): # 7 planning areas in coastal diff --git a/backend/packages/wps-api/src/app/tests/hfi/test_pdf_generator.py b/backend/packages/wps-api/src/app/tests/hfi/test_pdf_generator.py index 98b4cc0a6f..1af0492502 100644 --- a/backend/packages/wps-api/src/app/tests/hfi/test_pdf_generator.py +++ b/backend/packages/wps-api/src/app/tests/hfi/test_pdf_generator.py @@ -1,55 +1,73 @@ +import hashlib import json import os from datetime import date, datetime -import hashlib + from jinja2 import Environment, FunctionLoader -from app.hfi.pdf_generator import generate_pdf, get_pdf_filename, generate_html -from app.hfi.pdf_template import get_template -from wps_shared.schemas.hfi_calc import FireCentre, HFIResultResponse +from wps_shared.schemas.hfi_calc import HFIResultResponse, FireCentre from wps_shared.schemas.shared import FuelType -test_hfi_result = 
os.path.join(os.path.dirname(__file__), 'test_hfi_result.json') -test_fcs = os.path.join(os.path.dirname(__file__), 'test_fire_centres.json') +from app.hfi.pdf_generator import generate_html, generate_pdf, get_pdf_filename +from app.hfi.pdf_template import get_template + +test_hfi_result = os.path.join(os.path.dirname(__file__), "test_hfi_result.json") +test_fcs = os.path.join(os.path.dirname(__file__), "test_fire_centres.json") def generate_test_input(): - with open(test_hfi_result, 'r') as hfi_result, open(test_fcs, 'r') as fcs: + with open(test_hfi_result, "r") as hfi_result, open(test_fcs, "r") as fcs: result = json.load(hfi_result) fc_dict = json.load(fcs) fire_centres = [] - for fc_json in fc_dict['fire_centres']: + for fc_json in fc_dict["fire_centres"]: fc = FireCentre(**fc_json) fire_centres.append(fc) - fuel_types = {id: FuelType( - id=id, - abbrev=f'{id}', - fuel_type_code=f'{id}', - description=f'{id}', - percentage_conifer=0, - percentage_dead_fir=0 - ) for id in range(50)} + fuel_types = { + id: FuelType( + id=id, + abbrev=f"{id}", + fuel_type_code=f"{id}", + description=f"{id}", + percentage_conifer=0, + percentage_dead_fir=0, + ) + for id in range(50) + } return result, fire_centres, fuel_types def test_generate_html(): - """ Test the HTML generation - checking that the hash of the html doesn't change """ + """Test the HTML generation - checking that the hash of the html doesn't change""" jinja_env = Environment(loader=FunctionLoader(get_template), autoescape=True) (result, fire_centres, fuel_types) = generate_test_input() - html_string, _ = generate_html(HFIResultResponse(**result), fire_centres, 'wps', - datetime.fromisocalendar(2022, 2, 2), jinja_env, - fuel_types) + html_string, _ = generate_html( + HFIResultResponse(**result), + fire_centres, + "wps", + datetime.fromisocalendar(2022, 2, 2), + jinja_env, + fuel_types, + ) - assert hashlib.sha256(html_string.encode()).hexdigest( - ) == 
'b994d15734bcf9c3b4eaeadc2e9609dd79cf06e4f1446eb966306a5e89e0ac96' + assert ( + hashlib.sha256(html_string.encode()).hexdigest() + == "b994d15734bcf9c3b4eaeadc2e9609dd79cf06e4f1446eb966306a5e89e0ac96" + ) def test_generate_pdf(): jinja_env = Environment(loader=FunctionLoader(get_template), autoescape=True) (result, fire_centres, fuel_types) = generate_test_input() - pdf_bytes, pdf_filename = generate_pdf(HFIResultResponse(**result), fire_centres, 'wps', - datetime.fromisocalendar(2022, 2, 2), jinja_env, fuel_types=fuel_types) + pdf_bytes, pdf_filename = generate_pdf( + HFIResultResponse(**result), + fire_centres, + "wps", + datetime.fromisocalendar(2022, 2, 2), + jinja_env, + fuel_types=fuel_types, + ) assert len(pdf_bytes) > 0 assert pdf_filename is not None # Would be nice to check if the pdf hash has changed, but the pdf generated by pdfkit is non @@ -57,5 +75,5 @@ def test_generate_pdf(): def test_pdf_filename(): - res = get_pdf_filename('Kamloops', date.fromisocalendar(2022, 2, 2), 'wps') - assert res == 'Kamloops_HFICalculator_2022-01-11_WPS.pdf' + res = get_pdf_filename("Kamloops", date.fromisocalendar(2022, 2, 2), "wps") + assert res == "Kamloops_HFICalculator_2022-01-11_WPS.pdf" diff --git a/backend/packages/wps-api/src/app/tests/jobs/job_fixtures.py b/backend/packages/wps-api/src/app/tests/jobs/job_fixtures.py index aedb2e61ed..08fd9f8f9e 100644 --- a/backend/packages/wps-api/src/app/tests/jobs/job_fixtures.py +++ b/backend/packages/wps-api/src/app/tests/jobs/job_fixtures.py @@ -1,6 +1,7 @@ """ Fixtures for job tests """ import asyncio -from wps_shared.wildfire_one.wfwx_api import WFWXWeatherStation + +from wps_wf1.models import WFWXWeatherStation class MockWFWXHourlyResponse(object): diff --git a/backend/packages/wps-api/src/app/tests/jobs/test_hourly_actuals.py b/backend/packages/wps-api/src/app/tests/jobs/test_hourly_actuals.py index 35eaa87843..4b1a0d2f43 100644 --- a/backend/packages/wps-api/src/app/tests/jobs/test_hourly_actuals.py +++ 
b/backend/packages/wps-api/src/app/tests/jobs/test_hourly_actuals.py @@ -1,40 +1,29 @@ -""" Unit testing for hourly actuals job """ -import math -import os +"""Unit testing for hourly actuals job""" + import logging +import os +from unittest.mock import MagicMock + import pytest from pytest_mock import MockerFixture -from wps_shared.db.models.observations import HourlyActual -from app.tests.jobs.job_fixtures import mock_wfwx_stations, mock_wfwx_response -from wps_shared.utils.time import get_utc_now -from app.jobs import hourly_actuals -from wps_shared.wildfire_one import wfwx_api +from app.jobs import hourly_actuals +from app.tests.jobs.job_fixtures import mock_wfwx_response, mock_wfwx_stations logger = logging.getLogger(__name__) -@pytest.fixture() -def mock_hourly_actuals(mocker: MockerFixture): - """ Mocks out hourly actuals as async result """ - wfwx_hourlies = mock_wfwx_response() - future_wfwx_stations = mock_wfwx_stations() - mocker.patch("wps_shared.wildfire_one.wfwx_api.wfwx_station_list_mapper", return_value=future_wfwx_stations) - mocker.patch("wps_shared.wildfire_one.wfwx_api.get_hourly_actuals_all_stations", return_value=wfwx_hourlies) - mocker.patch("wps_shared.wildfire_one.wildfire_fetchers.fetch_paged_response_generator", return_value=iter(wfwx_hourlies)) - - -def test_hourly_actuals_job(monkeypatch, mocker: MockerFixture, mock_hourly_actuals): - """ Very simple test that checks that: +def test_hourly_actuals_job(mocker: MockerFixture, mock_wfwx_api): + """Very simple test that checks that: - the bot exits with a success code - the expected number of records are saved. 
""" - async def mock_get_auth_header(_): - return dict() + wfwx_hourlies = mock_wfwx_response() + mock_wfwx_api.get_hourly_actuals_all_stations.return_value = wfwx_hourlies + mocker.patch("app.jobs.hourly_actuals.create_wfwx_api", return_value=mock_wfwx_api) - monkeypatch.setattr(wfwx_api, 'get_auth_header', mock_get_auth_header) - save_hourly_actuals_spy = mocker.spy(hourly_actuals, 'save_hourly_actual') + save_hourly_actuals_spy = mocker.spy(hourly_actuals, "save_hourly_actual") with pytest.raises(SystemExit) as excinfo: hourly_actuals.main() # Assert that we exited without errors. @@ -44,18 +33,14 @@ async def mock_get_auth_header(_): assert save_hourly_actuals_spy.call_count == 2 -def test_hourly_actuals_job_fail(mocker: MockerFixture, - monkeypatch, - mock_requests_session): +def test_hourly_actuals_job_fail(mocker: MockerFixture, mock_wfwx_api): """ Test that when the bot fails, a message is sent to rocket-chat, and our exit code is 1. """ - def mock_get_hourly_readings(self, filename: str): - raise Exception() - - monkeypatch.setattr(wfwx_api, 'get_hourly_readings', mock_get_hourly_readings) - rocket_chat_spy = mocker.spy(hourly_actuals, 'send_rocketchat_notification') + mock_wfwx_api.get_hourly_actuals_all_stations = mocker.AsyncMock(side_effect=Exception()) + mocker.patch("app.jobs.hourly_actuals.create_wfwx_api", return_value=mock_wfwx_api) + rocket_chat_spy = mocker.spy(hourly_actuals, "send_rocketchat_notification") with pytest.raises(SystemExit) as excinfo: hourly_actuals.main() @@ -63,76 +48,3 @@ def mock_get_hourly_readings(self, filename: str): assert excinfo.value.code == os.EX_SOFTWARE # Assert that rocket chat was called. 
assert rocket_chat_spy.call_count == 1 - - -def test_parse_hourly_actual(): - """ Valid fields are set when values exist """ - raw_actual = { - "weatherTimestamp": get_utc_now().timestamp(), - "temperature": 0.0, - "relativeHumidity": 0.0, - "windSpeed": 0.0, - "windDirection": 0.0, - "precipitation": 0.0, - "fineFuelMoistureCode": 0.0, - "initialSpreadIndex": 0.0, - "fireWeatherIndex": 0.0 - } - - hourly_actual = wfwx_api.parse_hourly_actual(1, raw_actual) - assert isinstance(hourly_actual, HourlyActual) - assert hourly_actual.rh_valid is True - assert hourly_actual.temp_valid is True - assert hourly_actual.wdir_valid is True - assert hourly_actual.precip_valid is True - assert hourly_actual.wspeed_valid is True - - -def test_invalid_metrics(): - """ Metric valid flags should be false """ - - raw_actual = { - "weatherTimestamp": get_utc_now().timestamp(), - "temperature": 0.0, - "relativeHumidity": 101, - "windSpeed": -1, - "windDirection": 361, - "precipitation": -1, - "fineFuelMoistureCode": 0.0, - "initialSpreadIndex": 0.0, - "fireWeatherIndex": 0.0 - } - - hourly_actual = wfwx_api.parse_hourly_actual(1, raw_actual) - assert isinstance(hourly_actual, HourlyActual) - assert hourly_actual.temp_valid is True - assert hourly_actual.rh_valid is False - assert hourly_actual.precip_valid is False - assert hourly_actual.wspeed_valid is False - assert hourly_actual.wdir_valid is False - - -def test_invalid_metrics_from_wfwx(): - """ Metric valid flags should be false """ - - raw_actual = { - "weatherTimestamp": get_utc_now().timestamp(), - "temperature": 0.0, - "relativeHumidity": 101, - "windSpeed": -1, - "windDirection": 361, - "fineFuelMoistureCode": 0.0, - "initialSpreadIndex": 0.0, - "fireWeatherIndex": 0.0, - "observationValid": False, - "observationValidComment": "Precipitation can not be null." 
- } - - hourly_actual = wfwx_api.parse_hourly_actual(1, raw_actual) - assert isinstance(hourly_actual, HourlyActual) - assert hourly_actual.temp_valid is True - assert hourly_actual.rh_valid is False - assert hourly_actual.precip_valid is False - assert hourly_actual.wspeed_valid is False - assert hourly_actual.wdir_valid is False - assert hourly_actual.precipitation is math.nan diff --git a/backend/packages/wps-api/src/app/tests/jobs/test_noon_forecasts.py b/backend/packages/wps-api/src/app/tests/jobs/test_noon_forecasts.py index 1794239f3f..a705ccf4f1 100644 --- a/backend/packages/wps-api/src/app/tests/jobs/test_noon_forecasts.py +++ b/backend/packages/wps-api/src/app/tests/jobs/test_noon_forecasts.py @@ -1,36 +1,27 @@ -""" Unit tests for the fireweather noon forecats job """ -import os +"""Unit tests for the fireweather noon forecats job""" + import logging +import os + import pytest from pytest_mock import MockerFixture + from app.jobs import noon_forecasts -from app.tests.jobs.job_fixtures import mock_wfwx_stations, mock_wfwx_response -from wps_shared.wildfire_one import wfwx_api +from app.tests.jobs.job_fixtures import mock_wfwx_response logger = logging.getLogger(__name__) -@pytest.fixture() -def mock_noon_forecasts(mocker: MockerFixture): - """ Mocks out noon forecasts as async result """ - wfwx_hourlies = mock_wfwx_response() - future_wfwx_stations = mock_wfwx_stations() - - mocker.patch("wps_shared.wildfire_one.wfwx_api.wfwx_station_list_mapper", return_value=future_wfwx_stations) - mocker.patch("wps_shared.wildfire_one.wfwx_api.get_noon_forecasts_all_stations", return_value=wfwx_hourlies) - mocker.patch("wps_shared.wildfire_one.wildfire_fetchers.fetch_paged_response_generator", return_value=iter(wfwx_hourlies)) - - -def test_noon_forecasts_bot(monkeypatch, mocker: MockerFixture, mock_noon_forecasts): - """ Very simple test that checks that: +def test_noon_forecasts_bot(monkeypatch, mocker: MockerFixture, mock_wfwx_api): + """Very simple test that checks 
that: - the bot exits with a success code - the expected number of records are saved. """ - async def mock_get_auth_header(_): - return dict() - monkeypatch.setattr(wfwx_api, 'get_auth_header', mock_get_auth_header) - save_noon_forecast_spy = mocker.spy(noon_forecasts, 'save_noon_forecast') + wfwx_hourlies = mock_wfwx_response() + mock_wfwx_api.get_noon_forecasts_all_stations.return_value = wfwx_hourlies + mocker.patch("app.jobs.noon_forecasts.create_wfwx_api", return_value=mock_wfwx_api) + save_noon_forecast_spy = mocker.spy(noon_forecasts, "save_noon_forecast") with pytest.raises(SystemExit) as excinfo: noon_forecasts.main() # Assert that we exited without errors. @@ -40,18 +31,14 @@ async def mock_get_auth_header(_): assert save_noon_forecast_spy.call_count == 2 -def test_noon_forecasts_bot_fail(mocker: MockerFixture, - monkeypatch): +def test_noon_forecasts_bot_fail(mocker: MockerFixture, monkeypatch, mock_wfwx_api): """ Test that when the bot fails a message is sent to rocket-chat, and our exit code is 1. 
""" - def mock_get_noon_forecasts(): - raise Exception() - - monkeypatch.setattr(wfwx_api, 'get_noon_forecasts_all_stations', mock_get_noon_forecasts) - rocket_chat_spy = mocker.spy(noon_forecasts, 'send_rocketchat_notification') + mock_wfwx_api.get_noon_forecasts_all_stations = mocker.AsyncMock(side_effect=Exception()) + rocket_chat_spy = mocker.spy(noon_forecasts, "send_rocketchat_notification") with pytest.raises(SystemExit) as excinfo: noon_forecasts.main() diff --git a/backend/packages/wps-api/src/app/tests/morecast_v2/test_forecasts.py b/backend/packages/wps-api/src/app/tests/morecast_v2/test_forecasts.py index a8ce21210b..4ea7b4b43a 100644 --- a/backend/packages/wps-api/src/app/tests/morecast_v2/test_forecasts.py +++ b/backend/packages/wps-api/src/app/tests/morecast_v2/test_forecasts.py @@ -1,8 +1,18 @@ from datetime import datetime, timezone from typing import Optional -from unittest.mock import Mock, patch +from unittest.mock import AsyncMock, Mock, patch import pytest from math import isclose + +from wps_wf1.models import ( + StationDailyFromWF1, + WF1ForecastRecordType, + WF1PostForecast, + WFWXWeatherStation, + WeatherDeterminate, + WeatherIndeterminate, +) +import app from wps_shared.db.models.morecast_v2 import MorecastForecastRecord from app.morecast_v2.forecasts import ( actual_exists, @@ -13,8 +23,7 @@ get_forecasts, get_fwi_values, ) -from wps_shared.schemas.morecast_v2 import StationDailyFromWF1, WeatherDeterminate, WeatherIndeterminate, WF1ForecastRecordType, WF1PostForecast, MoreCastForecastInput -from wps_shared.wildfire_one.schema_parsers import WFWXWeatherStation +from wps_shared.schemas.morecast_v2 import MoreCastForecastInput start_time = datetime(2022, 1, 1, tzinfo=timezone.utc) end_time = datetime(2022, 1, 2, tzinfo=timezone.utc) @@ -240,10 +249,14 @@ def test_construct_wf1_forecast_update(): @pytest.mark.anyio -@patch("aiohttp.ClientSession.get") -@patch("app.morecast_v2.forecasts.get_forecasts_for_stations_by_date_range", 
return_value=[station_1_daily_from_wf1]) -async def test_construct_wf1_forecasts_new(_, mock_get): - result = await construct_wf1_forecasts(mock_get, [morecast_input_1, morecast_input_2], wfwx_weather_stations, "user") +async def test_construct_wf1_forecasts_new(mock_wfwx_api): + mock_wfwx_api.get_forecasts_for_stations_by_date_range = AsyncMock( + return_value=[station_1_daily_from_wf1] + ) + result = await construct_wf1_forecasts( + mock_wfwx_api, [morecast_input_1, morecast_input_2], wfwx_weather_stations, "user" + ) + assert len(result) == 2 # existing forecast assert_wf1_forecast(result[0], morecast_input_1, station_1_daily_from_wf1.forecast_id, station_1_daily_from_wf1.created_by, station_1_url, "1") diff --git a/backend/packages/wps-api/src/app/tests/morecast_v2/test_morecast_v2_endpoint.py b/backend/packages/wps-api/src/app/tests/morecast_v2/test_morecast_v2_endpoint.py index 9915715821..23fd03b11a 100644 --- a/backend/packages/wps-api/src/app/tests/morecast_v2/test_morecast_v2_endpoint.py +++ b/backend/packages/wps-api/src/app/tests/morecast_v2/test_morecast_v2_endpoint.py @@ -1,15 +1,17 @@ +from datetime import datetime +from unittest.mock import AsyncMock + import aiohttp -from fastapi.testclient import TestClient -from httpx import ASGITransport, AsyncClient +import app.routers.morecast_v2 import pytest -from datetime import datetime from aiohttp import ClientSession +from app.tests.utils.mock_jwt_decode_role import MockJWTDecodeWithRole +from fastapi.testclient import TestClient +from httpx import ASGITransport, AsyncClient +from wps_shared.schemas.morecast_v2 import MoreCastForecastInput, MoreCastForecastRequest from wps_shared.schemas.shared import StationsRequest from wps_shared.tests.common import default_mock_client_get -from wps_shared.schemas.morecast_v2 import MoreCastForecastInput, MoreCastForecastRequest, StationDailyFromWF1 -import app.routers.morecast_v2 -from app.tests.utils.mock_jwt_decode_role import MockJWTDecodeWithRole - +from 
wps_wf1.models import StationDailyFromWF1 morecast_v2_post_url = "/api/morecast-v2/forecast" morecast_v2_get_url = "/api/morecast-v2/forecasts/2023-03-15" @@ -21,7 +23,20 @@ decode_fn = "jwt.decode" -forecast = MoreCastForecastRequest(token="testToken", forecasts=[MoreCastForecastInput(station_code=1, for_date=1, temp=10.0, rh=40.1, precip=70.2, wind_speed=20.3, wind_direction=40)]) +forecast = MoreCastForecastRequest( + token="testToken", + forecasts=[ + MoreCastForecastInput( + station_code=1, + for_date=1, + temp=10.0, + rh=40.1, + precip=70.2, + wind_speed=20.3, + wind_direction=40, + ) + ], +) stations = StationsRequest(stations=[1, 2]) @@ -38,7 +53,9 @@ def client(): async def async_client(): from app.main import app as test_app - async with AsyncClient(transport=ASGITransport(app=test_app), base_url="https://test") as test_client: + async with AsyncClient( + transport=ASGITransport(app=test_app), base_url="https://test" + ) as test_client: yield test_client @@ -68,7 +85,9 @@ def test_post_forecast_unauthorized(client: TestClient): @pytest.mark.anyio -def test_post_forecast_authorized(client: TestClient, monkeypatch: pytest.MonkeyPatch): +def test_post_forecast_authorized( + client: TestClient, monkeypatch: pytest.MonkeyPatch, mocker, mock_wfwx_api +): """Allowed to post station changes with correct role""" def mock_admin_role_function(*_, **__): @@ -76,27 +95,27 @@ def mock_admin_role_function(*_, **__): monkeypatch.setattr(decode_fn, mock_admin_role_function) - async def mock_format_as_wf1_post_forecasts(client_session, forecasts_to_save, username, headers): + async def mock_format_as_wf1_post_forecasts(wfwx_api, forecasts_to_save, username): return [] - monkeypatch.setattr(app.routers.morecast_v2, "format_as_wf1_post_forecasts", mock_format_as_wf1_post_forecasts) - - async def mock_post_forecasts(client_session, forecasts): - return None - - monkeypatch.setattr(app.routers.morecast_v2, "post_forecasts", mock_post_forecasts) + monkeypatch.setattr( + 
app.routers.morecast_v2, "format_as_wf1_post_forecasts", mock_format_as_wf1_post_forecasts + ) - async def mock_get_auth_header(_): - return dict() + class MockResponse: + status = 201 - monkeypatch.setattr(app.routers.morecast_v2, "get_auth_header", mock_get_auth_header) + mock_wfwx_api.post_forecasts = AsyncMock(return_value=MockResponse()) + mocker.patch("app.routers.morecast_v2.create_wfwx_api", return_value=mock_wfwx_api) response = client.post(morecast_v2_post_url, json=forecast.model_dump()) assert response.status_code == 201 @pytest.mark.anyio -def test_post_forecast_authorized_error(client: TestClient, monkeypatch: pytest.MonkeyPatch): +def test_post_forecast_authorized_error( + client: TestClient, monkeypatch: pytest.MonkeyPatch, mocker, mock_wfwx_api +): """Allowed to post station changes with correct role""" def mock_admin_role_function(*_, **__): @@ -104,10 +123,12 @@ def mock_admin_role_function(*_, **__): monkeypatch.setattr(decode_fn, mock_admin_role_function) - async def mock_format_as_wf1_post_forecasts(client_session, forecasts_to_save, username, headers): + async def mock_format_as_wf1_post_forecasts(mock_wfwx_api, forecasts_to_save, username): return [] - monkeypatch.setattr(app.routers.morecast_v2, "format_as_wf1_post_forecasts", mock_format_as_wf1_post_forecasts) + monkeypatch.setattr( + app.routers.morecast_v2, "format_as_wf1_post_forecasts", mock_format_as_wf1_post_forecasts + ) class MockResponse: status = 500 @@ -115,23 +136,8 @@ class MockResponse: async def text(self): return "Bad Request" - class MockClientSession: - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc_val, exc_tb): - pass - - async def post(self, url): - return MockResponse() - - # Use monkeypatch to replace the ClientSession with our mock class - monkeypatch.setattr(aiohttp, "ClientSession", lambda: MockClientSession()) - - async def mock_get_auth_header(_): - return dict() - - monkeypatch.setattr(app.routers.morecast_v2, 
"get_auth_header", mock_get_auth_header) + mock_wfwx_api.post_forecasts = AsyncMock(return_value=MockResponse()) + mocker.patch("app.routers.morecast_v2.create_wfwx_api", mock_wfwx_api) response = client.post(morecast_v2_post_url, json=forecast.model_dump()) assert response.status_code == 500 @@ -151,7 +157,9 @@ def test_post_forecasts_by_date_range_unauthorized(client: TestClient): assert response.status_code == 401 -def test_post_forecast_by_date_range_authorized(client: TestClient, monkeypatch: pytest.MonkeyPatch): +def test_post_forecast_by_date_range_authorized( + client: TestClient, monkeypatch: pytest.MonkeyPatch +): """Allowed to post station changes with correct role""" def mock_admin_role_function(*_, **__): @@ -165,7 +173,9 @@ def mock_admin_role_function(*_, **__): def test_get_yesterday_dailies_unauthorized(client: TestClient): """user must be authenticated to retrieve yesterday dailies""" - response = client.post(morecast_v2_post_yesterday_dailies_url, json={"station_codes": [209, 211, 302]}) + response = client.post( + morecast_v2_post_yesterday_dailies_url, json={"station_codes": [209, 211, 302]} + ) assert response.status_code == 401 @@ -180,10 +190,15 @@ def mock_admin_role_function(*_, **__): requested_station_codes = [209, 211, 302] - response = client.post(morecast_v2_post_yesterday_dailies_url, json={"station_codes": requested_station_codes}) + response = client.post( + morecast_v2_post_yesterday_dailies_url, json={"station_codes": requested_station_codes} + ) assert response.status_code == 200 - parsed_dailies = [StationDailyFromWF1.model_validate(raw_daily) for raw_daily in response.json().get("dailies")] + parsed_dailies = [ + StationDailyFromWF1.model_validate(raw_daily) + for raw_daily in response.json().get("dailies") + ] assert len(parsed_dailies) == 3 today_date = datetime.strptime(today, "%Y-%m-%d").date() @@ -197,17 +212,23 @@ def mock_admin_role_function(*_, **__): def test_get_determinates_unauthorized(client: TestClient): - 
response = client.post(morecast_v2_post_determinates_url, json={"station_codes": [209, 211, 302]}) + response = client.post( + morecast_v2_post_determinates_url, json={"station_codes": [209, 211, 302]} + ) assert response.status_code == 401 @pytest.mark.anyio -async def test_get_determinates_authorized(anyio_backend, async_client: AsyncClient, monkeypatch: pytest.MonkeyPatch): +async def test_get_determinates_authorized( + anyio_backend, async_client: AsyncClient, monkeypatch: pytest.MonkeyPatch +): def mock_admin_role_function(*_, **__): return MockJWTDecodeWithRole("morecast2_write_forecast") monkeypatch.setattr(decode_fn, mock_admin_role_function) monkeypatch.setattr(ClientSession, "get", default_mock_client_get) - response = await async_client.post(morecast_v2_post_determinates_url, json={"stations": [209, 211, 302]}) + response = await async_client.post( + morecast_v2_post_determinates_url, json={"stations": [209, 211, 302]} + ) assert response.status_code == 200 diff --git a/backend/packages/wps-api/src/app/tests/morecast_v2/test_predictions.py b/backend/packages/wps-api/src/app/tests/morecast_v2/test_predictions.py index 703d441779..bf39c485b6 100644 --- a/backend/packages/wps-api/src/app/tests/morecast_v2/test_predictions.py +++ b/backend/packages/wps-api/src/app/tests/morecast_v2/test_predictions.py @@ -1,17 +1,17 @@ -from wps_shared.schemas.morecast_v2 import WeatherDeterminate, WeatherIndeterminate from datetime import datetime + from wps_shared.weather_models.fetch.predictions import post_process_fetched_predictions +from wps_wf1.models import WeatherDeterminate, WeatherIndeterminate -def build_weather_indeterminate(station_code: int, - station_name: str, - determinate: WeatherDeterminate, - utc_timestamp: datetime): +def build_weather_indeterminate( + station_code: int, station_name: str, determinate: WeatherDeterminate, utc_timestamp: datetime +): return WeatherIndeterminate( station_code=station_code, station_name=station_name, 
determinate=determinate, - utc_timestamp=utc_timestamp + utc_timestamp=utc_timestamp, ) @@ -21,39 +21,59 @@ def test_post_process_fetched_predictions_empty(): def test_post_process_fetched_predictions_same_everything(): weather_indeterminates = [ - build_weather_indeterminate(1, "one", WeatherDeterminate.GDPS, utc_timestamp=datetime(2023, 1, 1)), - build_weather_indeterminate(1, "one", WeatherDeterminate.GDPS, utc_timestamp=datetime(2023, 1, 1)) + build_weather_indeterminate( + 1, "one", WeatherDeterminate.GDPS, utc_timestamp=datetime(2023, 1, 1) + ), + build_weather_indeterminate( + 1, "one", WeatherDeterminate.GDPS, utc_timestamp=datetime(2023, 1, 1) + ), ] assert post_process_fetched_predictions(weather_indeterminates) == [weather_indeterminates[0]] def test_post_process_fetched_predictions_same_station_date_different_model(): weather_indeterminates = [ - build_weather_indeterminate(1, "one", WeatherDeterminate.GDPS, utc_timestamp=datetime(2023, 1, 1)), - build_weather_indeterminate(1, "one", WeatherDeterminate.HRDPS, utc_timestamp=datetime(2023, 1, 1)) + build_weather_indeterminate( + 1, "one", WeatherDeterminate.GDPS, utc_timestamp=datetime(2023, 1, 1) + ), + build_weather_indeterminate( + 1, "one", WeatherDeterminate.HRDPS, utc_timestamp=datetime(2023, 1, 1) + ), ] assert post_process_fetched_predictions(weather_indeterminates) == weather_indeterminates def test_post_process_fetched_predictions_same_station_model_different_date(): weather_indeterminates = [ - build_weather_indeterminate(1, "one", WeatherDeterminate.GDPS, utc_timestamp=datetime(2023, 1, 1)), - build_weather_indeterminate(1, "one", WeatherDeterminate.GDPS, utc_timestamp=datetime(2023, 1, 2)) + build_weather_indeterminate( + 1, "one", WeatherDeterminate.GDPS, utc_timestamp=datetime(2023, 1, 1) + ), + build_weather_indeterminate( + 1, "one", WeatherDeterminate.GDPS, utc_timestamp=datetime(2023, 1, 2) + ), ] assert post_process_fetched_predictions(weather_indeterminates) == weather_indeterminates 
def test_post_process_fetched_predictions_same_date_model_different_station(): weather_indeterminates = [ - build_weather_indeterminate(1, "one", WeatherDeterminate.GDPS, utc_timestamp=datetime(2023, 1, 1)), - build_weather_indeterminate(2, "two", WeatherDeterminate.GDPS, utc_timestamp=datetime(2023, 1, 1)) + build_weather_indeterminate( + 1, "one", WeatherDeterminate.GDPS, utc_timestamp=datetime(2023, 1, 1) + ), + build_weather_indeterminate( + 2, "two", WeatherDeterminate.GDPS, utc_timestamp=datetime(2023, 1, 1) + ), ] assert post_process_fetched_predictions(weather_indeterminates) == weather_indeterminates def test_post_process_fetched_predictions_different_everything(): weather_indeterminates = [ - build_weather_indeterminate(1, "one", WeatherDeterminate.GDPS, utc_timestamp=datetime(2023, 1, 1)), - build_weather_indeterminate(2, "two", WeatherDeterminate.HRDPS, utc_timestamp=datetime(2023, 1, 2)) + build_weather_indeterminate( + 1, "one", WeatherDeterminate.GDPS, utc_timestamp=datetime(2023, 1, 1) + ), + build_weather_indeterminate( + 2, "two", WeatherDeterminate.HRDPS, utc_timestamp=datetime(2023, 1, 2) + ), ] assert post_process_fetched_predictions(weather_indeterminates) == weather_indeterminates diff --git a/backend/packages/wps-api/src/app/tests/observations/test_observations_endpoint.py b/backend/packages/wps-api/src/app/tests/observations/test_observations_endpoint.py index e5f6c8a611..ce4ef59c37 100644 --- a/backend/packages/wps-api/src/app/tests/observations/test_observations_endpoint.py +++ b/backend/packages/wps-api/src/app/tests/observations/test_observations_endpoint.py @@ -1,8 +1,8 @@ -from starlette.testclient import TestClient +from unittest.mock import AsyncMock + import pytest -from wps_shared.schemas.observations import WeatherStationHourlyReadings -from wps_shared.schemas.stations import WeatherStation -from wps_shared.wildfire_one import wfwx_api +from starlette.testclient import TestClient +from wps_wf1.models import WeatherStation, 
WeatherStationHourlyReadings @pytest.fixture() @@ -14,57 +14,45 @@ def client(): @pytest.mark.usefixtures("mock_jwt_decode") -def test_multiple_stations(client: TestClient, monkeypatch): - """ Very simple test that checks that: +def test_multiple_stations(client: TestClient, mocker, mock_wfwx_api): + """Very simple test that checks that: - the bot exits with a success code - the expected number of records are saved. """ codes = [1, 2] - async def mock_get_auth_header(_): - return dict() + mock_hourly_readings = [ + WeatherStationHourlyReadings( + values=[], station=WeatherStation(code=codes[0], name="one", lat=1.0, long=1.0) + ), + WeatherStationHourlyReadings( + values=[], station=WeatherStation(code=codes[1], name="two", lat=2.0, long=2.0) + ), + ] - async def mock_hourly_readings(*_, **__): - return [ - WeatherStationHourlyReadings(values=[], - station=WeatherStation(code=codes[0], - name='one', - lat=1.0, - long=1.0)), - WeatherStationHourlyReadings(values=[], - station=WeatherStation(code=codes[1], - name='two', - lat=2.0, - long=2.0))] + mock_wfwx_api.get_hourly_readings = AsyncMock(return_value=mock_hourly_readings) + mocker.patch("app.hourlies.create_wfwx_api", return_value=mock_wfwx_api) - monkeypatch.setattr(wfwx_api, 'get_auth_header', mock_get_auth_header) - monkeypatch.setattr('app.hourlies.get_hourly_readings', mock_hourly_readings) - - response = client.post('/api/observations/', json={"stations": codes}) - assert len(response.json()['hourlies']) == 2 + response = client.post("/api/observations/", json={"stations": codes}) + assert len(response.json()["hourlies"]) == 2 @pytest.mark.usefixtures("mock_jwt_decode") -def test_single_station_single_value(client: TestClient, monkeypatch): - """ Very simple test that checks that: +def test_single_station_single_value(client: TestClient, mocker, mock_wfwx_api): + """Very simple test that checks that: - the bot exits with a success code - the expected number of records are saved. 
""" codes = [1] - async def mock_get_auth_header(_): - return dict() - - async def mock_hourly_readings(*_, **__): - return [ - WeatherStationHourlyReadings(values=[], - station=WeatherStation(code=codes[0], - name='one', - lat=1.0, - long=1.0))] + mock_hourly_readings = [ + WeatherStationHourlyReadings( + values=[], station=WeatherStation(code=codes[0], name="one", lat=1.0, long=1.0) + ) + ] - monkeypatch.setattr(wfwx_api, 'get_auth_header', mock_get_auth_header) - monkeypatch.setattr('app.hourlies.get_hourly_readings', mock_hourly_readings) + mock_wfwx_api.get_hourly_readings = AsyncMock(return_value=mock_hourly_readings) + mocker.patch("app.hourlies.create_wfwx_api", return_value=mock_wfwx_api) - response = client.post('/api/observations/', json={"stations": codes}) - assert len(response.json()['hourlies']) == 1 + response = client.post("/api/observations/", json={"stations": codes}) + assert len(response.json()["hourlies"]) == 1 diff --git a/backend/packages/wps-api/src/app/tests/test_station_groups.py b/backend/packages/wps-api/src/app/tests/test_station_groups.py index 90cd0b8eca..d2c41c6cdc 100644 --- a/backend/packages/wps-api/src/app/tests/test_station_groups.py +++ b/backend/packages/wps-api/src/app/tests/test_station_groups.py @@ -1,47 +1,53 @@ -from httpx import ASGITransport, AsyncClient -import pytest from unittest.mock import patch +import pytest +from httpx import ASGITransport, AsyncClient -station_groups_get_url = '/api/stations/groups' -station_groups_members_post_url = '/api/stations/groups/members' +station_groups_get_url = "/api/stations/groups" +station_groups_members_post_url = "/api/stations/groups/members" @pytest.fixture() async def async_client(): from app.main import app as test_app - async with AsyncClient(transport=ASGITransport(app=test_app), base_url="https://test") as test_client: + async with AsyncClient( + transport=ASGITransport(app=test_app), base_url="https://test" + ) as test_client: yield test_client @pytest.mark.anyio async 
def test_get_station_groups_unauthorized(anyio_backend, async_client: AsyncClient): - """ unauthenticated clients have no access """ + """unauthenticated clients have no access""" response = await async_client.get(station_groups_get_url) assert response.status_code == 401 -@patch("wps_shared.wildfire_one.wfwx_api.get_station_groups", return_value=[]) @pytest.mark.usefixtures("mock_jwt_decode") @pytest.mark.anyio -async def test_get_station_groups_authorized(anyio_backend, async_client: AsyncClient): - """ authenticated client can access """ +async def test_get_station_groups_authorized( + anyio_backend, async_client: AsyncClient, mock_wfwx_api, mocker +): + """authenticated client can access""" + mocker.patch("app.routers.stations.create_wfwx_api", return_value=mock_wfwx_api) response = await async_client.get(station_groups_get_url) assert response.status_code == 200 @pytest.mark.anyio async def test_get_station_groups_members_unauthorized(anyio_backend, async_client: AsyncClient): - """ unauthenticated clients have no access """ + """unauthenticated clients have no access""" response = await async_client.post(station_groups_members_post_url) assert response.status_code == 401 -@patch("wps_shared.wildfire_one.wfwx_api.get_stations_by_group_ids", return_value=[]) @pytest.mark.usefixtures("mock_jwt_decode") @pytest.mark.anyio -async def test_get_station_groups_members_authorized(anyio_backend, async_client: AsyncClient): - """ authenticated clients can access """ +async def test_get_station_groups_members_authorized( + anyio_backend, async_client: AsyncClient, mock_wfwx_api, mocker +): + """authenticated clients can access""" + mocker.patch("app.routers.stations.create_wfwx_api", return_value=mock_wfwx_api) response = await async_client.post(station_groups_members_post_url, json={"group_ids": ["1"]}) assert response.status_code == 200 diff --git a/backend/packages/wps-jobs/pyproject.toml b/backend/packages/wps-jobs/pyproject.toml index a35edbfedb..ed61af8152 100644 
--- a/backend/packages/wps-jobs/pyproject.toml +++ b/backend/packages/wps-jobs/pyproject.toml @@ -17,6 +17,7 @@ dependencies = [ "xarray>=2025.3.1,<2026", "herbie-data>=2025.11.1,<2026", "gdal==3.9.2", + "wps-wf1" ] [project.optional-dependencies] @@ -26,6 +27,7 @@ dev = [ [tool.uv.sources] wps-shared = { workspace = true } +wps-wf1 = { workspace = true } [build-system] requires = ["hatchling"] diff --git a/backend/packages/wps-jobs/src/tests/weather_models/test_common_model_fetchers.py b/backend/packages/wps-jobs/src/tests/weather_models/test_common_model_fetchers.py index 0fb4c7807b..785d6b7b99 100644 --- a/backend/packages/wps-jobs/src/tests/weather_models/test_common_model_fetchers.py +++ b/backend/packages/wps-jobs/src/tests/weather_models/test_common_model_fetchers.py @@ -3,13 +3,12 @@ import numpy import pytest - from weather_model_jobs.common_model_fetchers import ( ModelValueProcessor, - WeatherStation, accumulate_nam_precipitation, ) from wps_shared.db.models.weather_models import ModelRunGridSubsetPrediction +from wps_wf1.models import WeatherStation ZERO_HOUR_TIMESTAMP = datetime(2023, 9, 7, 0, 0, 0) TWELVE_HOUR_TIMESTAMP = datetime(2023, 9, 7, 12, 0, 0) diff --git a/backend/packages/wps-jobs/src/tests/weather_models/test_ecmwf.py b/backend/packages/wps-jobs/src/tests/weather_models/test_ecmwf.py index 1ed2009fbe..05403a8f7c 100644 --- a/backend/packages/wps-jobs/src/tests/weather_models/test_ecmwf.py +++ b/backend/packages/wps-jobs/src/tests/weather_models/test_ecmwf.py @@ -1,13 +1,13 @@ +import math import os from datetime import datetime from unittest.mock import MagicMock, PropertyMock import numpy as np import pytest +import weather_model_jobs.ecmwf from aiohttp import ClientSession from pytest_mock import MockerFixture - -import weather_model_jobs.ecmwf from weather_model_jobs import ModelEnum from weather_model_jobs.ecmwf import ( ECMWF, @@ -19,8 +19,8 @@ from weather_model_jobs.utils.process_grib import PredictionModelNotFound from 
wps_shared.db.crud.model_run_repository import ModelRunRepository from wps_shared.db.models.weather_models import PredictionModelRunTimestamp -from wps_shared.schemas.stations import WeatherStation from wps_shared.tests.common import default_mock_client_get +from wps_wf1.models import WeatherStation num_forecast_hours = len(list(get_ecmwf_forecast_hours())) @@ -93,8 +93,8 @@ def test_get_stations_dataframe(): ] df = get_stations_dataframe(transformer, stations) assert len(df) == 2 - assert df.iloc[0]["latitude"] == 11.0 - assert df.iloc[0]["longitude"] == 21.0 + assert math.isclose(df.iloc[0]["latitude"], 11.0) + assert math.isclose(df.iloc[0]["longitude"], 21.0) def test_ecmwf_process_model_run_no_url(mock_herbie_instance): @@ -269,6 +269,7 @@ def test_main_success(mocker: MockerFixture, monkeypatch): """Test the main function when it runs successfully.""" async def mock_process_models(): + """No implementation required.""" pass monkeypatch.setattr(ClientSession, "get", default_mock_client_get) diff --git a/backend/packages/wps-jobs/src/tests/weather_models/test_ecmwf_prediction_processor.py b/backend/packages/wps-jobs/src/tests/weather_models/test_ecmwf_prediction_processor.py index b923cae035..a07c169518 100644 --- a/backend/packages/wps-jobs/src/tests/weather_models/test_ecmwf_prediction_processor.py +++ b/backend/packages/wps-jobs/src/tests/weather_models/test_ecmwf_prediction_processor.py @@ -1,19 +1,19 @@ -import pytest -from unittest.mock import MagicMock from datetime import datetime, timedelta, timezone -from unittest.mock import call +import math +from unittest.mock import MagicMock, call -from pytest_mock import MockerFixture +import pytest import weather_model_jobs.ecmwf_prediction_processor +from pytest_mock import MockerFixture from weather_model_jobs import ModelEnum from weather_model_jobs.ecmwf_prediction_processor import ECMWFPredictionProcessor -from wps_shared.schemas.stations import WeatherStation from wps_shared.db.crud.model_run_repository 
import ModelRunRepository from wps_shared.db.models.weather_models import ( ModelRunPrediction, PredictionModelRunTimestamp, WeatherStationModelPrediction, ) +from wps_wf1.models import WeatherStation @pytest.fixture @@ -352,7 +352,7 @@ def test_calculate_past_24_hour_precip_with_previous_prediction( prediction.prediction_model_run_timestamp_id, prediction.prediction_timestamp - timedelta(days=1), ) - assert result == 5.0 + assert math.isclose(result, 5.0) def test_calculate_past_24_hour_precip_without_previous_prediction( @@ -385,7 +385,7 @@ def test_calculate_past_24_hour_precip_without_previous_prediction( prediction.prediction_timestamp - timedelta(days=1), model_run.prediction_run_timestamp, ) - assert result == 3.0 + assert math.isclose(result, 3.0) @pytest.mark.parametrize( @@ -524,13 +524,13 @@ def test_initialize_station_prediction(setup_processor, mock_model_run_data): ) processor._calculate_delta_precip.assert_called_once_with(None, station_prediction) - assert result.tmp_tgl_2 == 25.0 - assert result.rh_tgl_2 == 60.0 - assert result.apcp_sfc_0 == 10.0 - assert result.precip_24h == 15.0 - assert result.delta_precip == 5.0 - assert result.wind_tgl_10 == 5.5 - assert result.wdir_tgl_10 == 180.0 + assert math.isclose(result.tmp_tgl_2, 25.0) + assert math.isclose(result.rh_tgl_2, 60.0) + assert math.isclose(result.apcp_sfc_0, 10.0) + assert math.isclose(result.precip_24h, 15.0) + assert math.isclose(result.delta_precip, 5.0) + assert math.isclose(result.wind_tgl_10, 5.5) + assert math.isclose(result.wdir_tgl_10, 180.0) def test_apply_bias_adjustments(setup_processor, mock_model_run_data): @@ -577,11 +577,11 @@ def test_apply_bias_adjustments(setup_processor, mock_model_run_data): station_prediction.precip_24h, station_prediction.prediction_timestamp ) - assert result.bias_adjusted_temperature == 22.5 - assert result.bias_adjusted_rh == 55.0 - assert result.bias_adjusted_wind_speed == 6.5 - assert result.bias_adjusted_wdir == 190.0 - assert 
result.bias_adjusted_precip_24h == 12.0 + assert math.isclose(result.bias_adjusted_temperature, 22.5) + assert math.isclose(result.bias_adjusted_rh, 55.0) + assert math.isclose(result.bias_adjusted_wind_speed, 6.5) + assert math.isclose(result.bias_adjusted_wdir, 190.0) + assert math.isclose(result.bias_adjusted_precip_24h, 12.0) def test_apply_interpolated_bias_adjustments(setup_processor, mock_model_run_data): diff --git a/backend/packages/wps-jobs/src/tests/weather_models/test_models_common.py b/backend/packages/wps-jobs/src/tests/weather_models/test_models_common.py index 286f3c97bf..5d94d59763 100644 --- a/backend/packages/wps-jobs/src/tests/weather_models/test_models_common.py +++ b/backend/packages/wps-jobs/src/tests/weather_models/test_models_common.py @@ -1,11 +1,12 @@ from datetime import datetime -from wps_shared.schemas.stations import WeatherStation, Season + from shapely import wkt -from wps_shared.db.models.weather_models import ProcessedModelRunUrl, ModelRunGridSubsetPrediction +from wps_shared.db.models.weather_models import ModelRunGridSubsetPrediction, ProcessedModelRunUrl +from wps_wf1.models import Season, WeatherStation class MockResponse: - """ Mocked out request.Response object """ + """Mocked out request.Response object""" def __init__(self, status_code, content=None): self.status_code = status_code @@ -20,35 +21,44 @@ def mock_get_model_run_predictions(*args): apcp_sfc_0=[2, 4, 3, 6], wdir_tgl_10=[10, 20, 30, 40], wind_tgl_10=[1, 2, 3, 4], - prediction_timestamp=datetime(2023, 2, 21, 18)), + prediction_timestamp=datetime(2023, 2, 21, 18), + ), ModelRunGridSubsetPrediction( tmp_tgl_2=[1, 2, 3, 4], rh_tgl_2=[20, 30, 40, 50], apcp_sfc_0=[3, 6, 3, 4], wdir_tgl_10=[280, 290, 300, 310], wind_tgl_10=[5, 6, 7, 8], - prediction_timestamp=datetime(2023, 2, 21, 21)), + prediction_timestamp=datetime(2023, 2, 21, 21), + ), ModelRunGridSubsetPrediction( tmp_tgl_2=[1, 2, 3, 4], rh_tgl_2=None, apcp_sfc_0=[3, 6, 3, 4], wdir_tgl_10=[20, 30, 40, 50], 
wind_tgl_10=[4, 3, 2, 1], - prediction_timestamp=datetime(2023, 2, 21, 21)) + prediction_timestamp=datetime(2023, 2, 21, 21), + ), ] return result def mock_get_stations(*args): - """ Mocked out listing of weather stations """ - return [WeatherStation( - code=123, name='Test', lat=50.7, long=-120.425, ecodivision_name='Test', - core_season=Season( - start_month=5, start_day=1, end_month=9, end_day=21)), ] + """Mocked out listing of weather stations""" + return [ + WeatherStation( + code=123, + name="Test", + lat=50.7, + long=-120.425, + ecodivision_name="Test", + core_season=Season(start_month=5, start_day=1, end_month=9, end_day=21), + ), + ] def mock_get_processed_file_count(*args): - """ Mocked out get processed file count """ + """Mocked out get processed file count""" return 162 @@ -59,6 +69,8 @@ def mock_get_processed_file_record(called: bool): return None -geom = ("POLYGON ((-120.525 50.77500000000001, -120.375 50.77500000000001,-120.375 50.62500000000001," - " -120.525 50.62500000000001, -120.525 50.77500000000001))") +geom = ( + "POLYGON ((-120.525 50.77500000000001, -120.375 50.77500000000001,-120.375 50.62500000000001," + " -120.525 50.62500000000001, -120.525 50.77500000000001))" +) shape = wkt.loads(geom) diff --git a/backend/packages/wps-jobs/src/weather_model_jobs/common_model_fetchers.py b/backend/packages/wps-jobs/src/weather_model_jobs/common_model_fetchers.py index 5587b1ddb9..90982eecd4 100644 --- a/backend/packages/wps-jobs/src/weather_model_jobs/common_model_fetchers.py +++ b/backend/packages/wps-jobs/src/weather_model_jobs/common_model_fetchers.py @@ -1,37 +1,38 @@ -from typing import List import logging -import numpy from datetime import datetime, timedelta -from pyproj import Geod +from typing import List + +import numpy import numpy as np +import wps_shared.db.database +import wps_shared.utils.time as time_utils +from pyproj import Geod from sqlalchemy.orm import Session +from weather_model_jobs.machine_learning import 
StationMachineLearning +from weather_model_jobs.utils.interpolate import ( + construct_interpolated_noon_prediction, + interpolate_between_two_points, +) from wps_shared import config +from wps_shared.db.crud.observations import get_accumulated_precipitation from wps_shared.db.crud.weather_models import ( - get_processed_file_record, - get_processed_file_count, - get_prediction_model_run_timestamp_records, + delete_model_run_predictions, get_model_run_predictions_for_station, + get_prediction_model_run_timestamp_records, + get_processed_file_count, + get_processed_file_record, get_weather_station_model_prediction, - delete_model_run_predictions, -) -from weather_model_jobs.utils.interpolate import ( - construct_interpolated_noon_prediction, - interpolate_between_two_points, ) -from weather_model_jobs.machine_learning import StationMachineLearning -from wps_shared.weather_models import ModelEnum -from wps_shared.schemas.stations import WeatherStation -from wps_shared.wps_logging import configure_logging -import wps_shared.utils.time as time_utils -from wps_shared.stations import get_stations_synchronously from wps_shared.db.models.weather_models import ( - ProcessedModelRunUrl, + ModelRunPrediction, PredictionModelRunTimestamp, + ProcessedModelRunUrl, WeatherStationModelPrediction, - ModelRunPrediction, ) -import wps_shared.db.database -from wps_shared.db.crud.observations import get_accumulated_precipitation +from wps_shared.stations import get_stations_synchronously +from wps_shared.weather_models import ModelEnum +from wps_shared.wps_logging import configure_logging +from wps_wf1.models import WeatherStation # If running as its own process, configure logging appropriately. 
if __name__ == "__main__": diff --git a/backend/packages/wps-jobs/src/weather_model_jobs/ecmwf.py b/backend/packages/wps-jobs/src/weather_model_jobs/ecmwf.py index 617e31f9d4..394c69231a 100644 --- a/backend/packages/wps-jobs/src/weather_model_jobs/ecmwf.py +++ b/backend/packages/wps-jobs/src/weather_model_jobs/ecmwf.py @@ -1,3 +1,4 @@ +import asyncio import logging import os import sys @@ -6,28 +7,27 @@ from typing import List import pandas as pd +import wps_shared.utils.time as time_utils from herbie import Herbie -import asyncio from osgeo import gdal from pyproj import CRS, Transformer -from wps_shared.rocketchat_notifications import send_rocketchat_notification -from wps_shared.geospatial.geospatial import NAD83_CRS, get_transformer -import wps_shared.utils.time as time_utils -from wps_shared.schemas.stations import WeatherStation -from wps_shared.wps_logging import configure_logging -from wps_shared.stations import get_stations_asynchronously -from wps_shared.db.database import get_write_session_scope -from wps_shared.db.crud.model_run_repository import ModelRunRepository from weather_model_jobs import ( ModelEnum, ModelRunInfo, ModelRunProcessResult, ProjectionEnum, ) -from weather_model_jobs.ecmwf_model_processor import ECMWFModelProcessor, TEMP +from weather_model_jobs.ecmwf_model_processor import TEMP, ECMWFModelProcessor from weather_model_jobs.ecmwf_prediction_processor import ECMWFPredictionProcessor from weather_model_jobs.utils.process_grib import PredictionModelNotFound +from wps_shared.db.crud.model_run_repository import ModelRunRepository +from wps_shared.db.database import get_write_session_scope from wps_shared.db.models.weather_models import ModelRunPrediction, PredictionModelRunTimestamp +from wps_shared.geospatial.geospatial import NAD83_CRS, get_transformer +from wps_shared.rocketchat_notifications import send_rocketchat_notification +from wps_shared.stations import get_stations_asynchronously +from wps_shared.wps_logging import 
configure_logging +from wps_wf1.models import WeatherStation gdal.UseExceptions() diff --git a/backend/packages/wps-jobs/src/weather_model_jobs/ecmwf_prediction_processor.py b/backend/packages/wps-jobs/src/weather_model_jobs/ecmwf_prediction_processor.py index 220a987634..941a4667b8 100644 --- a/backend/packages/wps-jobs/src/weather_model_jobs/ecmwf_prediction_processor.py +++ b/backend/packages/wps-jobs/src/weather_model_jobs/ecmwf_prediction_processor.py @@ -3,14 +3,6 @@ from datetime import datetime, timedelta, timezone from typing import Dict, List -from wps_shared.db.crud.model_run_repository import ModelRunRepository -from wps_shared.db.models.weather_models import ( - ModelRunPrediction, - PredictionModelRunTimestamp, - WeatherStationModelPrediction, -) -from wps_shared.schemas.stations import WeatherStation - from weather_model_jobs import ModelEnum from weather_model_jobs.machine_learning import StationMachineLearning from weather_model_jobs.utils.interpolate import ( @@ -18,6 +10,13 @@ construct_interpolated_noon_prediction, interpolate_between_two_points, ) +from wps_shared.db.crud.model_run_repository import ModelRunRepository +from wps_shared.db.models.weather_models import ( + ModelRunPrediction, + PredictionModelRunTimestamp, + WeatherStationModelPrediction, +) +from wps_wf1.models import WeatherStation logger = logging.getLogger(__name__) diff --git a/backend/packages/wps-shared/pyproject.toml b/backend/packages/wps-shared/pyproject.toml index 9ae17e81c5..fe8ce18613 100644 --- a/backend/packages/wps-shared/pyproject.toml +++ b/backend/packages/wps-shared/pyproject.toml @@ -22,6 +22,7 @@ dependencies = [ "asyncpg>=0.30.0,<1", "redis>=7.0.0,<8", "gdal==3.9.2", + "wps-wf1", ] [project.optional-dependencies] @@ -35,3 +36,6 @@ build-backend = "hatchling.build" [tool.hatch.build.targets.wheel] packages = ["src/wps_shared"] + +[tool.uv.sources] +wps-wf1 = { workspace = true } diff --git a/backend/packages/wps-shared/src/wps_shared/db/crud/forecasts.py 
b/backend/packages/wps-shared/src/wps_shared/db/crud/forecasts.py index 23b0e82bbb..185c560957 100644 --- a/backend/packages/wps-shared/src/wps_shared/db/crud/forecasts.py +++ b/backend/packages/wps-shared/src/wps_shared/db/crud/forecasts.py @@ -1,27 +1,29 @@ -""" CRUD operations relating to forecasts made by forecasters. -""" +"""CRUD operations relating to forecasts made by forecasters.""" + import datetime -from sqlalchemy.orm import Session + from sqlalchemy import desc -from wps_shared.schemas.stations import StationCodeList +from sqlalchemy.orm import Session +from wps_wf1.models import StationCodeList + from wps_shared.db.models.forecasts import NoonForecast -def query_noon_forecast_records(session: Session, - station_codes: StationCodeList, - start_date: datetime, - end_date: datetime - ): - """ Sends a query to get noon forecast records """ - return session.query(NoonForecast)\ - .filter(NoonForecast.station_code.in_(station_codes))\ - .filter(NoonForecast.weather_date >= start_date)\ - .filter(NoonForecast.weather_date <= end_date)\ - .order_by(NoonForecast.weather_date)\ +def query_noon_forecast_records( + session: Session, station_codes: StationCodeList, start_date: datetime, end_date: datetime +): + """Sends a query to get noon forecast records""" + return ( + session.query(NoonForecast) + .filter(NoonForecast.station_code.in_(station_codes)) + .filter(NoonForecast.weather_date >= start_date) + .filter(NoonForecast.weather_date <= end_date) + .order_by(NoonForecast.weather_date) .order_by(desc(NoonForecast.created_at)) + ) def save_noon_forecast(session: Session, noon_forecast: NoonForecast): - """ Abstraction for writing NoonForecast to database. 
""" + """Abstraction for writing NoonForecast to database.""" session.add(noon_forecast) session.commit() diff --git a/backend/packages/wps-shared/src/wps_shared/db/crud/observations.py b/backend/packages/wps-shared/src/wps_shared/db/crud/observations.py index dc5a729a25..8450f68aec 100644 --- a/backend/packages/wps-shared/src/wps_shared/db/crud/observations.py +++ b/backend/packages/wps-shared/src/wps_shared/db/crud/observations.py @@ -1,84 +1,106 @@ -""" CRUD operations relating to observed readings (a.k.a "hourlies") -""" +"""CRUD operations relating to observed readings (a.k.a "hourlies")""" + import datetime from typing import List + from sqlalchemy import and_, select, text -from sqlalchemy.sql import func from sqlalchemy.orm import Session -from wps_shared.db.models.weather_models import (ModelRunPrediction, PredictionModel, PredictionModelRunTimestamp, - WeatherStationModelPrediction) +from sqlalchemy.sql import func + from wps_shared.db.models.observations import HourlyActual +from wps_shared.db.models.weather_models import ( + ModelRunPrediction, + PredictionModel, + PredictionModelRunTimestamp, + WeatherStationModelPrediction, +) def get_hourly_actuals( - session: Session, - station_codes: List[int], - start_date: datetime, - end_date: datetime = None): - """ Query for hourly actuals for given stations, from stated start_date to end_date. + session: Session, station_codes: List[int], start_date: datetime, end_date: datetime = None +): + """Query for hourly actuals for given stations, from stated start_date to end_date. 
:param end_date: If specified, return up to and including the end_date """ - query = session.query(HourlyActual)\ - .filter(HourlyActual.station_code.in_(station_codes))\ - .filter(HourlyActual.weather_date >= start_date)\ - .filter(HourlyActual.temp_valid == True)\ + query = ( + session.query(HourlyActual) + .filter(HourlyActual.station_code.in_(station_codes)) + .filter(HourlyActual.weather_date >= start_date) + .filter(HourlyActual.temp_valid == True) .filter(HourlyActual.rh_valid == True) + ) if end_date is not None: query = query.filter(HourlyActual.weather_date <= end_date) - query = query.order_by(HourlyActual.station_code)\ - .order_by(HourlyActual.weather_date) + query = query.order_by(HourlyActual.station_code).order_by(HourlyActual.weather_date) return query def get_actuals_left_outer_join_with_predictions( - session: Session, model_id: int, station_code: int, - start_date: datetime, end_date: datetime): + session: Session, model_id: int, station_code: int, start_date: datetime, end_date: datetime +): """ NOTE: Can improve this query by only returning the most recent prediction, maybe using nested queries. It works for now - but things could be faster. 
""" - return session.query(HourlyActual, ModelRunPrediction)\ - .outerjoin(ModelRunPrediction, - and_(ModelRunPrediction.prediction_timestamp == HourlyActual.weather_date, - ModelRunPrediction.station_code == station_code))\ - .outerjoin(PredictionModelRunTimestamp, - and_(PredictionModelRunTimestamp.id == - ModelRunPrediction.prediction_model_run_timestamp_id))\ - .filter(HourlyActual.station_code == station_code)\ - .filter(HourlyActual.weather_date >= start_date)\ - .filter(HourlyActual.temp_valid == True)\ - .filter(HourlyActual.rh_valid == True)\ - .filter(HourlyActual.weather_date <= end_date)\ - .filter(PredictionModelRunTimestamp.prediction_model_id == model_id)\ - .order_by(HourlyActual.station_code)\ - .order_by(HourlyActual.weather_date)\ + return ( + session.query(HourlyActual, ModelRunPrediction) + .outerjoin( + ModelRunPrediction, + and_( + ModelRunPrediction.prediction_timestamp == HourlyActual.weather_date, + ModelRunPrediction.station_code == station_code, + ), + ) + .outerjoin( + PredictionModelRunTimestamp, + and_( + PredictionModelRunTimestamp.id + == ModelRunPrediction.prediction_model_run_timestamp_id + ), + ) + .filter(HourlyActual.station_code == station_code) + .filter(HourlyActual.weather_date >= start_date) + .filter(HourlyActual.temp_valid == True) + .filter(HourlyActual.rh_valid == True) + .filter(HourlyActual.weather_date <= end_date) + .filter(PredictionModelRunTimestamp.prediction_model_id == model_id) + .order_by(HourlyActual.station_code) + .order_by(HourlyActual.weather_date) .order_by(PredictionModelRunTimestamp.prediction_run_timestamp.desc()) + ) def save_hourly_actual(session: Session, hourly_actual: HourlyActual): - """ Abstraction for writing HourlyActual to database. 
""" + """Abstraction for writing HourlyActual to database.""" session.add(hourly_actual) session.commit() -def get_accumulated_precipitation(session: Session, station_code: int, start_datetime: datetime, end_datetime: datetime): - """ Get the accumulated precipitation for a station by datetime range. """ - stmt = select(func.sum(HourlyActual.precipitation))\ - .where(HourlyActual.station_code == station_code, HourlyActual.weather_date > start_datetime, HourlyActual.weather_date <= end_datetime) +def get_accumulated_precipitation( + session: Session, station_code: int, start_datetime: datetime, end_datetime: datetime +): + """Get the accumulated precipitation for a station by datetime range.""" + stmt = select(func.sum(HourlyActual.precipitation)).where( + HourlyActual.station_code == station_code, + HourlyActual.weather_date > start_datetime, + HourlyActual.weather_date <= end_datetime, + ) result = session.scalars(stmt).first() if result is None: return 0 return result -def get_accumulated_precip_by_24h_interval(session: Session, station_code: int, start_datetime: datetime, end_datetime: datetime): - """ Get the accumulated precip for 24 hour intervals for a given station code within the specified time interval. +def get_accumulated_precip_by_24h_interval( + session: Session, station_code: int, start_datetime: datetime, end_datetime: datetime +): + """Get the accumulated precip for 24 hour intervals for a given station code within the specified time interval. :param session: The ORM/database session. :param station_code: The numeric code identifying the weather station of interest. :param start_datetime: The earliest date and time of interest. :param end_datetime: The latest date and time of interest. - + Note: I couldn't construct this query in SQLAlchemy, hence the need for the 'text' based query. 
generate_series(\'{}\', \'{}\', '24 hours'::interval) @@ -90,7 +112,7 @@ def get_accumulated_precip_by_24h_interval(session: Session, station_code: int, 2023-11-01 20:00:00 2023-11-02 20:00:00 2023-11-03 20:00:00 - + We then join the HourlyActuals table so that we can sum hourly precip in a 24 hour period. The join is based on the weather_date field in the HourlyActuals table being in a 24 hour range using this odd looking syntax: weather_date <@ tstzrange(day, day + '24 hours', '(]') @@ -104,7 +126,7 @@ def get_accumulated_precip_by_24h_interval(session: Session, station_code: int, 2023-11-01 01:00:00 .... 2023-11-01 19:00:00 - 2023-11-01 20:00:00 + 2023-11-01 20:00:00 """ stmt = """ SELECT day, station_code, sum(precipitation) actual_precip_24h @@ -123,21 +145,33 @@ def get_accumulated_precip_by_24h_interval(session: Session, station_code: int, return result.all() -def get_predicted_daily_precip(session: Session, model: PredictionModel, station_code: int, start_datetime: datetime, end_datetime: datetime): - """ Gets rows from WeatherStationModelPrediction for the given model and station within the +def get_predicted_daily_precip( + session: Session, + model: PredictionModel, + station_code: int, + start_datetime: datetime, + end_datetime: datetime, +): + """Gets rows from WeatherStationModelPrediction for the given model and station within the specified time interval at 20:00:00 UTC each day. :param session: The ORM/database session :param model: The numeric weather prediction model :param station_code: The code identifying the weather station. :param start_datetime: The earliest date and time of interest. - :param end_datetime: The latest date and time of interest. + :param end_datetime: The latest date and time of interest. 
""" - result = session.query(WeatherStationModelPrediction)\ - .join(PredictionModelRunTimestamp, PredictionModelRunTimestamp.id == WeatherStationModelPrediction.prediction_model_run_timestamp_id)\ - .filter(PredictionModelRunTimestamp.prediction_model_id == model.id)\ - .filter(WeatherStationModelPrediction.station_code == station_code)\ - .filter(WeatherStationModelPrediction.prediction_timestamp >= start_datetime)\ - .filter(WeatherStationModelPrediction.prediction_timestamp < end_datetime)\ - .filter(func.date_part('hour', WeatherStationModelPrediction.prediction_timestamp) == 20)\ + result = ( + session.query(WeatherStationModelPrediction) + .join( + PredictionModelRunTimestamp, + PredictionModelRunTimestamp.id + == WeatherStationModelPrediction.prediction_model_run_timestamp_id, + ) + .filter(PredictionModelRunTimestamp.prediction_model_id == model.id) + .filter(WeatherStationModelPrediction.station_code == station_code) + .filter(WeatherStationModelPrediction.prediction_timestamp >= start_datetime) + .filter(WeatherStationModelPrediction.prediction_timestamp < end_datetime) + .filter(func.date_part("hour", WeatherStationModelPrediction.prediction_timestamp) == 20) .order_by(WeatherStationModelPrediction.prediction_timestamp) + ) return result.all() diff --git a/backend/packages/wps-shared/src/wps_shared/schemas/fba.py b/backend/packages/wps-shared/src/wps_shared/schemas/fba.py index 300e80dceb..8bfc3e1d13 100644 --- a/backend/packages/wps-shared/src/wps_shared/schemas/fba.py +++ b/backend/packages/wps-shared/src/wps_shared/schemas/fba.py @@ -4,27 +4,11 @@ from typing import Dict, List, Optional from pydantic import BaseModel +from wps_wf1.models import FireCentre from wps_shared.schemas.auto_spatial_advisory import SFMSRunType -class FireCenterStation(BaseModel): - """A fire weather station has a code, name and geographical coordinate.""" - - code: int - name: str - zone: Optional[str] = None - - -class FireCentre(BaseModel): - """The highest-level 
organizational unit for wildfire planning. Each fire centre - has 1 or more planning areas within it.""" - - id: str - name: str - stations: List[FireCenterStation] - - class FireCenterListResponse(BaseModel): """Response for all fire centers, in a list""" diff --git a/backend/packages/wps-shared/src/wps_shared/schemas/forecasts.py b/backend/packages/wps-shared/src/wps_shared/schemas/forecasts.py index f36f3a3ad8..89ebbc25e4 100644 --- a/backend/packages/wps-shared/src/wps_shared/schemas/forecasts.py +++ b/backend/packages/wps-shared/src/wps_shared/schemas/forecasts.py @@ -3,7 +3,7 @@ from datetime import datetime from typing import List, Optional from pydantic import BaseModel -from wps_shared.schemas.stations import WeatherStation +from wps_wf1.models import WeatherStation class NoonForecastValue(BaseModel): diff --git a/backend/packages/wps-shared/src/wps_shared/schemas/morecast_v2.py b/backend/packages/wps-shared/src/wps_shared/schemas/morecast_v2.py index 7896a9af39..fa846d1faa 100644 --- a/backend/packages/wps-shared/src/wps_shared/schemas/morecast_v2.py +++ b/backend/packages/wps-shared/src/wps_shared/schemas/morecast_v2.py @@ -1,9 +1,10 @@ """This module contains pydantic models for Morecast v2""" from enum import Enum -from typing import List, Optional +from typing import List + from pydantic import BaseModel -from datetime import datetime +from wps_wf1.models import StationDailyFromWF1, WeatherIndeterminate class ModelChoice(str, Enum): @@ -17,34 +18,6 @@ class ModelChoice(str, Enum): RDPS = "RDPS" -class WeatherDeterminate(str, Enum): - """Enumerator for all valid determinate weather sources""" - - GDPS = "GDPS" - GDPS_BIAS = "GDPS_BIAS" - GFS = "GFS" - GFS_BIAS = "GFS_BIAS" - HRDPS = "HRDPS" - HRDPS_BIAS = "HRDPS_BIAS" - NAM = "NAM" - NAM_BIAS = "NAM_BIAS" - RDPS = "RDPS" - RDPS_BIAS = "RDPS_BIAS" - GRASS_CURING_CWFIS = "Grass_Curing_CWFIS" - ECMWF = "ECMWF" - - # non prediction models - FORECAST = "Forecast" - ACTUAL = "Actual" - - @classmethod - def 
from_string(cls, value: str) -> "WeatherDeterminate": - try: - return cls(value) - except ValueError: - raise ValueError(f"{value!r} is not a valid WeatherDeterminate") - - class ForecastedTemperature(BaseModel): """Forecaster chosen temperature""" @@ -117,78 +90,14 @@ class ObservedDailiesForStations(BaseModel): station_codes: List[int] -class StationDailyFromWF1(BaseModel): - """Daily weather data (forecast or observed) for a specific station and date retrieved from WF1 API""" - - created_by: str - forecast_id: str - station_code: int - station_name: str - utcTimestamp: datetime - temperature: Optional[float] = None - relative_humidity: Optional[float] = None - precipitation: Optional[float] = None - wind_direction: Optional[float] = None - wind_speed: Optional[float] = None - - class StationDailiesResponse(BaseModel): """List of StationDailyFromWF1 records as response""" dailies: List[StationDailyFromWF1] -class WeatherIndeterminate(BaseModel): - """Used to represent a predicted or actual value""" - - station_code: int - station_name: str - determinate: WeatherDeterminate - utc_timestamp: datetime - latitude: Optional[float] = None - longitude: Optional[float] = None - temperature: Optional[float] = None - relative_humidity: Optional[float] = None - precipitation: Optional[float] = None - wind_direction: Optional[float] = None - wind_speed: Optional[float] = None - fine_fuel_moisture_code: Optional[float] = None - duff_moisture_code: Optional[float] = None - drought_code: Optional[float] = None - initial_spread_index: Optional[float] = None - build_up_index: Optional[float] = None - fire_weather_index: Optional[float] = None - danger_rating: Optional[int] = None - grass_curing: Optional[float] = None - update_date: Optional[datetime] = None - prediction_run_timestamp: Optional[datetime] = None - - class IndeterminateDailiesResponse(BaseModel): actuals: List[WeatherIndeterminate] forecasts: List[WeatherIndeterminate] grass_curing: List[WeatherIndeterminate] 
predictions: List[WeatherIndeterminate] - - -class WF1ForecastRecordType(BaseModel): - id: str = "FORECAST" - displayLabel: str = "Forecast" - - -class WF1PostForecast(BaseModel): - """Used to represent a forecast to be POSTed to WF1""" - - archive: str = "false" - createdBy: Optional[str] = None - id: Optional[str] = None - station: str # station URL - stationId: str # station UUID - weatherTimestamp: int # UTC timestamp in millis - temperature: float - relativeHumidity: float - precipitation: float - windSpeed: float - windDirection: Optional[float] = None - grasslandCuring: Optional[float] = None - recordType: WF1ForecastRecordType diff --git a/backend/packages/wps-shared/src/wps_shared/schemas/observations.py b/backend/packages/wps-shared/src/wps_shared/schemas/observations.py index e839d436f8..8dc0bb10db 100644 --- a/backend/packages/wps-shared/src/wps_shared/schemas/observations.py +++ b/backend/packages/wps-shared/src/wps_shared/schemas/observations.py @@ -1,34 +1,12 @@ -""" This module contains pydandict schemas relating to observations (a.k.a. hourlies) for the API. -""" -from datetime import datetime -from typing import List, Optional -from pydantic import BaseModel -from wps_shared.schemas.stations import WeatherStation - - -class WeatherReading(BaseModel): - """ Weather reading for a particular point in time """ - datetime: Optional[datetime] - temperature: Optional[float] = None - relative_humidity: Optional[float] = None - wind_speed: Optional[float] = None - wind_direction: Optional[float] = None - barometric_pressure: Optional[float] = None - precipitation: Optional[float] = None - dewpoint: Optional[float] = None - ffmc: Optional[float] = None - isi: Optional[float] = None - fwi: Optional[float] = None - observation_valid: Optional[bool] = None - observation_valid_comment: Optional[str] = None +"""This module contains pydandict schemas relating to observations (a.k.a. 
hourlies) for the API.""" +from typing import List -class WeatherStationHourlyReadings(BaseModel): - """ The weather readings for a particular station """ - values: List[WeatherReading] - station: WeatherStation +from pydantic import BaseModel +from wps_wf1.models import WeatherStationHourlyReadings class WeatherStationHourlyReadingsResponse(BaseModel): - """ Response containing a number of hourly readings. """ + """Response containing a number of hourly readings.""" + hourlies: List[WeatherStationHourlyReadings] diff --git a/backend/packages/wps-shared/src/wps_shared/schemas/percentiles.py b/backend/packages/wps-shared/src/wps_shared/schemas/percentiles.py index 8c7a0a0065..0c337950b2 100644 --- a/backend/packages/wps-shared/src/wps_shared/schemas/percentiles.py +++ b/backend/packages/wps-shared/src/wps_shared/schemas/percentiles.py @@ -2,7 +2,7 @@ """ from typing import List, Dict, Optional from pydantic import BaseModel -from wps_shared.schemas.stations import WeatherStation +from wps_wf1.models import WeatherStation class YearRange(BaseModel): @@ -39,6 +39,6 @@ class MeanValues(BaseModel): class CalculatedResponse(BaseModel): """ The combined response for a set of stations. """ stations: Dict[int, StationSummary] = {} - mean_values: MeanValues = None + mean_values: Optional[MeanValues] = None year_range: YearRange percentile: int diff --git a/backend/packages/wps-shared/src/wps_shared/schemas/stations.py b/backend/packages/wps-shared/src/wps_shared/schemas/stations.py index 65a248c534..efafabb7d6 100644 --- a/backend/packages/wps-shared/src/wps_shared/schemas/stations.py +++ b/backend/packages/wps-shared/src/wps_shared/schemas/stations.py @@ -1,128 +1,58 @@ -""" This module contains pydandict schemas relating to weather stations for the API. 
-""" -from typing import List, Optional -from pydantic import BaseModel - - -class FireZone(BaseModel): - id: int - display_label: str - fire_centre: str - - -class StationFireCentre(BaseModel): - """ The fire centre associated with a station """ - id: int - display_label: str - - -class Season(BaseModel): - """ A fire season consists of a start date (month and day) and an end date (month and day). """ - start_month: int - start_day: int - end_month: int - end_day: int - - -class WeatherStationProperties(BaseModel): - """ Non-geometrical weather station properties """ - code: int - name: str - ecodivision_name: Optional[str] = None - core_season: Optional[Season] = None - - -class WeatherVariables(BaseModel): - """ Weather variables """ - temperature: Optional[float] = None - relative_humidity: Optional[float] = None - - -class DetailedWeatherStationProperties(WeatherStationProperties): - """ Detailed, non-geometrical weather station properties """ - observations: Optional[WeatherVariables] = None - forecasts: Optional[WeatherVariables] = None +"""This module contains pydandict schemas relating to weather stations for the API.""" +from typing import List, Optional -class WeatherStationGeometry(BaseModel): - """ Geometrical coordinates of a weather station """ - type: str = "Point" - coordinates: List[float] +from pydantic import BaseModel +from wps_wf1.models import ( + GeoJsonDetailedWeatherStation, + WeatherStationGeometry, + WeatherStationGroup, + WeatherStationGroupMember, + WeatherStationProperties, +) class GeoJsonWeatherStation(BaseModel): - """ GeoJson formatted weather station """ - type: str = "Feature" - properties: WeatherStationProperties - geometry: WeatherStationGeometry + """GeoJson formatted weather station""" - -class GeoJsonDetailedWeatherStation(BaseModel): - """ GeoJson formatted weather station with details """ type: str = "Feature" - properties: DetailedWeatherStationProperties + properties: WeatherStationProperties geometry: 
WeatherStationGeometry -class WeatherStation(BaseModel): - """ A fire weather station has a code, name and geographical coordinate. """ - zone_code: Optional[str] = None - code: int - name: str - lat: float - long: float - ecodivision_name: Optional[str] = None - core_season: Optional[Season] = None - elevation: Optional[int] = None - wfwx_station_uuid: Optional[str] = None - - class WeatherStationsResponse(BaseModel): - """ List of fire weather stations in geojson format. """ + """List of fire weather stations in geojson format.""" + type: str = "FeatureCollection" features: List[GeoJsonWeatherStation] class DetailedWeatherStationsResponse(BaseModel): - """ List of fire weather stations, with details, in geojson format. """ + """List of fire weather stations, with details, in geojson format.""" + type: str = "FeatureCollection" features: List[GeoJsonDetailedWeatherStation] class StationCodeList(BaseModel): - """ List of station codes. """ - stations: List[int] - + """List of station codes.""" -class WeatherStationGroupMember(BaseModel): - """ Description of a station in a group""" - id: str - display_label: str - fire_centre: StationFireCentre - fire_zone: Optional[FireZone] = None - station_code: int - station_status: str + stations: List[int] class WeatherStationGroupMembersResponse(BaseModel): - """ Response to a request for the stations in a group """ - stations: List[WeatherStationGroupMember] + """Response to a request for the stations in a group""" - -class WeatherStationGroup(BaseModel): - """ A weather station group from WF1""" - display_label: str - group_description: Optional[str] = None - group_owner_user_guid: str - group_owner_user_id: str - id: str + stations: List[WeatherStationGroupMember] class WeatherStationGroupsResponse(BaseModel): - """ Response to a request for all WFWX groups""" + """Response to a request for all WFWX groups""" + groups: List[WeatherStationGroup] class WeatherStationGroupsMemberRequest(BaseModel): - """ Request for all 
station members of all groups by group ids""" + """Request for all station members of all groups by group ids""" + group_ids: List[str] diff --git a/backend/packages/wps-shared/src/wps_shared/schemas/weather_models.py b/backend/packages/wps-shared/src/wps_shared/schemas/weather_models.py index 49a24fcd75..c8161ee4c5 100644 --- a/backend/packages/wps-shared/src/wps_shared/schemas/weather_models.py +++ b/backend/packages/wps-shared/src/wps_shared/schemas/weather_models.py @@ -3,7 +3,7 @@ from datetime import datetime from typing import List, Optional from pydantic import BaseModel -from wps_shared.schemas.stations import WeatherStation +from wps_wf1.models import WeatherStation class WeatherPredictionModel(BaseModel): diff --git a/backend/packages/wps-shared/src/wps_shared/stations.py b/backend/packages/wps-shared/src/wps_shared/stations.py index 1a6d712710..ff92b43352 100644 --- a/backend/packages/wps-shared/src/wps_shared/stations.py +++ b/backend/packages/wps-shared/src/wps_shared/stations.py @@ -7,19 +7,21 @@ import logging from typing import List, Final import json +from aiohttp import TCPConnector from aiohttp.client import ClientSession from sqlalchemy.engine.row import Row +from wps_wf1.models import ( + DetailedWeatherStationProperties, + GeoJsonDetailedWeatherStation, + WeatherStation, + WeatherStationGeometry, + WeatherStationProperties, + WeatherVariables, +) import wps_shared.db.database -from wps_shared.schemas.stations import (WeatherStation, - GeoJsonWeatherStation, - GeoJsonDetailedWeatherStation, - WeatherStationProperties, - WeatherVariables, - DetailedWeatherStationProperties, - WeatherStationGeometry) -from wps_shared.db.crud.stations import get_noon_forecast_observation_union -from wps_shared.wildfire_one import wfwx_api -from wps_shared.wildfire_one.wfwx_api import get_auth_header, get_detailed_stations, get_station_data +from wps_shared.schemas.stations import GeoJsonWeatherStation +from wps_shared.db.crud.stations import _get_noon_date, 
get_noon_forecast_observation_union +from wps_shared.wildfire_one.wfwx_api import create_wfwx_api logger = logging.getLogger(__name__) @@ -89,7 +91,11 @@ async def _get_detailed_stations(time_of_interest: datetime): async def get_stations_by_codes(station_codes: List[int]) -> List[WeatherStation]: """Get a list of stations by code, from WFWX Fireweather API.""" - return await wfwx_api.get_stations_by_codes(station_codes) + # Limit the number of concurrent connections. + conn = TCPConnector(limit=10) + async with ClientSession(connector=conn) as session: + wfwx_api = create_wfwx_api(session) + return await wfwx_api.get_stations_by_codes(station_codes) async def get_stations_from_source() -> List[WeatherStation]: @@ -101,7 +107,10 @@ async def fetch_detailed_stations_as_geojson(time_of_interest: datetime) -> List """Fetch a detailed list of stations. i.e. more than just the fire station name and code, throw some observations and forecast in the mix.""" logger.info("requesting detailed stations...") - result = await get_detailed_stations(time_of_interest) + noon_time_of_interest = _get_noon_date(time_of_interest) + async with ClientSession() as session: + wfwx_api = create_wfwx_api(session) + result = await wfwx_api.get_detailed_stations(noon_time_of_interest) logger.info("detailed stations loaded.") return result @@ -124,8 +133,8 @@ async def get_stations_as_geojson() -> List[GeoJsonWeatherStation]: async def get_stations_asynchronously(): """ Get list of stations asynchronously """ async with ClientSession() as session: - header = await get_auth_header(session) - return await get_station_data(session, header) + wfwx_api = create_wfwx_api(session) + return await wfwx_api.get_station_data() def get_stations_synchronously() -> List[WeatherStation]: diff --git a/backend/packages/wps-shared/src/wps_shared/tests/common.py b/backend/packages/wps-shared/src/wps_shared/tests/common.py index 224f93797a..bac99803cf 100644 --- 
a/backend/packages/wps-shared/src/wps_shared/tests/common.py +++ b/backend/packages/wps-shared/src/wps_shared/tests/common.py @@ -1,10 +1,11 @@ """Mock modules/classes""" +import json import logging import os -import json -from typing import Optional from contextlib import asynccontextmanager +from typing import Optional + from wps_shared.auth import ASA_TEST_IDIR_GUID from wps_shared.tests.fixtures.loader import FixtureFinder @@ -109,6 +110,11 @@ async def json(self) -> dict: """Return json response""" return self._json + def raise_for_status(self) -> None: + """Mimic aiohttp.ClientResponse.raise_for_status().""" + if 400 <= self.status: + raise Exception(f"HTTP {self.status}") + class DefaultMockAioSession: """Mock aiobotocore.session.AioSession""" diff --git a/backend/packages/wps-shared/src/wps_shared/tests/conftest.py b/backend/packages/wps-shared/src/wps_shared/tests/conftest.py index 7a41e33f1d..7325395ba4 100644 --- a/backend/packages/wps-shared/src/wps_shared/tests/conftest.py +++ b/backend/packages/wps-shared/src/wps_shared/tests/conftest.py @@ -196,3 +196,14 @@ def mock_client_session(monkeypatch): def spy_access_logging(mocker: MockerFixture): """Spies on access audting logging for tests""" return mocker.spy(auth, "create_api_access_audit_log") + + +@pytest.fixture +def mock_wfwx_api(mocker: MockerFixture): + """A mocked WfwxApi with async methods.""" + mock = mocker.AsyncMock(name="WfwxApiMock") + # Async method + mock._get_auth_header = mocker.AsyncMock(return_value={}) + mock._get_no_cache_auth_header = mocker.AsyncMock(return_value={"Cache-Control": "no-cache"}) + mock.get_stations_by_group_id = mocker.AsyncMock(return_value=[]) + return mock diff --git a/backend/packages/wps-shared/src/wps_shared/tests/fixtures/wf1/lookup.json b/backend/packages/wps-shared/src/wps_shared/tests/fixtures/wf1/lookup.json index 084f956ead..8d3c638b0e 100644 --- a/backend/packages/wps-shared/src/wps_shared/tests/fixtures/wf1/lookup.json +++ 
b/backend/packages/wps-shared/src/wps_shared/tests/fixtures/wf1/lookup.json @@ -102,6 +102,9 @@ "get": { "{'query': 'weatherTimestamp==1618862400000', 'page': 0, 'size': '1000'}": { "None": "wfwx/v1/dailies/rsql__query_weatherTimestamp==1618862400000_page_0_size_1000.json" + }, + "{'query': 'weatherTimestamp==1618862400000', 'page': 0, 'size': 1000}": { + "None": "wfwx/v1/dailies/rsql__query_weatherTimestamp==1618862400000_page_0_size_1000.json" } } }, diff --git a/backend/packages/wps-shared/src/wps_shared/tests/wildfire_one/test_schema_parsers.py b/backend/packages/wps-shared/src/wps_shared/tests/wildfire_one/test_schema_parsers.py deleted file mode 100644 index 8c0b9695e1..0000000000 --- a/backend/packages/wps-shared/src/wps_shared/tests/wildfire_one/test_schema_parsers.py +++ /dev/null @@ -1,216 +0,0 @@ -from typing import List -from wps_shared.schemas.morecast_v2 import StationDailyFromWF1, WeatherDeterminate -from wps_shared.wildfire_one.schema_parsers import ( - WF1RecordTypeEnum, - parse_noon_forecast, - parse_hourly_actual, - unique_weather_stations_mapper, - weather_indeterminate_list_mapper, - weather_stations_mapper, - dailies_list_mapper, -) -import pytest - - -def build_mock_station_group_member(station_id: str, station_code: str): - return { - "station": { - "id": station_id, - "stationCode": station_code, - "stationStatus": { - "id": '1' - }, - "displayLabel": 's1', - "fireCentre": { - "id": '1', - "displayLabel": "fc1" - }, - "zone": { - "id": "1", - "displayLabel": 'z1', - 'fireCentre': 'fc1' - } - } - } - - -def test_forecast_valid_flags_are_set(): - """ Noon forecast valid flags are set """ - raw_forecast = { - "weatherTimestamp": 1, - 'updateDate': 1, - "station_code": 1, - "temperature": 1, - "relativeHumidity": 1, - "windSpeed": 1, - "windDirection": 1, - "precipitation": 1, - "grasslandCuring": 1, - "fineFuelMoistureCode": 1, - "duffMoistureCode": 1, - "droughtCode": 1, - "initialSpreadIndex": 1, - "buildUpIndex": 1, - "fireWeatherIndex": 1, 
- "dailySeverityRating": 1 - } - - result = parse_noon_forecast(1, raw_forecast) - assert result.temp_valid is True - assert result.rh_valid is True - assert result.wspeed_valid is True - assert result.wdir_valid is True - assert result.precip_valid is True - - -def test_actual_valid_flags_are_set(): - """ Noon forecast valid flags are set """ - raw_hourly_actual = { - "weatherTimestamp": 1, - 'updateDate': 1, - "station_code": 1, - "temperature": 1, - "relativeHumidity": 1, - "windSpeed": 1, - "windDirection": 1, - "precipitation": 1, - "grasslandCuring": 1, - "fineFuelMoistureCode": 1, - "duffMoistureCode": 1, - "droughtCode": 1, - "initialSpreadIndex": 1, - "buildUpIndex": 1, - "fireWeatherIndex": 1, - "dailySeverityRating": 1, - "observationValidInd": True, - "observationValidComment": '' - } - - result = parse_hourly_actual(1, raw_hourly_actual) - assert result.temp_valid is True - assert result.rh_valid is True - assert result.wspeed_valid is True - assert result.wdir_valid is True - assert result.precip_valid is True - - -def test_unique_station_mapper_duplicate(): - """ Returns unique stations from raw list of stations """ - stations = [ - build_mock_station_group_member("1", "1"), - build_mock_station_group_member("1", "1") - ] - - result = unique_weather_stations_mapper(stations) - assert len(result) == 1 - assert result[0].station_code == 1 - - -def test_unique_station_mapper_unique(): - """ Returns unique stations from non duplicate station list """ - stations = [ - build_mock_station_group_member("1", "1"), - build_mock_station_group_member("2", "2") - ] - result = unique_weather_stations_mapper(stations) - assert len(result) == 2 - assert result[0].station_code != result[1].station_code - - -def test_station_mapper(): - """ Returns stations based on raw wf1 stations """ - stations = [ - build_mock_station_group_member("1", "1"), - build_mock_station_group_member("2", "2") - ] - result = weather_stations_mapper(stations) - assert len(result) == 2 - 
assert result[0].station_code == 1 - assert result[0].id == '1' - assert result[1].station_code == 2 - assert result[1].id == '2' - - -async def async_observed_dailies(record_type: str): - yield { - "createdBy": "test_user", - "id": "test_guid_id", - "stationData": { - "stationCode": 1, - "displayLabel": "Test", - "latitude": 1, - "longitude": 1, - "stationStatus": { - "id": "ACTIVE", - }, - }, - "recordType": { - "id": record_type - }, - "weatherTimestamp": 1680984000000, - "temperature": 1, - "relativeHumidity": 1, - "precipitation": 1, - "windDirection": 1, - "windSpeed": 1, - "fineFuelMoistureCode": 1, - "duffMoistureCode": 1, - "droughtCode": 1, - "initialSpreadIndex": 1, - "buildUpIndex": 1, - "fireWeatherIndex": 1, - "dangerForest": 1 - } - - -@pytest.mark.anyio -async def test_dailies_list_mapper_actual_daily_and_actual_filter(anyio_backend): - result: List[StationDailyFromWF1] = await dailies_list_mapper(async_observed_dailies("ACTUAL"), WF1RecordTypeEnum.ACTUAL) - assert len(result) == 1 - assert result[0].station_code == 1 - - -@pytest.mark.anyio -async def test_dailies_list_mapper_actual_daily_and_forecast_filter(anyio_backend): - result: List[StationDailyFromWF1] = await dailies_list_mapper(async_observed_dailies("ACTUAL"), WF1RecordTypeEnum.FORECAST) - assert len(result) == 0 - - -@pytest.mark.anyio -async def test_dailies_list_mapper_forecast_daily_and_forecast_filter(anyio_backend): - result: List[StationDailyFromWF1] = await dailies_list_mapper(async_observed_dailies("FORECAST"), WF1RecordTypeEnum.FORECAST) - assert len(result) == 1 - assert result[0].station_code == 1 - - -@pytest.mark.anyio -async def test_dailies_list_mapper_forecast_daily_and_actual_filter(anyio_backend): - result: List[StationDailyFromWF1] = await dailies_list_mapper(async_observed_dailies("FORECAST"), WF1RecordTypeEnum.ACTUAL) - assert len(result) == 0 - - -@pytest.mark.anyio -async def test_indeterminate_mapper_actual(anyio_backend): - observed, forecast = await 
weather_indeterminate_list_mapper(async_observed_dailies("ACTUAL")) - assert len(forecast) == 0 - assert len(observed) == 1 - assert observed[0].determinate == WeatherDeterminate.ACTUAL - assert observed[0].station_code == 1 - - -@pytest.mark.anyio -async def test_indeterminate_mapper_manual(anyio_backend): - observed, forecast = await weather_indeterminate_list_mapper(async_observed_dailies("MANUAL")) - assert len(forecast) == 0 - assert len(observed) == 1 - assert observed[0].determinate == WeatherDeterminate.ACTUAL - assert observed[0].station_code == 1 - - -@pytest.mark.anyio -async def test_indeterminate_mapper_forecast(anyio_backend): - observed, forecast = await weather_indeterminate_list_mapper(async_observed_dailies("FORECAST")) - assert len(observed) == 0 - assert len(forecast) == 1 - assert forecast[0].determinate == WeatherDeterminate.FORECAST - assert forecast[0].station_code == 1 diff --git a/backend/packages/wps-shared/src/wps_shared/tests/wildfire_one/test_util.py b/backend/packages/wps-shared/src/wps_shared/tests/wildfire_one/test_util.py deleted file mode 100644 index 4447872fd5..0000000000 --- a/backend/packages/wps-shared/src/wps_shared/tests/wildfire_one/test_util.py +++ /dev/null @@ -1,60 +0,0 @@ -from typing import Optional -from wps_shared.wildfire_one.util import is_station_valid - - -def build_station(status: str, lat: Optional[int], long: Optional[int]): - return { - "latitude": lat, - "longitude": long, - "stationStatus": { - "id": status, - } - } - - -def test_valid_active_station(): - """ Returns stations based on raw wf1 stations """ - station = build_station("ACTIVE", 1, 1) - - result = is_station_valid(station) - assert result == True - - -def test_valid_project_station(): - """ Returns stations based on raw wf1 stations """ - station = build_station("PROJECT", 1, 1) - - result = is_station_valid(station) - assert result == True - - -def test_valid_test_station(): - """ Returns stations based on raw wf1 stations """ - station = 
build_station("TEST", 1, 1) - - result = is_station_valid(station) - assert result == True - - -def test_invalid_status_station(): - """ Returns stations based on raw wf1 stations """ - station = build_station("", 1, 1) - - result = is_station_valid(station) - assert result == False - - -def test_invalid_lat_station(): - """ Returns stations based on raw wf1 stations """ - station = build_station("ACTIVE", None, 1) - - result = is_station_valid(station) - assert result == False - - -def test_invalid_long_station(): - """ Returns stations based on raw wf1 stations """ - station = build_station("ACTIVE", 1, None) - - result = is_station_valid(station) - assert result == False diff --git a/backend/packages/wps-shared/src/wps_shared/tests/wildfire_one/test_validation.py b/backend/packages/wps-shared/src/wps_shared/tests/wildfire_one/test_validation.py deleted file mode 100644 index 2304d88d4d..0000000000 --- a/backend/packages/wps-shared/src/wps_shared/tests/wildfire_one/test_validation.py +++ /dev/null @@ -1,209 +0,0 @@ -""" Unit testing for WFWX API validation """ -import math -from datetime import datetime -from wps_shared.schemas.observations import WeatherReading -from wps_shared.wildfire_one.validation import validate_metric, get_valid_flags - - -def test_validate_metric_below(): - """ Below range returns false """ - result = validate_metric(1, 2, 3) - assert result is False - - -def test_validate_metric_above(): - """ Above range returns false """ - result = validate_metric(3, 1, 2) - assert result is False - - -def test_validate_metric_within(): - """ Within range returns true """ - result = validate_metric(2, 1, 3) - assert result is True - - -def test_validate_metric_at_low(): - """ At lower bound returns true """ - result = validate_metric(1, 1, 2) - assert result is True - - -def test_validate_metric_at_high(): - """ At lower bound returns true """ - result = validate_metric(2, 1, 2) - assert result is True - - -def test_temp_valid(): - """ Any temp number 
is valid""" - test_record = WeatherReading(datetime=datetime(2023, 7, 26, 12, 30, 15), - temperature=1, - relative_humidity=None, - wind_speed=None, - wind_direction=None, - precipitation=None) - temp_valid, _, _, _, _ = get_valid_flags(test_record) - assert temp_valid is True - - -def test_temp_invalid(): - """ No temp number is invalid""" - test_record = WeatherReading(datetime=datetime(2023, 7, 26, 12, 30, 15), - temperature=None, - relative_humidity=None, - wind_speed=None, - wind_direction=None, - precipitation=None) - temp_valid, _, _, _, _ = get_valid_flags(test_record) - assert temp_valid is False - - -def test_rh_valid(): - """ 0 to 100 is valid for rh""" - low_valid = WeatherReading(datetime=datetime(2023, 7, 26, 12, 30, 15), - temperature=None, - relative_humidity=0, - wind_speed=None, - wind_direction=None, - precipitation=None) - _, low_rh_valid, _, _, _ = get_valid_flags(low_valid) - assert low_rh_valid is True - - high_valid = WeatherReading(datetime=datetime(2023, 7, 26, 12, 30, 15), - temperature=None, - relative_humidity=100, - wind_speed=None, - wind_direction=None, - precipitation=None) - _, high_rh_valid, _, _, _ = get_valid_flags(high_valid) - assert high_rh_valid is True - - -def test_rh_invalid(): - """ Below 0 and above 100 is invalid for rh""" - low_valid = WeatherReading(datetime=datetime(2023, 7, 26, 12, 30, 15), - temperature=None, - relative_humidity=-1, - wind_speed=None, - wind_direction=None, - precipitation=None) - _, low_rh_invalid, _, _, _ = get_valid_flags(low_valid) - assert low_rh_invalid is False - - high_valid = WeatherReading(datetime=datetime(2023, 7, 26, 12, 30, 15), - temperature=None, - relative_humidity=101, - wind_speed=None, - wind_direction=None, - precipitation=None) - _, high_rh_invalid, _, _, _ = get_valid_flags(high_valid) - assert high_rh_invalid is False - - -def test_wind_speed_valid(): - """ 0 to inf is valid for wind_speed""" - low_valid = WeatherReading(datetime=datetime(2023, 7, 26, 12, 30, 15), - 
temperature=None, - relative_humidity=None, - wind_speed=0, - wind_direction=None, - precipitation=None) - _, _, low_wind_speed_valid, _, _ = get_valid_flags(low_valid) - assert low_wind_speed_valid is True - - high_valid = WeatherReading(datetime=datetime(2023, 7, 26, 12, 30, 15), - temperature=None, - relative_humidity=None, - wind_speed=math.inf, - wind_direction=None, - precipitation=None) - _, _, high_wind_speed_valid, _, _ = get_valid_flags(high_valid) - assert high_wind_speed_valid is True - - -def test_wind_speed_invalid(): - """ Below 0 is invalid for wind_speed""" - low_valid = WeatherReading(datetime=datetime(2023, 7, 26, 12, 30, 15), - temperature=None, - relative_humidity=None, - wind_speed=-1, - wind_direction=None, - precipitation=None) - _, _, low_wind_speed_invalid, _, _ = get_valid_flags(low_valid) - assert low_wind_speed_invalid is False - - -def test_wdir_valid(): - """ 0 to 360 is valid for wdir""" - low_valid = WeatherReading(datetime=datetime(2023, 7, 26, 12, 30, 15), - temperature=None, - relative_humidity=None, - wind_speed=None, - wind_direction=0, - precipitation=None) - _, _, _, low_wdir_valid, _ = get_valid_flags(low_valid) - assert low_wdir_valid is True - - high_valid = WeatherReading(datetime=datetime(2023, 7, 26, 12, 30, 15), - temperature=None, - relative_humidity=None, - wind_speed=None, - wind_direction=360, - precipitation=None) - _, _, _, high_wdir_valid, _ = get_valid_flags(high_valid) - assert high_wdir_valid is True - - -def test_wdir_invalid(): - """ Below 0 and above 360 is invalid for wdir""" - low_valid = WeatherReading(datetime=datetime(2023, 7, 26, 12, 30, 15), - temperature=None, - relative_humidity=None, - wind_speed=None, - wind_direction=-1, - precipitation=None) - _, _, _, low_wdir_invalid, _ = get_valid_flags(low_valid) - assert low_wdir_invalid is False - - high_valid = WeatherReading(datetime=datetime(2023, 7, 26, 12, 30, 15), - temperature=None, - relative_humidity=None, - wind_speed=None, - 
wind_direction=361, - precipitation=None) - _, _, _, high_wdir_invalid, _ = get_valid_flags(high_valid) - assert high_wdir_invalid is False - - -def test_precip_valid(): - """ 0 to inf is valid for precip""" - low_valid = WeatherReading(datetime=datetime(2023, 7, 26, 12, 30, 15), - temperature=None, - relative_humidity=None, - wind_speed=None, - wind_direction=None, - precipitation=0) - _, _, _, _, low_precip_valid = get_valid_flags(low_valid) - assert low_precip_valid is True - - high_valid = WeatherReading(datetime=datetime(2023, 7, 26, 12, 30, 15), - temperature=None, - relative_humidity=None, - wind_speed=None, - wind_direction=None, - precipitation=math.inf) - _, _, _, _, high_precip_valid = get_valid_flags(high_valid) - assert high_precip_valid is True - - -def test_precip_invalid(): - """ Below 0 is invalid for precip""" - low_valid = WeatherReading(datetime=datetime(2023, 7, 26, 12, 30, 15), - temperature=None, - relative_humidity=None, - wind_speed=None, - wind_direction=None, - precipitation=-1) - _, _, _, _, low_precip_invalid = get_valid_flags(low_valid) - assert low_precip_invalid is False diff --git a/backend/packages/wps-shared/src/wps_shared/tests/wildfire_one/test_wfwx_api.py b/backend/packages/wps-shared/src/wps_shared/tests/wildfire_one/test_wfwx_api.py new file mode 100644 index 0000000000..51c9a6515a --- /dev/null +++ b/backend/packages/wps-shared/src/wps_shared/tests/wildfire_one/test_wfwx_api.py @@ -0,0 +1,151 @@ + +# test_wfwx_api_factory.py +import pytest +from aiohttp import ClientSession + +# Import the module under test as a namespace, so we can patch its local symbols. 
+import wps_shared.wildfire_one.wfwx_api as factory + + +@pytest.fixture +async def aiohttp_session(): + """Create and cleanup a real aiohttp ClientSession for the factory.""" + session = ClientSession() + try: + yield session + finally: + await session.close() + + +def make_config_get_stub(values: dict): + """ + Returns a stub function that mimics config.get(key, default=None) + using `values` dict. Missing keys return the provided `default`. + """ + def _get(key, default=None): + return values.get(key, default) + return _get + + +class DummyApi: + """Deterministic stub for WfwxApi that captures ctor args.""" + + def __init__(self, *, session, wfwx_settings, cache): + self.session = session + self.settings = wfwx_settings + self.cache = cache + + +@pytest.mark.anyio +async def test_create_wfwx_api_happy_path(monkeypatch, aiohttp_session): + """ + With full config and REDIS_USE == 'True', factory builds settings correctly + and wires session+cache into WfwxApi. + """ + cfg = { + "WFWX_BASE_URL": "https://api.example.com/wfwx", + "WFWX_AUTH_URL": "https://auth.example.com/oauth", + "WFWX_USER": "wfwx-user", + "WFWX_SECRET": "super-secret", + "REDIS_AUTH_CACHE_EXPIRY": "900", + "REDIS_STATION_CACHE_EXPIRY": "86400", + "REDIS_HOURLIES_BY_STATION_CODE_CACHE_EXPIRY": "120", + "REDIS_DAILIES_BY_STATION_CODE_CACHE_EXPIRY": "180", + "REDIS_USE": "True", + } + + # Patch symbols WHERE THEY ARE USED: in the factory module. + monkeypatch.setattr(factory.config, "get", make_config_get_stub(cfg)) + + cache_sentinel = object() + monkeypatch.setattr(factory, "create_redis", lambda: cache_sentinel) + + # Replace the class with our deterministic stub. + monkeypatch.setattr(factory, "WfwxApi", DummyApi) + + # Act + result = factory.create_wfwx_api(session=aiohttp_session) + + # Assert: result is our DummyApi instance with captured args. 
+ assert isinstance(result, DummyApi) + assert result.session is aiohttp_session + assert result.cache is cache_sentinel + + s = result.settings + assert s.base_url == cfg["WFWX_BASE_URL"] + assert s.auth_url == cfg["WFWX_AUTH_URL"] + assert s.user == cfg["WFWX_USER"] + assert s.secret == cfg["WFWX_SECRET"] + assert s.auth_cache_expiry == int(cfg["REDIS_AUTH_CACHE_EXPIRY"]) + assert s.station_cache_expiry == int(cfg["REDIS_STATION_CACHE_EXPIRY"]) + assert s.hourlies_by_station_code_expiry == int( + cfg["REDIS_HOURLIES_BY_STATION_CODE_CACHE_EXPIRY"] + ) + assert s.dailies_by_station_code_expiry == int( + cfg["REDIS_DAILIES_BY_STATION_CODE_CACHE_EXPIRY"] + ) + assert s.use_cache is True + + +@pytest.mark.anyio +async def test_create_wfwx_api_defaults_and_use_cache_false(monkeypatch, aiohttp_session): + """ + When expiry keys are missing and REDIS_USE != 'True', defaults apply and use_cache=False. + """ + cfg = { + "WFWX_BASE_URL": "https://api.example.com/wfwx", + "WFWX_AUTH_URL": "https://auth.example.com/oauth", + "WFWX_USER": "wfwx-user", + "WFWX_SECRET": "super-secret", + "REDIS_USE": "False", + } + + monkeypatch.setattr(factory.config, "get", make_config_get_stub(cfg)) + cache_sentinel = object() + monkeypatch.setattr(factory, "create_redis", lambda: cache_sentinel) + monkeypatch.setattr(factory, "WfwxApi", DummyApi) + + result = factory.create_wfwx_api(session=aiohttp_session) + + assert isinstance(result, DummyApi) + assert result.session is aiohttp_session + assert result.cache is cache_sentinel + + s = result.settings + # Defaults from the factory code: + assert s.auth_cache_expiry == 600 + assert s.station_cache_expiry == 604800 + assert s.hourlies_by_station_code_expiry == 300 + assert s.dailies_by_station_code_expiry == 300 + assert s.use_cache is False + + +@pytest.mark.anyio +async def test_create_wfwx_api_redis_use_truthy_only(monkeypatch, aiohttp_session): + """ + Only the exact string 'True' sets use_cache=True; other values become False. 
+ """ + for value, expected in [ + ("True", True), + ("true", False), + ("1", False), + ("", False), + (None, False), + ]: + cfg = { + "WFWX_BASE_URL": "https://api.example.com/wfwx", + "WFWX_AUTH_URL": "https://auth.example.com/oauth", + "WFWX_USER": "wfwx-user", + "WFWX_SECRET": "super-secret", + "REDIS_USE": value, + } + + monkeypatch.setattr(factory.config, "get", make_config_get_stub(cfg)) + cache_sentinel = object() + monkeypatch.setattr(factory, "create_redis", lambda: cache_sentinel) + monkeypatch.setattr(factory, "WfwxApi", DummyApi) + + result = factory.create_wfwx_api(session=aiohttp_session) + + assert isinstance(result, DummyApi) + assert result.settings.use_cache is expected diff --git a/backend/packages/wps-shared/src/wps_shared/tests/wildfire_one/test_wildfire_one.py b/backend/packages/wps-shared/src/wps_shared/tests/wildfire_one/test_wildfire_one.py deleted file mode 100644 index 3064055eb7..0000000000 --- a/backend/packages/wps-shared/src/wps_shared/tests/wildfire_one/test_wildfire_one.py +++ /dev/null @@ -1,101 +0,0 @@ -"""Unit testing for WFWX API code""" - -import asyncio -from unittest.mock import patch, AsyncMock -import pytest -from fastapi import HTTPException -from pytest_mock import MockFixture - -from wps_shared.wildfire_one.query_builders import BuildQueryAllForecastsByAfterStart, BuildQueryAllHourliesByRange, BuildQueryDailiesByStationCode, BuildQueryStationGroups -from wps_shared.wildfire_one.wfwx_api import WFWXWeatherStation, get_wfwx_stations_from_station_codes -from wps_shared.wildfire_one.wfwx_post_api import post_forecasts - - -def test_build_all_hourlies_query(): - """Verifies the query builder returns the correct url and parameters""" - query_builder = BuildQueryAllHourliesByRange(0, 1) - result = query_builder.query(0) - assert result == ("https://wf1/wfwx/v1/hourlies/rsql", {"size": "1000", "page": 0, "query": "weatherTimestamp >=0;weatherTimestamp <1"}) - - -def test_build_forecasts_query(): - """Verifies the query builder 
returns the correct url and parameters""" - query_builder = BuildQueryAllForecastsByAfterStart(0) - result = query_builder.query(0) - assert result == ("https://wf1/wfwx/v1/dailies/rsql", {"size": "1000", "page": 0, "query": "weatherTimestamp >=0;recordType.id == 'FORECAST'"}) - - -def test_build_dailies_by_station_code(): - """Verifies the query builder returns the correct url and parameters for dailies by station code""" - query_builder = BuildQueryDailiesByStationCode(0, 1, ["1", "2"]) - result = query_builder.query(0) - assert result == ( - "https://wf1/wfwx/v1/dailies/search/" + "findDailiesByStationIdIsInAndWeather" + "TimestampBetweenOrderByStationIdAscWeatherTimestampAsc", - {"size": "1000", "page": 0, "startingTimestamp": 0, "endingTimestamp": 1, "stationIds": ["1", "2"]}, - ) - - -def test_build_station_groups_query(): - """Verifies the query builder returns the correct url and parameters for a station groups query""" - query_builder = BuildQueryStationGroups() - result = query_builder.query(0) - assert result == ("https://wf1/wfwx/v1/stationGroups", {"size": "1000", "page": 0, "sort": "groupOwnerUserId,asc"}) - - -code1 = 322 -code2 = 239 -all_station_codes = [{"station_code": code1}, {"station_code": code2}] -station_1 = WFWXWeatherStation(code=code1, name="name", wfwx_id="one", latitude=0, longitude=0, elevation=0, zone_code="T1") -station_2 = WFWXWeatherStation(code=code2, name="name", wfwx_id="two", latitude=0, longitude=0, elevation=0, zone_code="T1") -all_stations = [station_1, station_2] - - -@pytest.fixture() -def mock_responses(mocker: MockFixture): - """Mocks out hourly actuals as async result""" - - async def mock_get_stations(_, __, **___): - """Returns mocked WFWXWeatherStations.""" - return all_stations - - def mock_get_fire_centre_station_codes(__): - """Returns mocked WFWXWeatherStations codes.""" - return all_station_codes - - mocker.patch("wps_shared.db.crud.hfi_calc.get_all_stations", mock_get_fire_centre_station_codes) - 
mocker.patch("wps_shared.wildfire_one.wfwx_api.get_station_data", mock_get_stations) - - -def test_get_ids_from_station_codes_no_stations(mock_responses): - """Verifies the query builder returns the correct url and parameters for dailies by station code""" - - async def run_test(): - """Async function to run test and assert result""" - result = await get_wfwx_stations_from_station_codes(None, {}, None) - assert len(result) == 2 - - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - loop.run_until_complete(run_test()) - - -def test_get_ids_from_station_codes(mock_responses): - """Verifies the query builder returns the correct url and parameters for dailies by station code""" - - async def run_test(): - """Async function to run test and assert result""" - result = await get_wfwx_stations_from_station_codes(None, {}, [code1]) - assert result == [station_1] - - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - loop.run_until_complete(run_test()) - - -@pytest.mark.anyio -@patch("wps_shared.wildfire_one.wfwx_post_api.ClientSession") -async def test_wf1_post_failure(mock_client): - """Verifies that posting to WF1 raises an exception upon failure""" - mock_client.post.return_value.__aenter__.return_value = AsyncMock(status=400) - with pytest.raises(HTTPException): - await post_forecasts(mock_client, []) diff --git a/backend/packages/wps-shared/src/wps_shared/weather_models/fetch/__init__.py b/backend/packages/wps-shared/src/wps_shared/weather_models/fetch/__init__.py index c310761449..ec104f25a1 100644 --- a/backend/packages/wps-shared/src/wps_shared/weather_models/fetch/__init__.py +++ b/backend/packages/wps-shared/src/wps_shared/weather_models/fetch/__init__.py @@ -3,7 +3,7 @@ import logging from typing import List from shapely.geometry import Point, Polygon -from wps_shared.schemas.stations import WeatherStation +from wps_wf1.models import WeatherStation logger = logging.getLogger(__name__) diff --git 
a/backend/packages/wps-shared/src/wps_shared/weather_models/fetch/predictions.py b/backend/packages/wps-shared/src/wps_shared/weather_models/fetch/predictions.py index 4495a218c3..4a374119f5 100644 --- a/backend/packages/wps-shared/src/wps_shared/weather_models/fetch/predictions.py +++ b/backend/packages/wps-shared/src/wps_shared/weather_models/fetch/predictions.py @@ -1,30 +1,32 @@ """Code for fetching data for API.""" -from itertools import groupby -import logging -from typing import List import datetime +import logging +from collections import defaultdict from datetime import time +from itertools import groupby from time import perf_counter -from collections import defaultdict +from typing import List + from sqlalchemy.orm import Session +from wps_wf1.models import WeatherIndeterminate + import wps_shared.db.database -from wps_shared.schemas.morecast_v2 import WeatherIndeterminate +import wps_shared.stations +from wps_shared.db.crud.weather_models import ( + get_latest_station_model_prediction_per_day, + get_latest_station_prediction, + get_station_model_prediction_from_previous_model_run, + get_station_model_predictions, +) +from wps_shared.db.models.weather_models import WeatherStationModelPrediction from wps_shared.schemas.weather_models import ( - WeatherStationModelPredictionValues, + ModelRunPredictions, WeatherModelPredictionValues, WeatherModelRun, - ModelRunPredictions, + WeatherStationModelPredictionValues, WeatherStationModelRunsPredictions, ) -from wps_shared.db.models.weather_models import WeatherStationModelPrediction -from wps_shared.db.crud.weather_models import ( - get_latest_station_model_prediction_per_day, - get_station_model_predictions, - get_station_model_prediction_from_previous_model_run, - get_latest_station_prediction, -) -import wps_shared.stations from wps_shared.utils.time import get_days_from_range, vancouver_tz from wps_shared.weather_models import ModelEnum diff --git 
a/backend/packages/wps-shared/src/wps_shared/wildfire_one/wfwx_api.py b/backend/packages/wps-shared/src/wps_shared/wildfire_one/wfwx_api.py index f307e5f68b..de765deae8 100644 --- a/backend/packages/wps-shared/src/wps_shared/wildfire_one/wfwx_api.py +++ b/backend/packages/wps-shared/src/wps_shared/wildfire_one/wfwx_api.py @@ -1,401 +1,31 @@ """This module contains methods for retrieving information from the WFWX Fireweather API.""" -import math -from typing import List, Optional, Final, AsyncGenerator -from datetime import datetime import logging -import asyncio -from aiohttp import ClientSession, TCPConnector -from wps_shared import config -from wps_shared.data.ecodivision_seasons import EcodivisionSeasons -from wps_shared.db.crud.hfi_calc import get_fire_centre_station_codes -from wps_shared.db.models.observations import HourlyActual -from wps_shared.db.models.forecasts import NoonForecast -from wps_shared.schemas.morecast_v2 import StationDailyFromWF1 -from wps_shared.schemas.observations import WeatherStationHourlyReadings -from wps_shared.schemas.fba import FireCentre -from wps_shared.schemas.stations import WeatherStation, WeatherVariables -from wps_shared.wildfire_one.schema_parsers import ( - WF1RecordTypeEnum, - WFWXWeatherStation, - fire_center_mapper, - parse_noon_forecast, - parse_station, - parse_hourly_actual, - station_list_mapper, - unique_weather_stations_mapper, - weather_indeterminate_list_mapper, - weather_station_group_mapper, - wfwx_station_list_mapper, - dailies_list_mapper, -) -from wps_shared.wildfire_one.query_builders import ( - BuildQueryAllForecastsByAfterStart, - BuildQueryStations, - BuildQueryAllHourliesByRange, - BuildQueryByStationCode, - BuildQueryDailiesByStationCode, - BuildQueryStationGroups, -) -from wps_shared.wildfire_one.util import is_station_valid -from wps_shared.wildfire_one.wildfire_fetchers import ( - fetch_access_token, - fetch_detailed_geojson_stations, - fetch_paged_response_generator, - fetch_hourlies, - 
fetch_raw_dailies_for_all_stations, - fetch_stations_by_group_id, -) - -logger = logging.getLogger(__name__) - - -async def get_auth_header(session: ClientSession) -> dict: - """Get WFWX auth header""" - # Fetch access token - token = await fetch_access_token(session) - # Construct the header. - header = {"Authorization": f"Bearer {token['access_token']}"} - return header - - -async def get_no_cache_auth_header(session: ClientSession) -> dict: - """Get WFWX auth header with explicit no caching""" - # Fetch auth header - header = await get_auth_header(session) - # Add the cache control header - header["Cache-Control"] = "no-cache" - return header - - -async def get_stations_by_codes(station_codes: List[int]) -> List[WeatherStation]: - """Get a list of stations by code, from WFWX Fireweather API.""" - logger.info("Using WFWX to retrieve stations by code") - with EcodivisionSeasons(",".join([str(code) for code in station_codes])) as eco_division: - async with ClientSession() as session: - header = await get_auth_header(session) - stations = [] - # 1 week seems a reasonable period to cache stations for. - redis_station_cache_expiry: Final = int(config.get("REDIS_STATION_CACHE_EXPIRY", 604800)) - # Iterate through "raw" station data. - iterator = fetch_paged_response_generator( - session, header, BuildQueryByStationCode(station_codes), "stations", use_cache=True, cache_expiry_seconds=redis_station_cache_expiry - ) - async for raw_station in iterator: - # If the station is valid, add it to our list of stations. - if is_station_valid(raw_station): - stations.append(parse_station(raw_station, eco_division)) - logger.debug("total stations: %d", len(stations)) - return stations - - -async def get_station_data(session: ClientSession, header: dict, mapper=station_list_mapper): - """Get list of stations from WFWX Fireweather API.""" - logger.info("Using WFWX to retrieve station list") - # 1 week seems a reasonable period to cache stations for. 
- redis_station_cache_expiry: Final = int(config.get("REDIS_STATION_CACHE_EXPIRY", 604800)) - # Iterate through "raw" station data. - raw_stations = fetch_paged_response_generator(session, header, BuildQueryStations(), "stations", use_cache=True, cache_expiry_seconds=redis_station_cache_expiry) - # Map list of stations into desired shape - stations = await mapper(raw_stations) - logger.debug("total stations: %d", len(stations)) - return stations - - -async def get_detailed_stations(time_of_interest: datetime): - """ - We do two things in parallel. - # 1) list of stations - # 2) list of noon values - Once we've collected them all, we merge them into one response - """ - # Limit the number of concurrent connections. - conn = TCPConnector(limit=10) - async with ClientSession(connector=conn) as session: - # Get the authentication header - header = await get_auth_header(session) - # Fetch the daily (noon) values for all the stations - dailies_task = asyncio.create_task(fetch_raw_dailies_for_all_stations(session, header, time_of_interest)) - # Fetch all the stations - stations_task = asyncio.create_task(fetch_detailed_geojson_stations(session, header, BuildQueryStations())) - - # Await completion of concurrent tasks. 
- dailies = await dailies_task - stations, id_to_code_map = await stations_task - - # Combine dailies and stations - for daily in dailies: - station_id = daily.get("stationId") - station_code = id_to_code_map.get(station_id, None) - if station_code: - station = stations[station_code] - weather_variable = WeatherVariables(temperature=daily.get("temperature"), relative_humidity=daily.get("relativeHumidity")) - record_type = daily.get("recordType").get("id") - if record_type in ["ACTUAL", "MANUAL"]: - station.properties.observations = weather_variable - elif record_type == "FORECAST": - station.properties.forecasts = weather_variable - else: - logger.info("unexpected record type: %s", record_type) - else: - logger.debug("No station found for daily reading (%s)", station_id) - - return list(stations.values()) - - -async def get_hourly_readings( - session: ClientSession, header: dict, station_codes: List[int], start_timestamp: datetime, end_timestamp: datetime, use_cache: bool = False -) -> List[WeatherStationHourlyReadings]: - """Get the hourly readings for the list of station codes provided.""" - # Create a list containing all the tasks to run in parallel. - tasks = [] - # 1 week seems a reasonable period to cache stations for. - redis_station_cache_expiry: Final = int(config.get("REDIS_STATION_CACHE_EXPIRY", 604800)) - # Iterate through "raw" station data. - iterator = fetch_paged_response_generator(session, header, BuildQueryByStationCode(station_codes), "stations", True, redis_station_cache_expiry) - raw_stations = [] - eco_division_key = "" - # not ideal - we iterate through the stations twice. 1'st time to get the list of station codes, - # so that we can do an eco division lookup in redis. 
- station_codes = set() - async for raw_station in iterator: - raw_stations.append(raw_station) - station_codes.add(raw_station.get("stationCode")) - eco_division_key = ",".join(str(code) for code in station_codes) - with EcodivisionSeasons(eco_division_key) as eco_division: - for raw_station in raw_stations: - task = asyncio.create_task(fetch_hourlies(session, raw_station, header, start_timestamp, end_timestamp, use_cache, eco_division)) - tasks.append(task) - - # Run the tasks concurrently, waiting for them all to complete. - return await asyncio.gather(*tasks) - - -async def get_noon_forecasts_all_stations(session: ClientSession, header: dict, start_timestamp: datetime) -> List[NoonForecast]: - """Get the noon forecasts for all stations.""" - - noon_forecasts: List[NoonForecast] = [] - - # Iterate through "raw" forecast data. - forecasts_iterator = fetch_paged_response_generator(session, header, BuildQueryAllForecastsByAfterStart(math.floor(start_timestamp.timestamp() * 1000)), "dailies") - - forecasts = [] - async for noon_forecast in forecasts_iterator: - forecasts.append(noon_forecast) - - stations: List[WFWXWeatherStation] = await get_station_data(session, header, mapper=wfwx_station_list_mapper) - - station_code_dict = {station.wfwx_id: station.code for station in stations} - - for noon_forecast in forecasts: - try: - station_code = station_code_dict[(noon_forecast["stationId"])] - parsed_noon_forecast = parse_noon_forecast(station_code, noon_forecast) - if parsed_noon_forecast is not None: - noon_forecasts.append(parsed_noon_forecast) - except KeyError as exception: - logger.warning("Missing noon forecast for station code", exc_info=exception) - - return noon_forecasts - - -async def get_hourly_actuals_all_stations(session: ClientSession, header: dict, start_timestamp: datetime, end_timestamp: datetime) -> List[HourlyActual]: - """Get the hourly actuals for all stations.""" - - hourly_actuals: List[HourlyActual] = [] - - # Iterate through "raw" hourlies 
data. - hourlies_iterator = fetch_paged_response_generator( - session, header, BuildQueryAllHourliesByRange(math.floor(start_timestamp.timestamp() * 1000), math.floor(end_timestamp.timestamp() * 1000)), "hourlies" - ) - - hourlies = [] - async for hourly in hourlies_iterator: - hourlies.append(hourly) - - stations: List[WFWXWeatherStation] = await get_station_data(session, header, mapper=wfwx_station_list_mapper) - - station_code_dict = {station.wfwx_id: station.code for station in stations} - - for hourly in hourlies: - if hourly.get("hourlyMeasurementTypeCode", "").get("id") == "ACTUAL": - try: - station_code = station_code_dict[(hourly["stationId"])] - hourly_actual = parse_hourly_actual(station_code, hourly) - if hourly_actual is not None: - hourly_actuals.append(hourly_actual) - except KeyError as exception: - logger.warning("Missing hourly for station code", exc_info=exception) - return hourly_actuals - - -async def get_daily_actuals_for_stations_between_dates(session: ClientSession, header: dict, start_datetime: datetime, end_datetime: datetime, stations: List[WeatherStation]): - """Get the daily actuals for each station.""" - wfwx_station_ids = [station.wfwx_station_uuid for station in stations] +from aiohttp import ClientSession +from wps_wf1.wfwx_api import WfwxApi +from wps_wf1.wfwx_settings import WfwxSettings - start_timestamp = math.floor(start_datetime.timestamp() * 1000) - end_timestamp = math.floor(end_datetime.timestamp() * 1000) - - cache_expiry_seconds: Final = int(config.get("REDIS_DAILIES_BY_STATION_CODE_CACHE_EXPIRY", 300)) - use_cache = config.get("REDIS_USE") == "True" - - # Iterate through "raw" hourlies data. 
- dailies_iterator = fetch_paged_response_generator( - session, header, BuildQueryDailiesByStationCode(start_timestamp, end_timestamp, wfwx_station_ids), "dailies", use_cache=use_cache, cache_expiry_seconds=cache_expiry_seconds - ) - - dailies = [] - async for daily in dailies_iterator: - dailies.append(daily) - - return dailies - - -async def get_wfwx_stations_from_station_codes(session: ClientSession, header, station_codes: Optional[List[int]]) -> List[WFWXWeatherStation]: - """Return the WFWX station ids from WFWX API given a list of station codes.""" - - # All WFWX stations are requested because WFWX returns a malformed JSON response when too - # many station codes are added as query parameters. - # IMPORTANT - the two calls below, cannot be made from within the lambda, as they will be - # be called multiple times! - wfwx_stations = await get_station_data(session, header, mapper=wfwx_station_list_mapper) - # TODO: this is not good. Code in wfwx api shouldn't be filtering on stations codes in hfi.... 
- fire_centre_station_codes = get_fire_centre_station_codes() - - # Default to all known WFWX station ids if no station codes are specified - if station_codes is None: - return list(filter(lambda x: (x.code in fire_centre_station_codes), wfwx_stations)) - requested_stations: List[WFWXWeatherStation] = [] - station_code_dict = {station.code: station for station in wfwx_stations} - for station_code in station_codes: - wfwx_station = station_code_dict.get(station_code) - if wfwx_station is not None: - requested_stations.append(wfwx_station) - else: - logger.error("No WFWX station id for station code: %s", station_code) - - return requested_stations - - -async def get_raw_dailies_in_range_generator(session: ClientSession, header: dict, wfwx_station_ids: List[str], start_timestamp: int, end_timestamp: int) -> AsyncGenerator[dict, None]: - """Get the raw dailies in range for a list of WFWX station ids.""" - return fetch_paged_response_generator(session, header, BuildQueryDailiesByStationCode(start_timestamp, end_timestamp, wfwx_station_ids), "dailies", True, 60) - - -async def get_dailies_generator( - session: ClientSession, - header: dict, - wfwx_stations: List[WFWXWeatherStation], - time_of_interest: datetime, - end_time_of_interest: Optional[datetime], - check_cache: bool = True, -) -> List[dict]: - """Get the daily actuals/forecasts for the given station ids.""" - # build a list of wfwx station id's - wfwx_station_ids = [wfwx_station.wfwx_id for wfwx_station in wfwx_stations] - - timestamp_of_interest = math.floor(time_of_interest.timestamp() * 1000) - if end_time_of_interest is not None: - end_timestamp_of_interest = math.floor(end_time_of_interest.timestamp() * 1000) - else: - end_timestamp_of_interest = timestamp_of_interest - - # for local dev, we can use redis to reduce load in prod, and generally just makes development faster. - # for production, it's more tricky - we don't want to put too much load on the wf1 api, but we don't - # want stale values either. 
We default to 5 minutes, or 300 seconds. - cache_expiry_seconds: Final = int(config.get("REDIS_DAILIES_BY_STATION_CODE_CACHE_EXPIRY", 300)) - use_cache = check_cache is True and config.get("REDIS_USE") == "True" - logger.info(f"Using cache: {use_cache}") - - dailies_iterator = fetch_paged_response_generator( - session, - header, - BuildQueryDailiesByStationCode(timestamp_of_interest, end_timestamp_of_interest, wfwx_station_ids), - "dailies", - use_cache=use_cache, - cache_expiry_seconds=cache_expiry_seconds, - ) - - return dailies_iterator - - -async def get_fire_centers( - session: ClientSession, - header: dict, -) -> List[FireCentre]: - """Get the fire centers from WFWX.""" - wfwx_fire_centers = await get_station_data(session, header, mapper=fire_center_mapper) - return list(wfwx_fire_centers.values()) - - -async def get_dailies_for_stations_and_date( - session: ClientSession, header: dict, start_time_of_interest: datetime, end_time_of_interest: datetime, unique_station_codes: List[int], mapper=dailies_list_mapper -): - # get station information from the wfwx api - wfwx_stations = await get_wfwx_stations_from_station_codes(session, header, unique_station_codes) - # get the dailies for all the stations - raw_dailies = await get_dailies_generator(session, header, wfwx_stations, start_time_of_interest, end_time_of_interest) - - yesterday_dailies = await mapper(raw_dailies, WF1RecordTypeEnum.ACTUAL) - - return yesterday_dailies - - -async def get_forecasts_for_stations_by_date_range( - session: ClientSession, header: dict, start_time_of_interest: datetime, end_time_of_interest: datetime, unique_station_codes: List[int], check_cache=True, mapper=dailies_list_mapper -) -> List[StationDailyFromWF1]: - # get station information from the wfwx api - wfwx_stations = await get_wfwx_stations_from_station_codes(session, header, unique_station_codes) - # get the daily forecasts for all the stations in the date range - raw_dailies = await get_dailies_generator( - 
session=session, header=header, wfwx_stations=wfwx_stations, time_of_interest=start_time_of_interest, end_time_of_interest=end_time_of_interest, check_cache=check_cache - ) - - forecast_dailies = await mapper(raw_dailies, WF1RecordTypeEnum.FORECAST) - - return forecast_dailies +from wps_shared import config +from wps_shared.utils.redis import create_redis +logger = logging.getLogger(__name__) -async def get_daily_determinates_for_stations_and_date( - session: ClientSession, - header: dict, - start_time_of_interest: datetime, - end_time_of_interest: datetime, - unique_station_codes: List[int], - mapper=weather_indeterminate_list_mapper, - check_cache: bool = True, -): - # get station information from the wfwx api - wfwx_stations = await get_wfwx_stations_from_station_codes(session, header, unique_station_codes) - # get the dailies for all the stations - raw_dailies = await get_dailies_generator( - session, header, wfwx_stations, start_time_of_interest, end_time_of_interest, check_cache +def create_wfwx_api(session: ClientSession) -> WfwxApi: + wfwx_settings = WfwxSettings( + base_url=config.get("WFWX_BASE_URL"), + auth_url=config.get("WFWX_AUTH_URL"), + user=config.get("WFWX_USER"), + secret=config.get("WFWX_SECRET"), + auth_cache_expiry=int(config.get("REDIS_AUTH_CACHE_EXPIRY", 600)), + station_cache_expiry=int(config.get("REDIS_STATION_CACHE_EXPIRY", 604800)), + hourlies_by_station_code_expiry=int( + config.get("REDIS_HOURLIES_BY_STATION_CODE_CACHE_EXPIRY", 300) + ), + dailies_by_station_code_expiry=int( + config.get("REDIS_DAILIES_BY_STATION_CODE_CACHE_EXPIRY", 300) + ), + use_cache=config.get("REDIS_USE") == "True", ) - - weather_determinates_actuals, weather_determinates_forecasts = await mapper(raw_dailies) - - return weather_determinates_actuals, weather_determinates_forecasts - - -async def get_station_groups(mapper=weather_station_group_mapper): - """Get the station groups created by all users from Wild Fire One internal API.""" - async with ClientSession() 
as session: - header = await get_auth_header(session) - all_station_groups = fetch_paged_response_generator(session, header, BuildQueryStationGroups(), "stationGroups", use_cache=False) - # Map list of stations into desired shape - mapped_station_groups = await mapper(all_station_groups) - logger.debug("total station groups: %d", len(mapped_station_groups)) - return mapped_station_groups - - -async def get_stations_by_group_ids(group_ids: List[str], mapper=unique_weather_stations_mapper): - """Get all the stations in the specified group from the Wild Fire One internal API.""" - stations_in_groups = [] - async with ClientSession() as session: - headers = await get_auth_header(session) - for group_id in group_ids: - stations = await fetch_stations_by_group_id(session, headers, group_id) - stations_in_group = mapper(stations) - stations_in_groups.extend(stations_in_group) - return stations_in_groups + wfwx_api = WfwxApi(session=session, wfwx_settings=wfwx_settings, cache=create_redis()) + return wfwx_api diff --git a/backend/packages/wps-shared/src/wps_shared/wildfire_one/wfwx_post_api.py b/backend/packages/wps-shared/src/wps_shared/wildfire_one/wfwx_post_api.py deleted file mode 100644 index 1aa330d6dc..0000000000 --- a/backend/packages/wps-shared/src/wps_shared/wildfire_one/wfwx_post_api.py +++ /dev/null @@ -1,36 +0,0 @@ -"""This module contains methods for submitting information to the WFWX Fireweather API.""" - -import logging -from typing import List -from aiohttp import ClientSession -from fastapi import status, HTTPException -from wps_shared import config -from wps_shared.schemas.morecast_v2 import WF1PostForecast -from wps_shared.wildfire_one.wfwx_api import get_auth_header - -logger = logging.getLogger(__name__) - -WF1_FORECAST_POST_URL = f"{config.get('WFWX_BASE_URL')}/v1/dailies/daily-bulk" -WF1_HTTP_ERROR = HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=""" - Error submitting forecasts to WF1, please retry. 
- All your forecast inputs have been saved as a draft on your browser and can be submitted at a later time. - If the problem persists, use the following link to verify the status of the WF1 service: https://wfapps.nrs.gov.bc.ca/pub/wfwx-fireweather-web/stations - """, -) - - -async def post_forecasts(session: ClientSession, forecasts: List[WF1PostForecast]): - logger.info("Using WFWX to post/put forecasts") - headers = await get_auth_header(session) - - forecasts_json = [forecast.model_dump() for forecast in forecasts] - - async with session.post(WF1_FORECAST_POST_URL, json=forecasts_json, headers=headers) as response: - response_json = await response.json() - if response.status == status.HTTP_201_CREATED or response.status == status.HTTP_200_OK: - logger.info("submitted forecasts to wf1 %s.", response_json) - else: - logger.error(f"error submitting forecasts to wf1 {response_json}") - raise WF1_HTTP_ERROR diff --git a/backend/packages/wps-shared/src/wps_shared/wildfire_one/wildfire_fetchers.py b/backend/packages/wps-shared/src/wps_shared/wildfire_one/wildfire_fetchers.py deleted file mode 100644 index 3b7dd6ec8d..0000000000 --- a/backend/packages/wps-shared/src/wps_shared/wildfire_one/wildfire_fetchers.py +++ /dev/null @@ -1,238 +0,0 @@ -"""Functions that request and marshall WFWX API responses into our schemas""" - -import math -import logging -from datetime import datetime -from typing import AsyncGenerator, Dict, Tuple, Final -import json -from urllib.parse import urlencode -from aiohttp.client import ClientSession, BasicAuth -from wps_shared.data.ecodivision_seasons import EcodivisionSeasons -from wps_shared.rocketchat_notifications import send_rocketchat_notification -from wps_shared.schemas.observations import WeatherStationHourlyReadings -from wps_shared.schemas.stations import DetailedWeatherStationProperties, GeoJsonDetailedWeatherStation, WeatherStationGeometry -from wps_shared.db.crud.stations import _get_noon_date -from 
wps_shared.wildfire_one.query_builders import BuildQuery -from wps_shared import config -from wps_shared.wildfire_one.schema_parsers import parse_hourly, parse_station -from wps_shared.wildfire_one.util import is_station_valid -from wps_shared.utils.redis import create_redis - -logger = logging.getLogger(__name__) - - -async def _fetch_cached_response(session: ClientSession, headers: dict, url: str, params: dict, cache_expiry_seconds: int): - cache = create_redis() - key = f"{url}?{urlencode(params)}" - try: - cached_json = cache.get(key) - except Exception as error: - cached_json = None - logger.error(error, exc_info=error) - if cached_json: - logger.info("redis cache hit %s", key) - response_json = json.loads(cached_json.decode()) - else: - logger.info("redis cache miss %s", key) - async with session.get(url, headers=headers, params=params) as response: - try: - response_json = await response.json() - except json.decoder.JSONDecodeError as error: - logger.error(error, exc_info=error) - text = await response.text() - logger.error("response.text() = %s", text) - send_rocketchat_notification(f"JSONDecodeError, response.text() = {text}", error) - raise - try: - if response.status == 200: - cache.set(key, json.dumps(response_json).encode(), ex=cache_expiry_seconds) - except Exception as error: - logger.error(error, exc_info=error) - return response_json - - -async def fetch_paged_response_generator( - session: ClientSession, headers: dict, query_builder: BuildQuery, content_key: str, use_cache: bool = False, cache_expiry_seconds: int = 86400 -) -> AsyncGenerator[dict, None]: - """Asynchronous generator for iterating through responses from the API. - The response is a paged response, but this generator abstracts that away. - """ - # We don't know how many pages until our first call - so we assume one page to start with. - total_pages = 1 - page_count = 0 - while page_count < total_pages: - # Build up the request URL. 
- url, params = query_builder.query(page_count) - logger.debug("loading page %d...", page_count) - if use_cache and config.get("REDIS_USE") == "True": - logger.info("Using cache") - # We've been told and configured to use the redis cache. - response_json = await _fetch_cached_response(session, headers, url, params, cache_expiry_seconds) - else: - logger.info("Not using cache") - async with session.get(url, headers=headers, params=params) as response: - response_json = await response.json() - logger.debug("done loading page %d.", page_count) - - # keep this code around for dumping responses to a json file - useful for when you're writing - # tests to grab actual responses to use in fixtures. - # import base64 - # TODO: write a beter way to make a temporary filename - # fname = 'thing_{}_{}.json'.format(base64.urlsafe_b64encode(url.encode()), random.randint(0, 1000)) - # with open(fname, 'w') as f: - # json.dump(response_json, f) - - # Update the total page count. - total_pages = response_json["page"]["totalPages"] if "page" in response_json else 1 - for response_object in response_json["_embedded"][content_key]: - yield response_object - # Keep track of our page count. - page_count = page_count + 1 - - -async def fetch_detailed_geojson_stations(session: ClientSession, headers: dict, query_builder: BuildQuery) -> Tuple[Dict[int, GeoJsonDetailedWeatherStation], Dict[str, int]]: - """Fetch and marshall geojson station data""" - stations = {} - id_to_code_map = {} - # 1 week seems a reasonable period to cache stations for. - redis_station_cache_expiry: Final = int(config.get("REDIS_STATION_CACHE_EXPIRY", 604800)) - # Put the stations in a nice dictionary. 
- async for raw_station in fetch_paged_response_generator(session, headers, query_builder, "stations", True, redis_station_cache_expiry): - station_code = raw_station.get("stationCode") - station_status = raw_station.get("stationStatus", {}).get("id") - # Because we can't filter on status in the RSQL, we have to manually exclude stations that are - # not active. - if is_station_valid(raw_station): - id_to_code_map[raw_station.get("id")] = station_code - geojson_station = GeoJsonDetailedWeatherStation( - properties=DetailedWeatherStationProperties(code=station_code, name=raw_station.get("displayLabel")), - geometry=WeatherStationGeometry(coordinates=[raw_station.get("longitude"), raw_station.get("latitude")]), - ) - stations[station_code] = geojson_station - else: - logger.debug("station %s, status %s", station_code, station_status) - - return stations, id_to_code_map - - -async def fetch_raw_dailies_for_all_stations(session: ClientSession, headers: dict, time_of_interest: datetime) -> list: - """Fetch the noon values(observations and forecasts) for a given time, for all weather stations.""" - # We don't know how many pages until our first call - so we assume one page to start with. - total_pages = 1 - page_count = 0 - hourlies = [] - while page_count < total_pages: - # Build up the request URL. 
- url, params = prepare_fetch_dailies_for_all_stations_query(time_of_interest, page_count) - # Get dailies - async with session.get(url, params=params, headers=headers) as response: - dailies_json = await response.json() - total_pages = dailies_json["page"]["totalPages"] - hourlies.extend(dailies_json["_embedded"]["dailies"]) - page_count = page_count + 1 - return hourlies - - -def prepare_fetch_hourlies_query(raw_station: dict, start_timestamp: datetime, end_timestamp: datetime): - """Prepare url and params to fetch hourly readings from the WFWX Fireweather API.""" - base_url = config.get("WFWX_BASE_URL") - - logger.debug("requesting historic data from %s to %s", start_timestamp, end_timestamp) - - # Prepare query params and query: - query_start_timestamp = math.floor(start_timestamp.timestamp() * 1000) - query_end_timestamp = math.floor(end_timestamp.timestamp() * 1000) - - station_id = raw_station["id"] - params = {"startTimestamp": query_start_timestamp, "endTimestamp": query_end_timestamp, "stationId": station_id} - endpoint = "/v1/hourlies/search/findHourliesByWeatherTimestampBetweenAndStationIdEqualsOrderByWeatherTimestampAsc" - url = f"{base_url}{endpoint}" - - return url, params - - -def prepare_fetch_dailies_for_all_stations_query(time_of_interest: datetime, page_count: int): - """Prepare url and params for fetching dailies(that's forecast and observations for noon) for all. - stations.""" - base_url = config.get("WFWX_BASE_URL") - noon_date = _get_noon_date(time_of_interest) - timestamp = int(noon_date.timestamp() * 1000) - # one could filter on recordType.id==FORECAST or recordType.id==ACTUAL but we want it all. 
- params = {"query": f"weatherTimestamp=={timestamp}", "page": page_count, "size": config.get("WFWX_MAX_PAGE_SIZE", 1000)} - endpoint = "/v1/dailies/rsql" - url = f"{base_url}{endpoint}" - logger.info("%s %s", url, params) - return url, params - - -async def fetch_hourlies( - session: ClientSession, raw_station: dict, headers: dict, start_timestamp: datetime, end_timestamp: datetime, use_cache: bool, eco_division: EcodivisionSeasons -) -> WeatherStationHourlyReadings: - """Fetch hourly weather readings for the specified time range for a give station""" - logger.debug("fetching hourlies for %s(%s)", raw_station["displayLabel"], raw_station["stationCode"]) - - url, params = prepare_fetch_hourlies_query(raw_station, start_timestamp, end_timestamp) - - cache_expiry_seconds: Final = int(config.get("REDIS_HOURLIES_BY_STATION_CODE_CACHE_EXPIRY", 300)) - - # Get hourlies - if use_cache and config.get("REDIS_USE") == "True": - hourlies_json = await _fetch_cached_response(session, headers, url, params, cache_expiry_seconds) - else: - async with session.get(url, params=params, headers=headers) as response: - hourlies_json = await response.json() - - hourlies = [] - for hourly in hourlies_json["_embedded"]["hourlies"]: - # We only accept "ACTUAL" values - if hourly.get("hourlyMeasurementTypeCode", "").get("id") == "ACTUAL": - hourlies.append(parse_hourly(hourly)) - - logger.debug("fetched %d hourlies for %s(%s)", len(hourlies), raw_station["displayLabel"], raw_station["stationCode"]) - - return WeatherStationHourlyReadings(values=hourlies, station=parse_station(raw_station, eco_division)) - - -async def fetch_access_token(session: ClientSession) -> dict: - """Fetch an access token for WFWX Fireweather API""" - logger.debug("fetching access token...") - password = config.get("WFWX_SECRET") - user = config.get("WFWX_USER") - auth_url = config.get("WFWX_AUTH_URL") - cache = create_redis() - # NOTE: Consider using a hashed version of the password as part of the key. 
- params = {"user": user} - key = f"{auth_url}?{urlencode(params)}" - try: - cached_json = cache.get(key) - except Exception as error: - cached_json = None - logger.error(error, exc_info=error) - if cached_json: - logger.info("redis cache hit %s", auth_url) - response_json = json.loads(cached_json.decode()) - else: - logger.info("redis cache miss %s", auth_url) - async with session.get(auth_url, auth=BasicAuth(login=user, password=password)) as response: - response_json = await response.json() - try: - if response.status == 200: - # We expire when the token expires, or 10 minutes, whichever is less. - # NOTE: only caching for 10 minutes right now, since we aren't handling cases - # where the token is invalidated. - redis_auth_cache_expiry: Final = int(config.get("REDIS_AUTH_CACHE_EXPIRY", 600)) - expires = min(response_json["expires_in"], redis_auth_cache_expiry) - cache.set(key, json.dumps(response_json).encode(), ex=expires) - except Exception as error: - logger.error(error, exc_info=error) - return response_json - - -async def fetch_stations_by_group_id(session: ClientSession, headers: dict, group_id: str): - logger.debug(f"Fetching stations for group {group_id}") - base_url = config.get("WFWX_BASE_URL") - url = f"{base_url}/v1/stationGroups/{group_id}/members" - - async with session.get(url, headers=headers) as response: - raw_stations = await response.json() - return raw_stations diff --git a/backend/packages/wps-wf1/README.md b/backend/packages/wps-wf1/README.md new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/packages/wps-wf1/pyproject.toml b/backend/packages/wps-wf1/pyproject.toml new file mode 100644 index 0000000000..049e41aef6 --- /dev/null +++ b/backend/packages/wps-wf1/pyproject.toml @@ -0,0 +1,18 @@ +[project] +name = "wps-wf1" +version = "0.1.0" +description = "Wildfire Predictive Services Unit WF1 utils" +authors = [ + { name = "Darren Boss", email = "darren.boss@gov.bc.ca" } +] +requires-python = ">=3.12.3,<4.0" +dependencies 
= [ + "aiohttp>=3.13.2", +] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["src/wps_wf1"] diff --git a/backend/packages/wps-wf1/src/wps_wf1/__init__.py b/backend/packages/wps-wf1/src/wps_wf1/__init__.py new file mode 100644 index 0000000000..92d06689e1 --- /dev/null +++ b/backend/packages/wps-wf1/src/wps_wf1/__init__.py @@ -0,0 +1,12 @@ + +"""Thin WFWX API client with optional caching. +""" +from wps_wf1.wfwx_settings import WfwxSettings +from wps_wf1.wfwx_client import WfwxClient +from wps_wf1.cache_protocol import CacheProtocol + +__all__ = [ + 'WfwxSettings', + 'WfwxClient', + 'CacheProtocol', +] diff --git a/backend/packages/wps-wf1/src/wps_wf1/cache_protocol.py b/backend/packages/wps-wf1/src/wps_wf1/cache_protocol.py new file mode 100644 index 0000000000..70b17f8cad --- /dev/null +++ b/backend/packages/wps-wf1/src/wps_wf1/cache_protocol.py @@ -0,0 +1,11 @@ +from typing import Optional, Protocol + + +class CacheProtocol(Protocol): + """ + Interface for cache implementation in wps-wf1 package to support dependency injection + """ + + def get(self, key: str) -> Optional[bytes]: ... + + def set(self, key: str, value: bytes, ex: int) -> None: ... 
diff --git a/backend/packages/wps-shared/src/wps_shared/data/ERC_ECODIV_polygon/ERC_ECODIV_polygon.cpg b/backend/packages/wps-wf1/src/wps_wf1/ecodivisions/ERC_ECODIV_polygon/ERC_ECODIV_polygon.cpg similarity index 100% rename from backend/packages/wps-shared/src/wps_shared/data/ERC_ECODIV_polygon/ERC_ECODIV_polygon.cpg rename to backend/packages/wps-wf1/src/wps_wf1/ecodivisions/ERC_ECODIV_polygon/ERC_ECODIV_polygon.cpg diff --git a/backend/packages/wps-shared/src/wps_shared/data/ERC_ECODIV_polygon/ERC_ECODIV_polygon.dbf b/backend/packages/wps-wf1/src/wps_wf1/ecodivisions/ERC_ECODIV_polygon/ERC_ECODIV_polygon.dbf similarity index 100% rename from backend/packages/wps-shared/src/wps_shared/data/ERC_ECODIV_polygon/ERC_ECODIV_polygon.dbf rename to backend/packages/wps-wf1/src/wps_wf1/ecodivisions/ERC_ECODIV_polygon/ERC_ECODIV_polygon.dbf diff --git a/backend/packages/wps-shared/src/wps_shared/data/ERC_ECODIV_polygon/ERC_ECODIV_polygon.prj b/backend/packages/wps-wf1/src/wps_wf1/ecodivisions/ERC_ECODIV_polygon/ERC_ECODIV_polygon.prj similarity index 100% rename from backend/packages/wps-shared/src/wps_shared/data/ERC_ECODIV_polygon/ERC_ECODIV_polygon.prj rename to backend/packages/wps-wf1/src/wps_wf1/ecodivisions/ERC_ECODIV_polygon/ERC_ECODIV_polygon.prj diff --git a/backend/packages/wps-shared/src/wps_shared/data/ERC_ECODIV_polygon/ERC_ECODIV_polygon.shp b/backend/packages/wps-wf1/src/wps_wf1/ecodivisions/ERC_ECODIV_polygon/ERC_ECODIV_polygon.shp similarity index 100% rename from backend/packages/wps-shared/src/wps_shared/data/ERC_ECODIV_polygon/ERC_ECODIV_polygon.shp rename to backend/packages/wps-wf1/src/wps_wf1/ecodivisions/ERC_ECODIV_polygon/ERC_ECODIV_polygon.shp diff --git a/backend/packages/wps-shared/src/wps_shared/data/ERC_ECODIV_polygon/ERC_ECODIV_polygon.shx b/backend/packages/wps-wf1/src/wps_wf1/ecodivisions/ERC_ECODIV_polygon/ERC_ECODIV_polygon.shx similarity index 100% rename from 
backend/packages/wps-shared/src/wps_shared/data/ERC_ECODIV_polygon/ERC_ECODIV_polygon.shx rename to backend/packages/wps-wf1/src/wps_wf1/ecodivisions/ERC_ECODIV_polygon/ERC_ECODIV_polygon.shx diff --git a/backend/packages/wps-shared/src/wps_shared/data/ecodivision_seasons.py b/backend/packages/wps-wf1/src/wps_wf1/ecodivisions/ecodivision_seasons.py similarity index 97% rename from backend/packages/wps-shared/src/wps_shared/data/ecodivision_seasons.py rename to backend/packages/wps-wf1/src/wps_wf1/ecodivisions/ecodivision_seasons.py index b92c33aba2..4cb85e2eb6 100644 --- a/backend/packages/wps-shared/src/wps_shared/data/ecodivision_seasons.py +++ b/backend/packages/wps-wf1/src/wps_wf1/ecodivisions/ecodivision_seasons.py @@ -5,6 +5,7 @@ import logging import geopandas from shapely.geometry import Point +from wps_wf1.cache_protocol import CacheProtocol from wps_shared.utils.redis import create_redis @@ -26,7 +27,7 @@ class EcodivisionSeasons: stations. """ - def __init__(self, cache_key: str): + def __init__(self, cache_key: str, cache: CacheProtocol): """ The cache key would typically be a list of stations. 
""" with open(core_season_file_path, encoding="utf-8") as file_handle: @@ -35,13 +36,12 @@ def __init__(self, cache_key: str): self.ecodivisions = geopandas.read_file(ecodiv_shape_file_path) self.name_lookup: Dict[str, str] = {} self.cache_key = cache_key - self.cache = None + self.cache = cache self.update_cache_on_exit = False def __enter__(self): if self.cache_key: self.cache_key = f'ecodivision_names:{self.cache_key}' - self.cache = create_redis() try: self.name_lookup = self.cache.get(self.cache_key) if self.name_lookup is None: diff --git a/backend/packages/wps-shared/src/wps_shared/data/ecodivisions_core_seasons.json b/backend/packages/wps-wf1/src/wps_wf1/ecodivisions/ecodivisions_core_seasons.json similarity index 100% rename from backend/packages/wps-shared/src/wps_shared/data/ecodivisions_core_seasons.json rename to backend/packages/wps-wf1/src/wps_wf1/ecodivisions/ecodivisions_core_seasons.json diff --git a/backend/packages/wps-wf1/src/wps_wf1/models.py b/backend/packages/wps-wf1/src/wps_wf1/models.py new file mode 100644 index 0000000000..09178f63bb --- /dev/null +++ b/backend/packages/wps-wf1/src/wps_wf1/models.py @@ -0,0 +1,306 @@ +"""This module contains pydandict schemas relating to weather stations for the API.""" + +from datetime import datetime, timezone +from enum import Enum +from typing import List, Optional + +from pydantic import BaseModel, ConfigDict, Field + + +class FireZone(BaseModel): + id: int + display_label: str + fire_centre: str + + +class StationFireCentre(BaseModel): + """The fire centre associated with a station""" + + id: int + display_label: str + + +class Season(BaseModel): + """A fire season consists of a start date (month and day) and an end date (month and day).""" + + start_month: int + start_day: int + end_month: int + end_day: int + + +class WeatherStationProperties(BaseModel): + """Non-geometrical weather station properties""" + + code: int + name: str + ecodivision_name: Optional[str] = None + core_season: 
Optional[Season] = None + + +class WeatherVariables(BaseModel): + """Weather variables""" + + temperature: Optional[float] = None + relative_humidity: Optional[float] = None + + +class DetailedWeatherStationProperties(WeatherStationProperties): + """Detailed, non-geometrical weather station properties""" + + observations: Optional[WeatherVariables] = None + forecasts: Optional[WeatherVariables] = None + + +class WeatherStationGeometry(BaseModel): + """Geometrical coordinates of a weather station""" + + type: str = "Point" + coordinates: List[float] + + +class GeoJsonDetailedWeatherStation(BaseModel): + """GeoJson formatted weather station with details""" + + type: str = "Feature" + properties: DetailedWeatherStationProperties + geometry: WeatherStationGeometry + + +class NoonForecast(BaseModel): + """Class representing noon forecasts.""" + + weather_date: datetime + station_code: int + temp_valid: bool + temperature: float + rh_valid: bool + relative_humidity: float + wdir_valid: bool + wind_direction: float + wspeed_valid: bool + wind_speed: float + precip_valid: bool + precipitation: float + gc: float + ffmc: float + dmc: float + dc: float + isi: float + bui: float + fwi: float + created_at: datetime + wfwx_update_date: datetime + + +class WeatherStation(BaseModel): + """A fire weather station has a code, name and geographical coordinate.""" + + zone_code: Optional[str] = None + code: int + name: str + lat: float + long: float + ecodivision_name: Optional[str] = None + core_season: Optional[Season] = None + elevation: Optional[int] = None + wfwx_station_uuid: Optional[str] = None + + +class StationCodeList(BaseModel): + """List of station codes.""" + + stations: List[int] + + +class WeatherStationGroupMember(BaseModel): + """Description of a station in a group""" + + id: str + display_label: str + fire_centre: StationFireCentre + fire_zone: Optional[FireZone] = None + station_code: int + station_status: str + + +class WeatherStationGroup(BaseModel): + """A 
weather station group from WF1""" + + display_label: str + group_description: Optional[str] = None + group_owner_user_guid: str + group_owner_user_id: str + id: str + + +class WeatherReading(BaseModel): + """Weather reading for a particular point in time""" + + datetime: Optional[datetime] + temperature: Optional[float] = None + relative_humidity: Optional[float] = None + wind_speed: Optional[float] = None + wind_direction: Optional[float] = None + barometric_pressure: Optional[float] = None + precipitation: Optional[float] = None + dewpoint: Optional[float] = None + ffmc: Optional[float] = None + isi: Optional[float] = None + fwi: Optional[float] = None + observation_valid: Optional[bool] = None + observation_valid_comment: Optional[str] = None + + +class WeatherStationHourlyReadings(BaseModel): + """The weather readings for a particular station""" + + values: List[WeatherReading] + station: WeatherStation + + +class WFWXWeatherStation(BaseModel): + """A WFWX station includes a code and WFWX API-specific ID""" + + model_config = ConfigDict( + populate_by_name=True, frozen=True + ) # allows populating by alias name, and frozen makes it hashable for collections + + wfwx_id: str + code: int + name: str + lat: float = Field(alias="latitude") + long: float = Field(alias="longitude") + elevation: int + zone_code: Optional[str] + + +class HourlyActual(BaseModel): + """Class representing table structure of 'hourly_actuals.'""" + + weather_date: datetime + station_code: int + temp_valid: Optional[bool] = False + temperature: Optional[float] = None + dewpoint: Optional[float] = None + rh_valid: Optional[bool] = False + relative_humidity: Optional[float] + wdir_valid: Optional[bool] = False + wind_direction: Optional[float] + wspeed_valid: Optional[bool] = False + wind_speed: Optional[float] + precip_valid: Optional[bool] = False + precipitation: Optional[float] + ffmc: Optional[float] + isi: Optional[float] + fwi: Optional[float] + created_at: Optional[datetime] = 
datetime.now(tz=timezone.utc) + + +class FireCenterStation(BaseModel): + """A fire weather station has a code, name and geographical coordinate.""" + + code: int + name: str + zone: Optional[str] = None + + +class FireCentre(BaseModel): + """The highest-level organizational unit for wildfire planning. Each fire centre + has 1 or more planning areas within it.""" + + id: str + name: str + stations: List[FireCenterStation] + + +class StationDailyFromWF1(BaseModel): + """Daily weather data (forecast or observed) for a specific station and date retrieved from WF1 API""" + + created_by: str + forecast_id: str + station_code: int + station_name: str + utcTimestamp: datetime + temperature: Optional[float] = None + relative_humidity: Optional[float] = None + precipitation: Optional[float] = None + wind_direction: Optional[float] = None + wind_speed: Optional[float] = None + + +class WeatherDeterminate(str, Enum): + """Enumerator for all valid determinate weather sources""" + + GDPS = "GDPS" + GDPS_BIAS = "GDPS_BIAS" + GFS = "GFS" + GFS_BIAS = "GFS_BIAS" + HRDPS = "HRDPS" + HRDPS_BIAS = "HRDPS_BIAS" + NAM = "NAM" + NAM_BIAS = "NAM_BIAS" + RDPS = "RDPS" + RDPS_BIAS = "RDPS_BIAS" + GRASS_CURING_CWFIS = "Grass_Curing_CWFIS" + ECMWF = "ECMWF" + + # non prediction models + FORECAST = "Forecast" + ACTUAL = "Actual" + + @classmethod + def from_string(cls, value: str) -> "WeatherDeterminate": + try: + return cls(value) + except ValueError: + raise ValueError(f"{value!r} is not a valid WeatherDeterminate") + + +class WeatherIndeterminate(BaseModel): + """Used to represent a predicted or actual value""" + + station_code: int + station_name: str + determinate: WeatherDeterminate + utc_timestamp: datetime + latitude: Optional[float] = None + longitude: Optional[float] = None + temperature: Optional[float] = None + relative_humidity: Optional[float] = None + precipitation: Optional[float] = None + wind_direction: Optional[float] = None + wind_speed: Optional[float] = None + 
fine_fuel_moisture_code: Optional[float] = None + duff_moisture_code: Optional[float] = None + drought_code: Optional[float] = None + initial_spread_index: Optional[float] = None + build_up_index: Optional[float] = None + fire_weather_index: Optional[float] = None + danger_rating: Optional[int] = None + grass_curing: Optional[float] = None + update_date: Optional[datetime] = None + prediction_run_timestamp: Optional[datetime] = None + + +class WF1ForecastRecordType(BaseModel): + id: str = "FORECAST" + displayLabel: str = "Forecast" + + +class WF1PostForecast(BaseModel): + """Used to represent a forecast to be POSTed to WF1""" + + archive: str = "false" + createdBy: Optional[str] = None + id: Optional[str] = None + station: str # station URL + stationId: str # station UUID + weatherTimestamp: int # UTC timestamp in millis + temperature: float + relativeHumidity: float + precipitation: float + windSpeed: float + windDirection: Optional[float] = None + grasslandCuring: Optional[float] = None + recordType: WF1ForecastRecordType diff --git a/backend/packages/wps-shared/src/wps_shared/wildfire_one/schema_parsers.py b/backend/packages/wps-wf1/src/wps_wf1/parsers.py similarity index 70% rename from backend/packages/wps-shared/src/wps_shared/wildfire_one/schema_parsers.py rename to backend/packages/wps-wf1/src/wps_wf1/parsers.py index ca5b21e8c6..5f9c5469b3 100644 --- a/backend/packages/wps-shared/src/wps_shared/wildfire_one/schema_parsers.py +++ b/backend/packages/wps-wf1/src/wps_wf1/parsers.py @@ -1,23 +1,33 @@ -"""Parsers that extract fields from WFWX API responses and build ours""" - -import math import enum import logging +import math from datetime import datetime, timezone -from typing import Generator, List, Optional - -from pydantic import BaseModel, ConfigDict, Field -from wps_shared.db.models.observations import HourlyActual -from wps_shared.schemas.morecast_v2 import MoreCastForecastOutput, StationDailyFromWF1, WeatherDeterminate, WeatherIndeterminate -from 
wps_shared.schemas.stations import WeatherStationGroup, WeatherStation, WeatherStationGroupMember, FireZone, StationFireCentre -from wps_shared.utils.dewpoint import compute_dewpoint -from wps_shared.data.ecodivision_seasons import EcodivisionSeasons -from wps_shared.schemas.observations import WeatherReading -from wps_shared.db.models.forecasts import NoonForecast -from wps_shared.utils.time import get_utc_now -from wps_shared.wildfire_one.util import is_station_valid, is_station_fire_zone_valid, get_zone_code_prefix -from wps_shared.wildfire_one.validation import get_valid_flags -from wps_shared.schemas.fba import FireCentre, FireCenterStation +from typing import Generator, List + +from wps_wf1.ecodivisions.ecodivision_seasons import EcodivisionSeasons +from wps_wf1.models import ( + FireCenterStation, + FireCentre, + FireZone, + HourlyActual, + NoonForecast, + StationDailyFromWF1, + StationFireCentre, + WeatherDeterminate, + WeatherIndeterminate, + WeatherReading, + WeatherStation, + WeatherStationGroup, + WeatherStationGroupMember, + WFWXWeatherStation, +) +from wps_wf1.util import ( + compute_dewpoint, + get_zone_code_prefix, + is_station_fire_zone_valid, + is_station_valid, +) +from wps_wf1.validation import get_valid_flags logger = logging.getLogger(__name__) @@ -28,161 +38,6 @@ class WF1RecordTypeEnum(enum.Enum): MANUAL = "MANUAL" -class WFWXWeatherStation(BaseModel): - """A WFWX station includes a code and WFWX API-specific ID""" - - model_config = ConfigDict(populate_by_name=True, frozen=True) # allows populating by alias name, and frozen makes it hashable for collections - - wfwx_id: str - code: int - name: str - lat: float = Field(alias="latitude") - long: float = Field(alias="longitude") - elevation: int - zone_code: Optional[str] - - -async def station_list_mapper(raw_stations: Generator[dict, None, None]): - """Maps raw stations to WeatherStation list""" - stations = [] - # Iterate through "raw" station data. 
- async for raw_station in raw_stations: - # If the station is valid, add it to our list of stations. - if is_station_valid(raw_station): - stations.append(WeatherStation(code=raw_station["stationCode"], name=raw_station["displayLabel"], lat=raw_station["latitude"], long=raw_station["longitude"])) - return stations - - -async def dailies_list_mapper(raw_dailies: Generator[dict, None, None], record_type: WF1RecordTypeEnum): - """Maps raw dailies for list of StationDailyFromWF1 objects""" - wf1_dailies: List[StationDailyFromWF1] = [] - async for raw_daily in raw_dailies: - if is_station_valid(raw_daily.get("stationData")) and raw_daily.get("recordType").get("id") == record_type.value: - wf1_dailies.append( - StationDailyFromWF1( - created_by=raw_daily.get("createdBy"), - forecast_id=raw_daily.get("id"), - station_code=raw_daily.get("stationData").get("stationCode"), - station_name=raw_daily.get("stationData").get("displayLabel"), - utcTimestamp=datetime.fromtimestamp(raw_daily.get("weatherTimestamp") / 1000, tz=timezone.utc), - temperature=raw_daily.get("temperature"), - relative_humidity=raw_daily.get("relativeHumidity"), - precipitation=raw_daily.get("precipitation"), - wind_direction=raw_daily.get("windDirection"), - wind_speed=raw_daily.get("windSpeed"), - ) - ) - return wf1_dailies - - -async def weather_indeterminate_list_mapper(raw_dailies: Generator[dict, None, None]): - """Maps raw dailies to weather indeterminate list""" - observed_dailies = [] - forecasts = [] - async for raw_daily in raw_dailies: - station_code = raw_daily.get("stationData").get("stationCode") - station_name = raw_daily.get("stationData").get("displayLabel") - latitude = raw_daily.get("stationData").get("latitude") - longitude = raw_daily.get("stationData").get("longitude") - utc_timestamp = datetime.fromtimestamp(raw_daily.get("weatherTimestamp") / 1000, tz=timezone.utc) - precip = raw_daily.get("precipitation") - rh = raw_daily.get("relativeHumidity") - temp = 
raw_daily.get("temperature") - wind_spd = raw_daily.get("windSpeed") - wind_dir = raw_daily.get("windDirection") - ffmc = raw_daily.get("fineFuelMoistureCode") - dmc = raw_daily.get("duffMoistureCode") - dc = raw_daily.get("droughtCode") - isi = raw_daily.get("initialSpreadIndex") - bui = raw_daily.get("buildUpIndex") - fwi = raw_daily.get("fireWeatherIndex") - dgr = raw_daily.get("dangerForest") - gc = raw_daily.get("grasslandCuring") - - if is_station_valid(raw_daily.get("stationData")) and raw_daily.get("recordType").get("id") in [WF1RecordTypeEnum.ACTUAL.value, WF1RecordTypeEnum.MANUAL.value]: - observed_dailies.append( - WeatherIndeterminate( - station_code=station_code, - station_name=station_name, - latitude=latitude, - longitude=longitude, - determinate=WeatherDeterminate.ACTUAL, - utc_timestamp=utc_timestamp, - temperature=temp, - relative_humidity=rh, - precipitation=precip, - wind_direction=wind_dir, - wind_speed=wind_spd, - fine_fuel_moisture_code=ffmc, - duff_moisture_code=dmc, - drought_code=dc, - initial_spread_index=isi, - build_up_index=bui, - fire_weather_index=fwi, - danger_rating=dgr, - grass_curing=gc, - ) - ) - elif is_station_valid(raw_daily.get("stationData")) and raw_daily.get("recordType").get("id") == WF1RecordTypeEnum.FORECAST.value: - forecasts.append( - WeatherIndeterminate( - station_code=station_code, - station_name=station_name, - latitude=latitude, - longitude=longitude, - determinate=WeatherDeterminate.FORECAST, - utc_timestamp=utc_timestamp, - temperature=temp, - relative_humidity=rh, - precipitation=precip, - wind_direction=wind_dir, - wind_speed=wind_spd, - grass_curing=gc, - ) - ) - return observed_dailies, forecasts - - -async def wfwx_station_list_mapper(raw_stations: Generator[dict, None, None]) -> List[WFWXWeatherStation]: - """Maps raw stations to WFWXWeatherStation list""" - stations = [] - # Iterate through "raw" station data. 
- async for raw_station in raw_stations: - # If the station is valid, add it to our list of stations. - if is_station_valid(raw_station): - stations.append( - WFWXWeatherStation( - wfwx_id=raw_station["id"], - code=raw_station["stationCode"], - latitude=raw_station["latitude"], - longitude=raw_station["longitude"], - elevation=raw_station["elevation"], - name=raw_station["displayLabel"], - zone_code=construct_zone_code(raw_station), - ) - ) - return stations - - -async def fire_center_mapper(raw_stations: Generator[dict, None, None]): - """Maps raw stations to their respective fire centers.""" - fire_centers = {} - # Iterate through "raw" station data. - async for raw_station in raw_stations: - # If the station is valid, add it to our list of stations. - if is_station_valid(raw_station) and is_station_fire_zone_valid(raw_station): - raw_fire_center = raw_station["fireCentre"] - fire_center_id = raw_fire_center["id"] - station = FireCenterStation(code=raw_station["stationCode"], name=raw_station["displayLabel"], zone=construct_zone_code(raw_station)) - - fire_center = fire_centers.get(fire_center_id, None) - if fire_center is None: - fire_centers[fire_center_id] = FireCentre(id=str(raw_fire_center["id"]), name=raw_fire_center["displayLabel"], stations=[station]) - else: - fire_center.stations.append(station) - return fire_centers - - def construct_zone_code(station: any): """Constructs the 2-character zone code for a weather station, using the station's zone.alias integer value, prefixed by the fire centre-to-letter mapping. 
@@ -199,26 +54,11 @@ def construct_zone_code(station: any): return zone_code -def parse_station(station, eco_division: EcodivisionSeasons) -> WeatherStation: - """Transform from the json object returned by wf1, to our station object.""" - core_seasons = eco_division.get_core_seasons() - ecodiv_name = eco_division.get_ecodivision_name(station["stationCode"], station["latitude"], station["longitude"]) - return WeatherStation( - zone_code=construct_zone_code(station), - code=station["stationCode"], - name=station["displayLabel"], - lat=station["latitude"], - long=station["longitude"], - ecodivision_name=ecodiv_name, - core_season=core_seasons[ecodiv_name]["core_season"], - elevation=station["elevation"], - wfwx_station_uuid=station["id"], - ) - - def parse_hourly(hourly) -> WeatherReading: """Transform from the raw hourly json object returned by wf1, to our hourly object.""" - timestamp = datetime.fromtimestamp(int(hourly["weatherTimestamp"]) / 1000, tz=timezone.utc).isoformat() + timestamp = datetime.fromtimestamp( + int(hourly["weatherTimestamp"]) / 1000, tz=timezone.utc + ).isoformat() return WeatherReading( datetime=timestamp, temperature=hourly.get("temperature", None), @@ -236,12 +76,66 @@ def parse_hourly(hourly) -> WeatherReading: ) +def parse_hourly_actual(station_code: int, hourly): + """Transform from the raw hourly json object returned by wf1, to our hour actual object.""" + timestamp = datetime.fromtimestamp( + int(hourly["weatherTimestamp"]) / 1000, tz=timezone.utc + ).isoformat() + hourly_actual = HourlyActual( + weather_date=timestamp, + station_code=station_code, + temperature=hourly.get("temperature", math.nan), + relative_humidity=hourly.get("relativeHumidity", math.nan), + dewpoint=compute_dewpoint(hourly.get("temperature"), hourly.get("relativeHumidity")), + wind_speed=hourly.get("windSpeed", math.nan), + wind_direction=hourly.get("windDirection", math.nan), + precipitation=hourly.get("precipitation", math.nan), + 
ffmc=hourly.get("fineFuelMoistureCode", None), + isi=hourly.get("initialSpreadIndex", None), + fwi=hourly.get("fireWeatherIndex", None), + ) + temp_valid, rh_valid, wdir_valid, wspeed_valid, precip_valid = get_valid_flags(hourly_actual) + hourly_actual.temp_valid = temp_valid + hourly_actual.rh_valid = rh_valid + hourly_actual.wdir_valid = wdir_valid + hourly_actual.wspeed_valid = wspeed_valid + hourly_actual.precip_valid = precip_valid + + observation_valid = hourly.get("observationValidInd") + observation_valid_comment = hourly.get("observationValidComment") + if observation_valid is None or bool(observation_valid) is False: + logger.warning( + "Invalid hourly received from WF1 API for station code %s at time %s: %s", + station_code, + hourly_actual.weather_date, + observation_valid_comment, + ) + + is_obs_invalid = ( + not temp_valid and not rh_valid and not wdir_valid and not wspeed_valid and not precip_valid + ) + + if is_obs_invalid: + logger.error( + "Hourly actual not written to DB for station code %s at time %s: %s", + station_code, + hourly_actual.weather_date, + observation_valid_comment, + ) + + # don't write the HourlyActual to our database if every value is invalid. If even one + # weather variable observed is valid, write the HourlyActual to DB. 
+ return None if is_obs_invalid else hourly_actual + + def parse_noon_forecast(station_code, forecast) -> NoonForecast: """Transform from the raw forecast json object returned by wf1, to our noon forecast object.""" - timestamp = datetime.fromtimestamp(int(forecast["weatherTimestamp"]) / 1000, tz=timezone.utc).isoformat() + timestamp = datetime.fromtimestamp( + int(forecast["weatherTimestamp"]) / 1000, tz=timezone.utc + ).isoformat() noon_forecast = NoonForecast( weather_date=timestamp, - created_at=get_utc_now(), + created_at=datetime.now(tz=timezone.utc), wfwx_update_date=forecast.get("updateDate", None), station_code=station_code, temperature=forecast.get("temperature", math.nan), @@ -266,45 +160,59 @@ def parse_noon_forecast(station_code, forecast) -> NoonForecast: return noon_forecast -def parse_hourly_actual(station_code: int, hourly): - """Transform from the raw hourly json object returned by wf1, to our hour actual object.""" - timestamp = datetime.fromtimestamp(int(hourly["weatherTimestamp"]) / 1000, tz=timezone.utc).isoformat() - hourly_actual = HourlyActual( - weather_date=timestamp, - station_code=station_code, - temperature=hourly.get("temperature", math.nan), - relative_humidity=hourly.get("relativeHumidity", math.nan), - dewpoint=compute_dewpoint(hourly.get("temperature"), hourly.get("relativeHumidity")), - wind_speed=hourly.get("windSpeed", math.nan), - wind_direction=hourly.get("windDirection", math.nan), - precipitation=hourly.get("precipitation", math.nan), - ffmc=hourly.get("fineFuelMoistureCode", None), - isi=hourly.get("initialSpreadIndex", None), - fwi=hourly.get("fireWeatherIndex", None), +def parse_station(station, eco_division: EcodivisionSeasons) -> WeatherStation: + """Transform from the json object returned by wf1, to our station object.""" + core_seasons = eco_division.get_core_seasons() + ecodiv_name = eco_division.get_ecodivision_name( + station["stationCode"], station["latitude"], station["longitude"] + ) + return WeatherStation( + 
zone_code=construct_zone_code(station), + code=station["stationCode"], + name=station["displayLabel"], + lat=station["latitude"], + long=station["longitude"], + ecodivision_name=ecodiv_name, + core_season=core_seasons[ecodiv_name]["core_season"], + elevation=station["elevation"], + wfwx_station_uuid=station["id"], ) - temp_valid, rh_valid, wdir_valid, wspeed_valid, precip_valid = get_valid_flags(hourly_actual) - hourly_actual.temp_valid = temp_valid - hourly_actual.rh_valid = rh_valid - hourly_actual.wdir_valid = wdir_valid - hourly_actual.wspeed_valid = wspeed_valid - hourly_actual.precip_valid = precip_valid - observation_valid = hourly.get("observationValidInd") - observation_valid_comment = hourly.get("observationValidComment") - if observation_valid is None or bool(observation_valid) is False: - logger.warning("Invalid hourly received from WF1 API for station code %s at time %s: %s", station_code, hourly_actual.weather_date, observation_valid_comment) - is_obs_invalid = not temp_valid and not rh_valid and not wdir_valid and not wspeed_valid and not precip_valid +async def station_list_mapper(raw_stations: Generator[dict, None, None]): + """Maps raw stations to WeatherStation list""" + stations = [] + # Iterate through "raw" station data. + async for raw_station in raw_stations: + # If the station is valid, add it to our list of stations. 
+ if is_station_valid(raw_station): + stations.append( + WeatherStation( + code=raw_station["stationCode"], + name=raw_station["displayLabel"], + lat=raw_station["latitude"], + long=raw_station["longitude"], + ) + ) + return stations + - if is_obs_invalid: - logger.error("Hourly actual not written to DB for station code %s at time %s: %s", station_code, hourly_actual.weather_date, observation_valid_comment) +def unique_weather_stations_mapper(stations) -> List[WeatherStationGroupMember]: + all_stations = weather_stations_mapper(stations) + unique_stations = [] + stations_added = set() - # don't write the HourlyActual to our database if every value is invalid. If even one - # weather variable observed is valid, write the HourlyActual to DB. - return None if is_obs_invalid else hourly_actual + for station in all_stations: + if station.station_code not in stations_added: + unique_stations.append(station) + stations_added.add(station.station_code) + + return unique_stations -async def weather_station_group_mapper(raw_station_groups_by_owner: Generator[dict, None, None]) -> List[WeatherStationGroup]: +async def weather_station_group_mapper( + raw_station_groups_by_owner: Generator[dict, None, None], +) -> List[WeatherStationGroup]: """Maps raw weather station groups to WeatherStationGroup""" weather_station_groups = [] async for raw_group in raw_station_groups_by_owner: @@ -325,11 +233,21 @@ def weather_stations_mapper(stations) -> List[WeatherStationGroupMember]: mapped_stations = [] for item in stations: station = item["station"] - fire_zone = FireZone(id=station["zone"]["id"], display_label=station["zone"]["displayLabel"], fire_centre=station["zone"]["fireCentre"]) if station["zone"] is not None else None + fire_zone = ( + FireZone( + id=station["zone"]["id"], + display_label=station["zone"]["displayLabel"], + fire_centre=station["zone"]["fireCentre"], + ) + if station["zone"] is not None + else None + ) weather_station = WeatherStationGroupMember( id=station["id"], 
display_label=station["displayLabel"], - fire_centre=StationFireCentre(id=station["fireCentre"]["id"], display_label=station["fireCentre"]["displayLabel"]), + fire_centre=StationFireCentre( + id=station["fireCentre"]["id"], display_label=station["fireCentre"]["displayLabel"] + ), fire_zone=fire_zone, station_code=station["stationCode"], station_status=station["stationStatus"]["id"], @@ -339,39 +257,155 @@ def weather_stations_mapper(stations) -> List[WeatherStationGroupMember]: return mapped_stations -def unique_weather_stations_mapper(stations) -> List[WeatherStationGroupMember]: - all_stations = weather_stations_mapper(stations) - unique_stations = [] - stations_added = set() +async def wfwx_station_list_mapper( + raw_stations: Generator[dict, None, None], +) -> List[WFWXWeatherStation]: + """Maps raw stations to WFWXWeatherStation list""" + stations = [] + # Iterate through "raw" station data. + async for raw_station in raw_stations: + # If the station is valid, add it to our list of stations. + if is_station_valid(raw_station): + stations.append( + WFWXWeatherStation( + wfwx_id=raw_station["id"], + code=raw_station["stationCode"], + latitude=raw_station["latitude"], + longitude=raw_station["longitude"], + elevation=raw_station["elevation"], + name=raw_station["displayLabel"], + zone_code=construct_zone_code(raw_station), + ) + ) + return stations - for station in all_stations: - if station.station_code not in stations_added: - unique_stations.append(station) - stations_added.add(station.station_code) - return unique_stations +async def fire_center_mapper(raw_stations: Generator[dict, None, None]): + """Maps raw stations to their respective fire centers.""" + fire_centers = {} + # Iterate through "raw" station data. + async for raw_station in raw_stations: + # If the station is valid, add it to our list of stations. 
+ if is_station_valid(raw_station) and is_station_fire_zone_valid(raw_station): + raw_fire_center = raw_station["fireCentre"] + fire_center_id = raw_fire_center["id"] + station = FireCenterStation( + code=raw_station["stationCode"], + name=raw_station["displayLabel"], + zone=construct_zone_code(raw_station), + ) + + fire_center = fire_centers.get(fire_center_id, None) + if fire_center is None: + fire_centers[fire_center_id] = FireCentre( + id=str(raw_fire_center["id"]), + name=raw_fire_center["displayLabel"], + stations=[station], + ) + else: + fire_center.stations.append(station) + return fire_centers -def transform_morecastforecastoutput_to_weatherindeterminate(forecast_outputs: List[MoreCastForecastOutput], wfwx_stations: List[WFWXWeatherStation]) -> List[WeatherIndeterminate]: - """Helper function to convert list of MoreCastForecastOutput objects (taken from our database) - into list of WeatherIndeterminate objects to match the structure of the forecasts pulled from WFWX. - wfwx_stations list (station data from WFWX) is used to populate station_name data. 
- """ - weather_indeterminates: List[WeatherIndeterminate] = [] - for output in forecast_outputs: - station = next(s for s in wfwx_stations if s.code == output.station_code) - - weather_indeterminates.append( - WeatherIndeterminate( - station_code=output.station_code, - station_name=station.name if station else "", - utc_timestamp=output.for_date, - determinate=WeatherDeterminate.FORECAST, - temperature=output.temp, - relative_humidity=output.rh, - precipitation=output.precip, - wind_direction=output.wind_direction, - wind_speed=output.wind_speed, +async def dailies_list_mapper( + raw_dailies: Generator[dict, None, None], record_type: WF1RecordTypeEnum +): + """Maps raw dailies for list of StationDailyFromWF1 objects""" + wf1_dailies: List[StationDailyFromWF1] = [] + async for raw_daily in raw_dailies: + if ( + is_station_valid(raw_daily.get("stationData")) + and raw_daily.get("recordType").get("id") == record_type.value + ): + wf1_dailies.append( + StationDailyFromWF1( + created_by=raw_daily.get("createdBy"), + forecast_id=raw_daily.get("id"), + station_code=raw_daily.get("stationData").get("stationCode"), + station_name=raw_daily.get("stationData").get("displayLabel"), + utcTimestamp=datetime.fromtimestamp( + raw_daily.get("weatherTimestamp") / 1000, tz=timezone.utc + ), + temperature=raw_daily.get("temperature"), + relative_humidity=raw_daily.get("relativeHumidity"), + precipitation=raw_daily.get("precipitation"), + wind_direction=raw_daily.get("windDirection"), + wind_speed=raw_daily.get("windSpeed"), + ) ) + return wf1_dailies + + +async def weather_indeterminate_list_mapper(raw_dailies: Generator[dict, None, None]): + """Maps raw dailies to weather indeterminate list""" + observed_dailies = [] + forecasts = [] + async for raw_daily in raw_dailies: + station_code = raw_daily.get("stationData").get("stationCode") + station_name = raw_daily.get("stationData").get("displayLabel") + latitude = raw_daily.get("stationData").get("latitude") + longitude = 
raw_daily.get("stationData").get("longitude") + utc_timestamp = datetime.fromtimestamp( + raw_daily.get("weatherTimestamp") / 1000, tz=timezone.utc ) - return weather_indeterminates + precip = raw_daily.get("precipitation") + rh = raw_daily.get("relativeHumidity") + temp = raw_daily.get("temperature") + wind_spd = raw_daily.get("windSpeed") + wind_dir = raw_daily.get("windDirection") + ffmc = raw_daily.get("fineFuelMoistureCode") + dmc = raw_daily.get("duffMoistureCode") + dc = raw_daily.get("droughtCode") + isi = raw_daily.get("initialSpreadIndex") + bui = raw_daily.get("buildUpIndex") + fwi = raw_daily.get("fireWeatherIndex") + dgr = raw_daily.get("dangerForest") + gc = raw_daily.get("grasslandCuring") + + if is_station_valid(raw_daily.get("stationData")) and raw_daily.get("recordType").get( + "id" + ) in [WF1RecordTypeEnum.ACTUAL.value, WF1RecordTypeEnum.MANUAL.value]: + observed_dailies.append( + WeatherIndeterminate( + station_code=station_code, + station_name=station_name, + latitude=latitude, + longitude=longitude, + determinate=WeatherDeterminate.ACTUAL, + utc_timestamp=utc_timestamp, + temperature=temp, + relative_humidity=rh, + precipitation=precip, + wind_direction=wind_dir, + wind_speed=wind_spd, + fine_fuel_moisture_code=ffmc, + duff_moisture_code=dmc, + drought_code=dc, + initial_spread_index=isi, + build_up_index=bui, + fire_weather_index=fwi, + danger_rating=dgr, + grass_curing=gc, + ) + ) + elif ( + is_station_valid(raw_daily.get("stationData")) + and raw_daily.get("recordType").get("id") == WF1RecordTypeEnum.FORECAST.value + ): + forecasts.append( + WeatherIndeterminate( + station_code=station_code, + station_name=station_name, + latitude=latitude, + longitude=longitude, + determinate=WeatherDeterminate.FORECAST, + utc_timestamp=utc_timestamp, + temperature=temp, + relative_humidity=rh, + precipitation=precip, + wind_direction=wind_dir, + wind_speed=wind_spd, + grass_curing=gc, + ) + ) + return observed_dailies, forecasts diff --git 
a/backend/packages/wps-shared/src/wps_shared/wildfire_one/query_builders.py b/backend/packages/wps-wf1/src/wps_wf1/query_builders.py similarity index 100% rename from backend/packages/wps-shared/src/wps_shared/wildfire_one/query_builders.py rename to backend/packages/wps-wf1/src/wps_wf1/query_builders.py diff --git a/backend/packages/wps-wf1/src/wps_wf1/tests/conftest.py b/backend/packages/wps-wf1/src/wps_wf1/tests/conftest.py new file mode 100644 index 0000000000..aa7f6d5473 --- /dev/null +++ b/backend/packages/wps-wf1/src/wps_wf1/tests/conftest.py @@ -0,0 +1,43 @@ +"""Global fixtures""" + +from unittest.mock import MagicMock + +import pytest +from aiohttp import ClientSession +from wps_wf1.cache_protocol import CacheProtocol +from wps_wf1.wfwx_settings import WfwxSettings + + +@pytest.fixture(autouse=True) +def mock_env(monkeypatch): + """Automatically mock environment variable""" + monkeypatch.setenv("BASE_URI", "https://python-test-base-uri") + monkeypatch.setenv("WFWX_USER", "user") + monkeypatch.setenv("WFWX_SECRET", "secret") + monkeypatch.setenv("WFWX_AUTH_URL", "https://wf1/pub/oauth2/v1/oauth/token") + monkeypatch.setenv("WFWX_BASE_URL", "https://wf1/wfwx") + monkeypatch.setenv("WFWX_MAX_PAGE_SIZE", "1000") + + +@pytest.fixture +def mock_session(): + """Mock ClientSession for unit tests""" + return MagicMock(spec=ClientSession) + + +@pytest.fixture +def mock_settings(): + """Mock WfwxSettings for unit tests""" + return WfwxSettings( + base_url="https://test.example.com", + auth_url="https://auth.example.com", + user="test_user", + secret="test_secret", + max_page_size=100, + ) + + +@pytest.fixture +def mock_cache(): + """Mock CacheProtocol for unit tests""" + return MagicMock(spec=CacheProtocol) diff --git a/backend/packages/wps-wf1/src/wps_wf1/tests/test_parsers.py b/backend/packages/wps-wf1/src/wps_wf1/tests/test_parsers.py new file mode 100644 index 0000000000..81a508c511 --- /dev/null +++ b/backend/packages/wps-wf1/src/wps_wf1/tests/test_parsers.py @@ 
-0,0 +1,80 @@ + + +from datetime import datetime, timezone +import math +from wps_wf1.models import HourlyActual +from wps_wf1.parsers import parse_hourly_actual + + +class TestParseHourlyActual: + def test_parse_hourly_actual(self): + """ Valid fields are set when values exist """ + raw_actual = { + "weatherTimestamp": datetime.now(tz=timezone.utc).timestamp(), + "temperature": 0.0, + "relativeHumidity": 0.0, + "windSpeed": 0.0, + "windDirection": 0.0, + "precipitation": 0.0, + "fineFuelMoistureCode": 0.0, + "initialSpreadIndex": 0.0, + "fireWeatherIndex": 0.0 + } + + hourly_actual = parse_hourly_actual(1, raw_actual) + assert isinstance(hourly_actual, HourlyActual) + assert hourly_actual.rh_valid is True + assert hourly_actual.temp_valid is True + assert hourly_actual.wdir_valid is True + assert hourly_actual.precip_valid is True + assert hourly_actual.wspeed_valid is True + + + def test_invalid_metrics(self): + """ Metric valid flags should be false """ + + raw_actual = { + "weatherTimestamp": datetime.now(tz=timezone.utc).timestamp(), + "temperature": 0.0, + "relativeHumidity": 101, + "windSpeed": -1, + "windDirection": 361, + "precipitation": -1, + "fineFuelMoistureCode": 0.0, + "initialSpreadIndex": 0.0, + "fireWeatherIndex": 0.0 + } + + hourly_actual = parse_hourly_actual(1, raw_actual) + assert isinstance(hourly_actual, HourlyActual) + assert hourly_actual.temp_valid is True + assert hourly_actual.rh_valid is False + assert hourly_actual.precip_valid is False + assert hourly_actual.wspeed_valid is False + assert hourly_actual.wdir_valid is False + + + def test_invalid_metrics_from_wfwx(self): + """ Metric valid flags should be false """ + + raw_actual = { + "weatherTimestamp": datetime.now(tz=timezone.utc).timestamp(), + "temperature": 0.0, + "relativeHumidity": 101, + "windSpeed": -1, + "windDirection": 361, + "fineFuelMoistureCode": 0.0, + "initialSpreadIndex": 0.0, + "fireWeatherIndex": 0.0, + "observationValid": False, + "observationValidComment": 
"Precipitation can not be null." + } + + hourly_actual = parse_hourly_actual(1, raw_actual) + assert isinstance(hourly_actual, HourlyActual) + assert hourly_actual.temp_valid is True + assert hourly_actual.rh_valid is False + assert hourly_actual.precip_valid is False + assert hourly_actual.wspeed_valid is False + assert hourly_actual.wdir_valid is False + assert math.isnan(hourly_actual.precipitation) diff --git a/backend/packages/wps-wf1/src/wps_wf1/tests/test_query_builders.py b/backend/packages/wps-wf1/src/wps_wf1/tests/test_query_builders.py new file mode 100644 index 0000000000..df0f87ff73 --- /dev/null +++ b/backend/packages/wps-wf1/src/wps_wf1/tests/test_query_builders.py @@ -0,0 +1,54 @@ +from wps_wf1.query_builders import ( + BuildQueryAllForecastsByAfterStart, + BuildQueryAllHourliesByRange, + BuildQueryDailiesByStationCode, + BuildQueryStationGroups, +) + + +def test_build_all_hourlies_query(): + """Verifies the query builder returns the correct url and parameters""" + query_builder = BuildQueryAllHourliesByRange(0, 1) + result = query_builder.query(0) + assert result == ( + "https://wf1/wfwx/v1/hourlies/rsql", + {"size": "1000", "page": 0, "query": "weatherTimestamp >=0;weatherTimestamp <1"}, + ) + + +def test_build_forecasts_query(): + """Verifies the query builder returns the correct url and parameters""" + query_builder = BuildQueryAllForecastsByAfterStart(0) + result = query_builder.query(0) + assert result == ( + "https://wf1/wfwx/v1/dailies/rsql", + {"size": "1000", "page": 0, "query": "weatherTimestamp >=0;recordType.id == 'FORECAST'"}, + ) + + +def test_build_dailies_by_station_code(): + """Verifies the query builder returns the correct url and parameters for dailies by station code""" + query_builder = BuildQueryDailiesByStationCode(0, 1, ["1", "2"]) + result = query_builder.query(0) + assert result == ( + "https://wf1/wfwx/v1/dailies/search/" + + "findDailiesByStationIdIsInAndWeather" +
"TimestampBetweenOrderByStationIdAscWeatherTimestampAsc", + { + "size": "1000", + "page": 0, + "startingTimestamp": 0, + "endingTimestamp": 1, + "stationIds": ["1", "2"], + }, + ) + + +def test_build_station_groups_query(): + """Verifies the query builder returns the correct url and parameters for a station groups query""" + query_builder = BuildQueryStationGroups() + result = query_builder.query(0) + assert result == ( + "https://wf1/wfwx/v1/stationGroups", + {"size": "1000", "page": 0, "sort": "groupOwnerUserId,asc"}, + ) diff --git a/backend/packages/wps-wf1/src/wps_wf1/tests/test_validation.py b/backend/packages/wps-wf1/src/wps_wf1/tests/test_validation.py new file mode 100644 index 0000000000..4fe314802c --- /dev/null +++ b/backend/packages/wps-wf1/src/wps_wf1/tests/test_validation.py @@ -0,0 +1,243 @@ +"""Unit testing for WFWX API validation""" + +import math +from datetime import datetime + +from wps_wf1.models import WeatherReading +from wps_wf1.validation import get_valid_flags, validate_metric + + +def test_validate_metric_below(): + """Below range returns false""" + result = validate_metric(1, 2, 3) + assert result is False + + +def test_validate_metric_above(): + """Above range returns false""" + result = validate_metric(3, 1, 2) + assert result is False + + +def test_validate_metric_within(): + """Within range returns true""" + result = validate_metric(2, 1, 3) + assert result is True + + +def test_validate_metric_at_low(): + """At lower bound returns true""" + result = validate_metric(1, 1, 2) + assert result is True + + +def test_validate_metric_at_high(): + """At lower bound returns true""" + result = validate_metric(2, 1, 2) + assert result is True + + +def test_temp_valid(): + """Any temp number is valid""" + test_record = WeatherReading( + datetime=datetime(2023, 7, 26, 12, 30, 15), + temperature=1, + relative_humidity=None, + wind_speed=None, + wind_direction=None, + precipitation=None, + ) + temp_valid, _, _, _, _ = get_valid_flags(test_record) 
+ assert temp_valid is True + + +def test_temp_invalid(): + """No temp number is invalid""" + test_record = WeatherReading( + datetime=datetime(2023, 7, 26, 12, 30, 15), + temperature=None, + relative_humidity=None, + wind_speed=None, + wind_direction=None, + precipitation=None, + ) + temp_valid, _, _, _, _ = get_valid_flags(test_record) + assert temp_valid is False + + +def test_rh_valid(): + """0 to 100 is valid for rh""" + low_valid = WeatherReading( + datetime=datetime(2023, 7, 26, 12, 30, 15), + temperature=None, + relative_humidity=0, + wind_speed=None, + wind_direction=None, + precipitation=None, + ) + _, low_rh_valid, _, _, _ = get_valid_flags(low_valid) + assert low_rh_valid is True + + high_valid = WeatherReading( + datetime=datetime(2023, 7, 26, 12, 30, 15), + temperature=None, + relative_humidity=100, + wind_speed=None, + wind_direction=None, + precipitation=None, + ) + _, high_rh_valid, _, _, _ = get_valid_flags(high_valid) + assert high_rh_valid is True + + +def test_rh_invalid(): + """Below 0 and above 100 is invalid for rh""" + low_valid = WeatherReading( + datetime=datetime(2023, 7, 26, 12, 30, 15), + temperature=None, + relative_humidity=-1, + wind_speed=None, + wind_direction=None, + precipitation=None, + ) + _, low_rh_invalid, _, _, _ = get_valid_flags(low_valid) + assert low_rh_invalid is False + + high_valid = WeatherReading( + datetime=datetime(2023, 7, 26, 12, 30, 15), + temperature=None, + relative_humidity=101, + wind_speed=None, + wind_direction=None, + precipitation=None, + ) + _, high_rh_invalid, _, _, _ = get_valid_flags(high_valid) + assert high_rh_invalid is False + + +def test_wind_speed_valid(): + """0 to inf is valid for wind_speed""" + low_valid = WeatherReading( + datetime=datetime(2023, 7, 26, 12, 30, 15), + temperature=None, + relative_humidity=None, + wind_speed=0, + wind_direction=None, + precipitation=None, + ) + _, _, low_wind_speed_valid, _, _ = get_valid_flags(low_valid) + assert low_wind_speed_valid is True + + 
high_valid = WeatherReading( + datetime=datetime(2023, 7, 26, 12, 30, 15), + temperature=None, + relative_humidity=None, + wind_speed=math.inf, + wind_direction=None, + precipitation=None, + ) + _, _, high_wind_speed_valid, _, _ = get_valid_flags(high_valid) + assert high_wind_speed_valid is True + + +def test_wind_speed_invalid(): + """Below 0 is invalid for wind_speed""" + low_valid = WeatherReading( + datetime=datetime(2023, 7, 26, 12, 30, 15), + temperature=None, + relative_humidity=None, + wind_speed=-1, + wind_direction=None, + precipitation=None, + ) + _, _, low_wind_speed_invalid, _, _ = get_valid_flags(low_valid) + assert low_wind_speed_invalid is False + + +def test_wdir_valid(): + """0 to 360 is valid for wdir""" + low_valid = WeatherReading( + datetime=datetime(2023, 7, 26, 12, 30, 15), + temperature=None, + relative_humidity=None, + wind_speed=None, + wind_direction=0, + precipitation=None, + ) + _, _, _, low_wdir_valid, _ = get_valid_flags(low_valid) + assert low_wdir_valid is True + + high_valid = WeatherReading( + datetime=datetime(2023, 7, 26, 12, 30, 15), + temperature=None, + relative_humidity=None, + wind_speed=None, + wind_direction=360, + precipitation=None, + ) + _, _, _, high_wdir_valid, _ = get_valid_flags(high_valid) + assert high_wdir_valid is True + + +def test_wdir_invalid(): + """Below 0 and above 360 is invalid for wdir""" + low_valid = WeatherReading( + datetime=datetime(2023, 7, 26, 12, 30, 15), + temperature=None, + relative_humidity=None, + wind_speed=None, + wind_direction=-1, + precipitation=None, + ) + _, _, _, low_wdir_invalid, _ = get_valid_flags(low_valid) + assert low_wdir_invalid is False + + high_valid = WeatherReading( + datetime=datetime(2023, 7, 26, 12, 30, 15), + temperature=None, + relative_humidity=None, + wind_speed=None, + wind_direction=361, + precipitation=None, + ) + _, _, _, high_wdir_invalid, _ = get_valid_flags(high_valid) + assert high_wdir_invalid is False + + +def test_precip_valid(): + """0 to inf is 
valid for precip""" + low_valid = WeatherReading( + datetime=datetime(2023, 7, 26, 12, 30, 15), + temperature=None, + relative_humidity=None, + wind_speed=None, + wind_direction=None, + precipitation=0, + ) + _, _, _, _, low_precip_valid = get_valid_flags(low_valid) + assert low_precip_valid is True + + high_valid = WeatherReading( + datetime=datetime(2023, 7, 26, 12, 30, 15), + temperature=None, + relative_humidity=None, + wind_speed=None, + wind_direction=None, + precipitation=math.inf, + ) + _, _, _, _, high_precip_valid = get_valid_flags(high_valid) + assert high_precip_valid is True + + +def test_precip_invalid(): + """Below 0 is invalid for precip""" + low_valid = WeatherReading( + datetime=datetime(2023, 7, 26, 12, 30, 15), + temperature=None, + relative_humidity=None, + wind_speed=None, + wind_direction=None, + precipitation=-1, + ) + _, _, _, _, low_precip_invalid = get_valid_flags(low_valid) + assert low_precip_invalid is False diff --git a/backend/packages/wps-wf1/src/wps_wf1/tests/test_wfwx_api.py b/backend/packages/wps-wf1/src/wps_wf1/tests/test_wfwx_api.py new file mode 100644 index 0000000000..2d2963c1c7 --- /dev/null +++ b/backend/packages/wps-wf1/src/wps_wf1/tests/test_wfwx_api.py @@ -0,0 +1,641 @@ + +# tests/test_wfwx_api.py +import asyncio +import math +from datetime import datetime, timezone +from dataclasses import dataclass +from typing import Any, AsyncGenerator, Dict, List, Optional +from unittest.mock import AsyncMock + +import pytest + +import wps_wf1.wfwx_api as wfwx_api_module + +# ----------------------------- +# Pytest config / async support +# ----------------------------- +pytestmark = pytest.mark.asyncio + + +# ----------------------------- +# Lightweight stub models +# ----------------------------- +@dataclass +class StubWeatherVariables: + temperature: Optional[float] = None + relative_humidity: Optional[float] = None + + +@dataclass +class StubDetailedWeatherStationProperties: + code: int + name: str + observations: 
Optional[StubWeatherVariables] = None + forecasts: Optional[StubWeatherVariables] = None + + +@dataclass +class StubWeatherStationGeometry: + coordinates: List[float] + + +@dataclass +class StubGeoJsonDetailedWeatherStation: + properties: StubDetailedWeatherStationProperties + geometry: StubWeatherStationGeometry + + +@dataclass +class StubWeatherStationHourlyReadings: + values: List[Any] + station: Any + + +@dataclass +class StubWFWXWeatherStation: + wfwx_id: str + code: int + + +# ----------------------------- +# Fake aiohttp session + response +# ----------------------------- +class FakeResponse: + def __init__(self, status: int = 200): + self.status = status + self.raise_for_status_called = False + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + return False + + def raise_for_status(self): + self.raise_for_status_called = True + if not (200 <= self.status < 300): + raise AssertionError(f"HTTP {self.status}") + + +class MockSession: + def __init__(self): + self.last_post_args = None + self.last_post_kwargs = None + self.response_status = 200 + + def post(self, url, **kwargs): + self.last_post_args = (url,) + self.last_post_kwargs = kwargs + return FakeResponse(status=self.response_status) + + +# ----------------------------- +# Fake WfwxClient +# ----------------------------- +class FakeWfwxClient: + def __init__(self, session, settings, cache): + self.session = session + self.settings = settings + self.cache = cache + self.last_post_forecasts_args = None + self.hourlies_payload = None + # Pre-configured data sources for generators + self._paged_data: Dict[str, List[dict]] = { + "stations": [], + "dailies": [], + "hourlies": [], + "stationGroups": [], + } + self._raw_dailies_all_stations: List[dict] = [] + + async def fetch_access_token(self, expiry: int): + return {"access_token": "TOKEN123"} + + def set_paged_data(self, key: str, items: List[dict]): + self._paged_data[key] = items + + def _make_generator(self, 
key: str) -> AsyncGenerator[dict, None]: + async def gen(): + for item in self._paged_data.get(key, []): + yield item + return gen() + + def fetch_paged_response_generator( + self, headers: Dict[str, str], query_builder: Any, resource_name: str, + use_cache: bool = False, ttl: Optional[int] = None + ): + # For test assertions, attach for later inspection: + self.last_fetch_headers = headers + self.last_fetch_resource = resource_name + self.last_fetch_use_cache = use_cache + self.last_fetch_ttl = ttl + self.last_fetch_query = query_builder + return self._make_generator(resource_name) + + async def fetch_raw_dailies_for_all_stations(self, headers: Dict[str, str], time_of_interest: datetime): + return list(self._raw_dailies_all_stations) + + async def fetch_hourlies(self, raw_station, headers, start_ts, end_ts, use_cache, ttl): + # Example structure that WfwxApi expects: + return { + "_embedded": { + "hourlies": self._paged_data.get("hourlies", []) + } + } + + async def fetch_stations_by_group_id(self, headers, group_id: str): + # Return whatever is stored in "stations" for simplicity. 
+ return self._paged_data.get("stations", []) + + async def post_forecasts(self, headers, forecasts_json): + self.last_post_forecasts_args = (headers, forecasts_json) + + +# ----------------------------- +# Helpers: stubs for parsers & utils +# ----------------------------- +async def stub_station_list_mapper(generator): + # Consume generator and return list of raw items (identity) + stations = [] + async for item in generator: + stations.append(item) + return stations + + +async def stub_wfwx_station_list_mapper(generator): + # Map raw station dicts {id, stationCode} into StubWFWXWeatherStation + stations = [] + async for item in generator: + stations.append(StubWFWXWeatherStation(wfwx_id=str(item.get("id")), code=item.get("stationCode"))) + return stations + + +async def stub_dailies_list_mapper(generator, record_type_enum): + # Return consumed list tagged by record_type_enum for verification + items = [] + async for item in generator: + items.append({"rt": record_type_enum, "raw": item}) + return items + + +async def stub_weather_indeterminate_list_mapper(generator): + # Return a tuple of (actuals, forecasts) separating by recordType.id + actuals, forecasts = [], [] + async def collect(): + a, f = [], [] + async for item in generator: + rt = item.get("recordType", {}).get("id") + (a if rt == "ACTUAL" else f).append(item) + return a, f + return await collect() + + +async def stub_weather_station_group_mapper(generator): + # Return list of group ids (identity map) + return ["grpA", "grpB"] + + +def stub_unique_weather_stations_mapper(stations): + # Identity mapper: wrap codes if present + return [s.get("stationCode") for s in stations] + + +def stub_parse_station(raw_station, eco_division): + return {"parsed_station": raw_station.get("stationCode"), "eco": eco_division} + + +def stub_parse_hourly(hourly): + return {"parsed_hourly": hourly.get("ts")} + + +def stub_parse_hourly_actual(station_code, hourly): + return {"station_code": station_code, "parsed_actual": 
hourly.get("ts")} + + +def stub_parse_noon_forecast(station_code, noon_forecast): + return {"station_code": station_code, "nf": noon_forecast.get("ts")} + + +def stub_is_station_valid(raw_station): + return raw_station.get("valid", True) + + +# ----------------------------- +# Fixtures +# ----------------------------- +@pytest.fixture +def mock_session(): + return MockSession() + + +@pytest.fixture +def fake_settings(): + class S: + base_url = "https://example.test" + auth_cache_expiry = 600 + station_cache_expiry = 300 + hourlies_by_station_code_expiry = 60 + dailies_by_station_code_expiry = 120 + use_cache = True + return S() + + +@pytest.fixture +def wfwx_api(mock_session, fake_settings, monkeypatch): + # Patch models in the module to stubs + monkeypatch.setattr(wfwx_api_module, "GeoJsonDetailedWeatherStation", StubGeoJsonDetailedWeatherStation) + monkeypatch.setattr(wfwx_api_module, "DetailedWeatherStationProperties", StubDetailedWeatherStationProperties) + monkeypatch.setattr(wfwx_api_module, "WeatherStationGeometry", StubWeatherStationGeometry) + monkeypatch.setattr(wfwx_api_module, "WeatherStationHourlyReadings", StubWeatherStationHourlyReadings) + monkeypatch.setattr(wfwx_api_module, "WeatherVariables", StubWeatherVariables) + # Patch parsers + monkeypatch.setattr(wfwx_api_module, "parse_station", stub_parse_station) + monkeypatch.setattr(wfwx_api_module, "parse_hourly", stub_parse_hourly) + monkeypatch.setattr(wfwx_api_module, "parse_hourly_actual", stub_parse_hourly_actual) + monkeypatch.setattr(wfwx_api_module, "parse_noon_forecast", stub_parse_noon_forecast) + # Patch mappers + monkeypatch.setattr(wfwx_api_module, "station_list_mapper", stub_station_list_mapper) + monkeypatch.setattr(wfwx_api_module, "wfwx_station_list_mapper", stub_wfwx_station_list_mapper) + monkeypatch.setattr(wfwx_api_module, "dailies_list_mapper", stub_dailies_list_mapper) + monkeypatch.setattr(wfwx_api_module, "weather_indeterminate_list_mapper", 
stub_weather_indeterminate_list_mapper) + monkeypatch.setattr(wfwx_api_module, "weather_station_group_mapper", stub_weather_station_group_mapper) + monkeypatch.setattr(wfwx_api_module, "unique_weather_stations_mapper", stub_unique_weather_stations_mapper) + # Patch util + monkeypatch.setattr(wfwx_api_module, "is_station_valid", stub_is_station_valid) + # Patch EcodivisionSeasons context manager + class FakeEcodivisionSeasons: + def __init__(self, key, cache): + self.key = key + self.cache = cache + def __enter__(self): + return f"ECO-{self.key}" + def __exit__(self, exc_type, exc, tb): + return False + monkeypatch.setattr(wfwx_api_module, "EcodivisionSeasons", FakeEcodivisionSeasons) + + # Instantiate API + wfwx_api = wfwx_api_module.WfwxApi(mock_session, fake_settings, cache=None) + + # Replace client with fake one + fake_client = FakeWfwxClient(mock_session, fake_settings, cache=None) + wfwx_api.wfwx_client = fake_client + return wfwx_api + + +# ----------------------------- +# Tests +# ----------------------------- +@pytest.mark.anyio +async def test_get_auth_headers(wfwx_api): + hdr = await wfwx_api._get_auth_header() + assert hdr["Authorization"] == "Bearer TOKEN123" + + no_cache_hdr = await wfwx_api._get_no_cache_auth_header() + assert no_cache_hdr["Authorization"] == "Bearer TOKEN123" + assert no_cache_hdr["Cache-Control"] == "no-cache" + +@pytest.mark.anyio +async def test_get_stations_by_codes_filters_and_parses(wfwx_api): + # Arrange + wfwx_api.wfwx_client.set_paged_data("stations", [ + {"id": "1", "stationCode": 100, "valid": True}, + {"id": "2", "stationCode": 200, "valid": False}, + ]) + # Act + stations = await wfwx_api.get_stations_by_codes([100, 200]) + # Assert + assert len(stations) == 1 + assert stations[0]["parsed_station"] == 100 + assert stations[0]["eco"].startswith("ECO-") # eco division is propagated + +@pytest.mark.anyio +async def test_get_station_data_missing_await_bug(wfwx_api): + # Arrange + 
wfwx_api.wfwx_client.set_paged_data("stations", [ + {"id": "1", "stationCode": 100, "valid": True}, + ]) + # Act + stations = await wfwx_api.get_station_data(use_no_cache_header=False) # should await _get_auth_header() + # Assert (this would fail prior to fix if header is coroutine and client inspects type) + assert isinstance(wfwx_api.wfwx_client.last_fetch_headers, dict) + +@pytest.mark.anyio +async def test_get_station_data_with_no_cache_header(wfwx_api): + # Arrange + wfwx_api.wfwx_client.set_paged_data("stations", [{"id": "1", "displayLabel": "test", "stationCode": 100, "stationStatus": {"id": "ACTIVE"}, "latitude": 1, "longitude": 1}]) + # Act + stations = await wfwx_api.get_station_data(use_no_cache_header=True) + # Assert + assert isinstance(wfwx_api.wfwx_client.last_fetch_headers, dict) + assert wfwx_api.wfwx_client.last_fetch_headers.get("Cache-Control") == "no-cache" + station = stations[0] + assert station.code == 100 + +@pytest.mark.anyio +async def test_get_detailed_geojson_stations_builds_maps(wfwx_api): + # Arrange + wfwx_api.wfwx_client.set_paged_data("stations", [ + {"id": "A", "stationCode": 111, "stationStatus": {"id": "ACTIVE"}, "displayLabel": "Alpha", "longitude": -123.1, "latitude": 48.4, "valid": True}, + {"id": "B", "stationCode": 222, "stationStatus": {"id": "INACTIVE"}, "displayLabel": "Beta", "longitude": -123.2, "latitude": 48.5, "valid": False}, + ]) + qb = wfwx_api_module.BuildQueryStations() # or any builder; value is not inspected in fake client + # Act + stations, id_map = await wfwx_api.get_detailed_geojson_stations(qb) + # Assert + assert set(stations.keys()) == {111} + geo = stations[111] + assert isinstance(geo, StubGeoJsonDetailedWeatherStation) + assert geo.properties.code == 111 + assert geo.properties.name == "Alpha" + assert geo.geometry.coordinates == [-123.1, 48.4] + assert id_map == {"A": 111} + +@pytest.mark.anyio +async def test_get_detailed_stations_merges_dailies(wfwx_api): + # Arrange + 
wfwx_api.wfwx_client.set_paged_data("stations", [ + {"id": "S1", "stationCode": 777, "displayLabel": "S1", "longitude": -1, "latitude": 1, "valid": True}, + ]) + wfwx_api.wfwx_client._raw_dailies_all_stations = [ + {"stationId": "S1", "recordType": {"id": "ACTUAL"}, "temperature": 21.0, "relativeHumidity": 40}, + {"stationId": "S1", "recordType": {"id": "FORECAST"}, "temperature": 23.0, "relativeHumidity": 35}, + ] + # Act + out = await wfwx_api.get_detailed_stations(datetime(2025, 1, 1, tzinfo=timezone.utc)) + # Assert + assert len(out) == 1 + props = out[0].properties + assert props.observations == StubWeatherVariables(temperature=21.0, relative_humidity=40) + assert props.forecasts == StubWeatherVariables(temperature=23.0, relative_humidity=35) + +@pytest.mark.anyio +async def test_get_hourly_for_station_filters_non_actual(wfwx_api): + # Arrange + wfwx_api.wfwx_client.set_paged_data("hourlies", [ + {"hourlyMeasurementTypeCode": {"id": "ACTUAL"}, "ts": 111}, + {"hourlyMeasurementTypeCode": {"id": "FORECAST"}, "ts": 222}, + ]) + raw_station = {"stationCode": 999} + # Act + readings = await wfwx_api.get_hourly_for_station( + raw_station=raw_station, + start_timestamp=datetime(2025, 1, 1, tzinfo=timezone.utc), + end_timestamp=datetime(2025, 1, 2, tzinfo=timezone.utc), + eco_division="ECO-key", + use_cache=False, + ttl=30, + ) + # Assert + assert isinstance(readings, StubWeatherStationHourlyReadings) + assert [v["parsed_hourly"] for v in readings.values] == [111] + assert readings.station["parsed_station"] == 999 + +@pytest.mark.anyio +async def test_get_hourly_readings_runs_parallel(wfwx_api): + # Arrange: two stations + wfwx_api.wfwx_client.set_paged_data("stations", [ + {"id": "S1", "stationCode": 100}, + {"id": "S2", "stationCode": 200}, + ]) + wfwx_api.wfwx_client.set_paged_data("hourlies", [ + {"hourlyMeasurementTypeCode": {"id": "ACTUAL"}, "ts": 10}, + {"hourlyMeasurementTypeCode": {"id": "ACTUAL"}, "ts": 20}, + ]) + # Act + out = await 
wfwx_api.get_hourly_readings( + station_codes=[100, 200], + start_timestamp=datetime(2025, 1, 1, tzinfo=timezone.utc), + end_timestamp=datetime(2025, 1, 2, tzinfo=timezone.utc), + use_cache=True, + ) + # Assert + assert len(out) == 2 + # We don't enforce order due to concurrency, but both should be there. + parsed_codes = sorted(r.station["parsed_station"] for r in out) + assert parsed_codes == [100, 200] + +@pytest.mark.anyio +async def test_get_noon_forecasts_all_stations_maps_by_wfwx_id(wfwx_api): + # Arrange + wfwx_api.wfwx_client.set_paged_data("dailies", [ + {"stationId": "S1", "ts": 100}, + {"stationId": "S2", "ts": 200}, + ]) + wfwx_api.wfwx_client.set_paged_data("stations", [ + {"id": "S1", "stationCode": 111}, + {"id": "S2", "stationCode": 222}, + ]) + # Act + out = await wfwx_api.get_noon_forecasts_all_stations(datetime(2025, 1, 1, tzinfo=timezone.utc)) + # Assert + assert len(out) == 2 + assert {o["station_code"] for o in out} == {111, 222} + assert {o["nf"] for o in out} == {100, 200} + +@pytest.mark.anyio +async def test_get_hourly_actuals_all_stations_filters_actual(wfwx_api): + # Arrange + wfwx_api.wfwx_client.set_paged_data("hourlies", [ + {"stationId": "S1", "ts": 100, "hourlyMeasurementTypeCode": {"id": "ACTUAL"}}, + {"stationId": "S2", "ts": 200, "hourlyMeasurementTypeCode": {"id": "FORECAST"}}, + ]) + wfwx_api.wfwx_client.set_paged_data("stations", [ + {"id": "S1", "stationCode": 111}, + {"id": "S2", "stationCode": 222}, + ]) + # Act + out = await wfwx_api.get_hourly_actuals_all_stations( + start_timestamp=datetime(2025, 1, 1, tzinfo=timezone.utc), + end_timestamp=datetime(2025, 1, 2, tzinfo=timezone.utc), + ) + # Assert + assert len(out) == 1 + assert out[0]["station_code"] == 111 + assert out[0]["parsed_actual"] == 100 + +@pytest.mark.anyio +async def test_get_wfwx_stations_from_station_codes_none_filters_by_fire_centre(wfwx_api): + # Arrange + wfwx_api.wfwx_client.set_paged_data("stations", [ + {"id": "A", "stationCode": 10}, + {"id": "B", 
"stationCode": 20}, + {"id": "C", "stationCode": 30}, + ]) + # Act + result = await wfwx_api.get_wfwx_stations_from_station_codes( + station_codes=None, + fire_centre_station_codes=[10, 30], + use_no_cache_header=False, + ) + # Assert + codes = sorted(s.code for s in result) + assert codes == [10, 30] + +@pytest.mark.anyio +async def test_get_wfwx_stations_from_station_codes_specific_with_missing_logs_error(wfwx_api, caplog): + # Arrange + wfwx_api.wfwx_client.set_paged_data("stations", [ + {"id": "A", "stationCode": 10}, + {"id": "B", "stationCode": 20}, + ]) + # Act + result = await wfwx_api.get_wfwx_stations_from_station_codes( + station_codes=[10, 99], + fire_centre_station_codes=[10, 20, 99], + use_no_cache_header=False, + ) + # Assert + codes = sorted(s.code for s in result) + assert codes == [10] + # Missing one logs an error + assert any("No WFWX station id for station code: 99" in rec.message for rec in caplog.records) + +@pytest.mark.anyio +async def test_get_raw_dailies_in_range_generator(wfwx_api): + # Arrange + wfwx_api.wfwx_client.set_paged_data("dailies", [{"x": 1}, {"x": 2}]) + # Act + gen = await wfwx_api.get_raw_dailies_in_range_generator(["A", "B"], 0, 10) + items = [] + async for item in gen: + items.append(item) + # Assert + assert items == [{"x": 1}, {"x": 2}] + +@pytest.mark.anyio +async def test_get_dailies_generator_respects_cache_and_headers(wfwx_api): + # Arrange + wfwx_api.wfwx_client.set_paged_data("dailies", [{"rec": 1}, {"rec": 2}]) + # Act + gen = await wfwx_api.get_dailies_generator( + wfwx_stations=[StubWFWXWeatherStation(wfwx_id="A", code=1)], + time_of_interest=datetime(2025, 1, 1, tzinfo=timezone.utc), + end_time_of_interest=datetime(2025, 1, 2, tzinfo=timezone.utc), + check_cache=False, # force use_cache False even if settings.use_cache True + use_no_cache_header=True, + ) + items = [] + async for item in gen: + items.append(item) + # Assert + assert items == [{"rec": 1}, {"rec": 2}] + assert 
wfwx_api.wfwx_client.last_fetch_use_cache is False + assert wfwx_api.wfwx_client.last_fetch_headers.get("Cache-Control") == "no-cache" + # Check timestamps were millisecond-rounded + qb = wfwx_api.wfwx_client.last_fetch_query + assert isinstance(qb, wfwx_api_module.BuildQueryDailiesByStationCode) + +@pytest.mark.anyio +async def test_get_fire_centers_returns_values_list(wfwx_api, monkeypatch): + # Arrange: fire_center_mapper returns dict + async def fake_fire_center_mapper(generator): + return {"FC1": {"name": "A"}, "FC2": {"name": "B"}} + monkeypatch.setattr(wfwx_api_module, "fire_center_mapper", fake_fire_center_mapper) + # Act + out = await wfwx_api.get_fire_centers() + # Assert + assert out == [{"name": "A"}, {"name": "B"}] + +@pytest.mark.anyio +async def test_get_dailies_for_stations_and_date_uses_mapper(wfwx_api): + # Arrange + wfwx_api.wfwx_client.set_paged_data("stations", [{"id": "S1", "stationCode": 1}]) + wfwx_api.wfwx_client.set_paged_data("dailies", [{"d": 1}, {"d": 2}]) + # Act + out = await wfwx_api.get_dailies_for_stations_and_date( + start_time_of_interest=datetime(2025, 1, 1, tzinfo=timezone.utc), + end_time_of_interest=datetime(2025, 1, 2, tzinfo=timezone.utc), + unique_station_codes=[1], + fire_centre_station_codes=[1], + mapper=stub_dailies_list_mapper, + ) + # Assert + assert all(item["rt"] == wfwx_api_module.WF1RecordTypeEnum.ACTUAL for item in out) + assert [i["raw"] for i in out] == [{"d": 1}, {"d": 2}] + +@pytest.mark.anyio +async def test_get_forecasts_for_stations_by_date_range(wfwx_api): + # Arrange + wfwx_api.wfwx_client.set_paged_data("stations", [{"id": "S1", "stationCode": 1}]) + wfwx_api.wfwx_client.set_paged_data("dailies", [{"d": 1}, {"d": 2}]) + # Act + out = await wfwx_api.get_forecasts_for_stations_by_date_range( + start_time_of_interest=datetime(2025, 1, 1, tzinfo=timezone.utc), + end_time_of_interest=datetime(2025, 1, 2, tzinfo=timezone.utc), + unique_station_codes=[1], + fire_centre_station_codes=[1], + check_cache=True, + 
mapper=stub_dailies_list_mapper, + use_no_cache_header=False, + ) + # Assert + assert all(item["rt"] == wfwx_api_module.WF1RecordTypeEnum.FORECAST for item in out) + assert [i["raw"] for i in out] == [{"d": 1}, {"d": 2}] + +@pytest.mark.anyio +async def test_get_daily_determinates_for_stations_and_date(wfwx_api): + # Arrange + wfwx_api.wfwx_client.set_paged_data("stations", [{"id": "S1", "stationCode": 1}]) + wfwx_api.wfwx_client.set_paged_data("dailies", [ + {"recordType": {"id": "ACTUAL"}, "val": 1}, + {"recordType": {"id": "FORECAST"}, "val": 2}, + ]) + # Act + actuals, forecasts = await wfwx_api.get_daily_determinates_for_stations_and_date( + start_time_of_interest=datetime(2025, 1, 1, tzinfo=timezone.utc), + end_time_of_interest=datetime(2025, 1, 2, tzinfo=timezone.utc), + unique_station_codes=[1], + fire_centre_station_codes=[1], + mapper=stub_weather_indeterminate_list_mapper, + check_cache=True, + ) + # Assert + assert [a["val"] for a in actuals] == [1] + assert [f["val"] for f in forecasts] == [2] + +@pytest.mark.anyio +async def test_get_station_groups_maps(wfwx_api): + # Arrange + wfwx_api.wfwx_client.set_paged_data("stationGroups", [{"g": 1}, {"g": 2}]) + # Act + out = await wfwx_api.get_station_groups(mapper=stub_weather_station_group_mapper) + # Assert + assert out == ["grpA", "grpB"] + +@pytest.mark.anyio +async def test_get_stations_by_group_ids_accumulates(wfwx_api): + # Arrange + wfwx_api.wfwx_client.set_paged_data("stations", [{"stationCode": 10}, {"stationCode": 20}]) + # Act + out = await wfwx_api.get_stations_by_group_ids(["G1", "G2"], mapper=stub_unique_weather_stations_mapper) + # Assert + assert out == [10, 20, 10, 20] + +@pytest.mark.anyio +async def test_post_forecasts_calls_client_and_session_post(wfwx_api, monkeypatch): + # Arrange + # Ensure auth header is stable + async def fake_get_auth_header(): + return {"Authorization": "Bearer TOKEN123"} + monkeypatch.setattr(wfwx_api, "_get_auth_header", fake_get_auth_header) + + class WF1Stub: 
+ def __init__(self, val): + self.val = val + def model_dump(self): + return {"v": self.val} + + forecasts = [WF1Stub(1), WF1Stub(2)] + wfwx_api.wfwx_client.last_post_forecasts_args = None + wfwx_api.wfwx_client.session.response_status = 200 + # Act + await wfwx_api.post_forecasts(forecasts) + # Assert: client helper called + headers, posted_json = wfwx_api.wfwx_client.last_post_forecasts_args + assert headers == {"Authorization": "Bearer TOKEN123"} + assert posted_json == [{"v": 1}, {"v": 2}] + # Assert: direct session POST called with correct URL & headers + url = f"{wfwx_api.wfwx_settings.base_url}/v1/dailies/daily-bulk" + assert wfwx_api.wfwx_client.session.last_post_args == (url,) + assert wfwx_api.wfwx_client.session.last_post_kwargs["json"] == [{"v": 1}, {"v": 2}] + assert wfwx_api.wfwx_client.session.last_post_kwargs["headers"] == {"Authorization": "Bearer TOKEN123"} diff --git a/backend/packages/wps-wf1/src/wps_wf1/tests/test_wfwx_client.py b/backend/packages/wps-wf1/src/wps_wf1/tests/test_wfwx_client.py new file mode 100644 index 0000000000..bf302b8c64 --- /dev/null +++ b/backend/packages/wps-wf1/src/wps_wf1/tests/test_wfwx_client.py @@ -0,0 +1,483 @@ +"""Unit tests for wfwx_client.py""" + +import json +from datetime import datetime +from unittest.mock import AsyncMock, MagicMock +from urllib.parse import urlencode + +import pytest +from wps_wf1.query_builders import BuildQuery +from wps_wf1.wfwx_client import WfwxClient, _cache_key + + +class MockAsyncContextManager: + """Mock async context manager for aiohttp responses""" + + def __init__(self, response_data): + self.response_data = response_data + + async def __aenter__(self): + mock_response = AsyncMock() + mock_response.json.return_value = self.response_data + mock_response.raise_for_status.return_value = None + return mock_response + + async def __aexit__(self, exc_type, exc_val, exc_tb): + pass + + +class TestCacheKey: + """Test cases for the _cache_key function""" + + def 
test_cache_key_generation(self): + """Test that cache key is generated correctly from URL and params""" + url = "https://example.com/api/data" + params = {"param1": "value1", "param2": "value2"} + + expected_key = f"{url}?{urlencode(params)}" + result = _cache_key(url, params) + + assert result == expected_key + + def test_cache_key_with_empty_params(self): + """Test cache key generation with empty parameters""" + url = "https://example.com/api/data" + params = {} + + expected_key = f"{url}?" + result = _cache_key(url, params) + + assert result == expected_key + + def test_cache_key_with_special_characters(self): + """Test cache key generation with special characters in parameters""" + url = "https://example.com/api/data" + params = {"query": "name=test&value=123", "filter": "active=true"} + + expected_key = f"{url}?{urlencode(params)}" + result = _cache_key(url, params) + + assert result == expected_key + + +class TestWfwxClient: + """Test cases for the WfwxClient class""" + + @pytest.fixture + def wfwx_client(self, mock_session, mock_settings, mock_cache): + """Create a WfwxClient instance with mocked dependencies""" + return WfwxClient(mock_session, mock_settings, mock_cache) + + def test_init(self, mock_session, mock_settings, mock_cache): + """Test WfwxClient initialization""" + client = WfwxClient(mock_session, mock_settings, mock_cache) + + assert client.session == mock_session + assert client.settings == mock_settings + assert client.cache == mock_cache + + def test_init_without_cache(self, mock_session, mock_settings): + """Test WfwxClient initialization without cache""" + client = WfwxClient(mock_session, mock_settings) + + assert client.session == mock_session + assert client.settings == mock_settings + assert client.cache is None + + +class TestWfwxClientGetJson: + """Test cases for the _get_json method""" + + @pytest.fixture + def wfwx_client(self, mock_session, mock_settings, mock_cache): + """Create a WfwxClient instance""" + return 
WfwxClient(mock_session, mock_settings, mock_cache) + + @pytest.mark.anyio + async def test_get_json_with_cache_hit(self, wfwx_client, mock_cache): + """Test _get_json returns cached data when available""" + url = "https://test.example.com/api/data" + headers = {"Authorization": "Bearer token"} + params = {"key": "value"} + cached_data = {"cached": True} + + # Setup cache to return data + mock_cache.get.return_value = json.dumps(cached_data).encode("utf-8") + + result = await wfwx_client._get_json(url, headers, params) + + assert result == cached_data + # Verify cache was checked but no HTTP request was made + mock_cache.get.assert_called_once() + wfwx_client.session.get.assert_not_called() + + @pytest.mark.anyio + async def test_get_json_with_cache_miss(self, wfwx_client, mock_cache): + """Test _get_json fetches data when not cached""" + url = "https://test.example.com/api/data" + headers = {"Authorization": "Bearer token"} + params = {"key": "value"} + response_data = {"data": "test"} + + # Setup cache to return None (no cached data) + mock_cache.get.return_value = None + + # Setup the session.get to return our mock context manager + wfwx_client.session.get.return_value = MockAsyncContextManager(response_data) + + result = await wfwx_client._get_json(url, headers, params) + + assert result == response_data + mock_cache.get.assert_called_once() + # Verify the data was cached + mock_cache.set.assert_called_once() + + @pytest.mark.anyio + async def test_get_json_without_cache(self, mock_session, mock_settings): + """Test _get_json when no cache is provided""" + client = WfwxClient(mock_session, mock_settings) + url = "https://test.example.com/api/data" + headers = {"Authorization": "Bearer token"} + params = {"key": "value"} + response_data = {"data": "test"} + + # Setup the session.get to return our mock context manager + mock_session.get.return_value = MockAsyncContextManager(response_data) + + result = await client._get_json(url, headers, params) + + assert 
result == response_data + # Verify HTTP request was made + mock_session.get.assert_called_once_with(url, headers=headers, params=params) + + @pytest.mark.anyio + async def test_get_json_with_use_cache_false(self, wfwx_client, mock_cache): + """Test _get_json respects use_cache=False parameter""" + url = "https://test.example.com/api/data" + headers = {"Authorization": "Bearer token"} + params = {"key": "value"} + response_data = {"data": "test"} + + # Setup the session.get to return our mock context manager + wfwx_client.session.get.return_value = MockAsyncContextManager(response_data) + + result = await wfwx_client._get_json(url, headers, params, use_cache=False) + + assert result == response_data + # Verify cache was not checked + mock_cache.get.assert_not_called() + + @pytest.mark.anyio + async def test_get_json_with_custom_ttl(self, wfwx_client, mock_cache): + """Test _get_json uses custom TTL when provided""" + url = "https://test.example.com/api/data" + headers = {"Authorization": "Bearer token"} + params = {"key": "value"} + response_data = {"data": "test"} + custom_ttl = 3600 + + # Setup cache to return None (no cached data) + mock_cache.get.return_value = None + + # Setup the session.get to return our mock context manager + wfwx_client.session.get.return_value = MockAsyncContextManager(response_data) + + result = await wfwx_client._get_json(url, headers, params, ttl=custom_ttl) + + assert result == response_data + # Verify the data was cached with custom TTL + mock_cache.set.assert_called_once() + call_args = mock_cache.set.call_args + assert call_args[1]["ex"] == custom_ttl + + +class TestWfwxClientFetchAccessToken: + """Test cases for the fetch_access_token method""" + + @pytest.fixture + def wfwx_client(self, mock_session, mock_settings, mock_cache): + """Create a WfwxClient instance""" + return WfwxClient(mock_session, mock_settings, mock_cache) + + @pytest.mark.anyio + async def test_fetch_access_token_with_cache_hit(self, wfwx_client, mock_cache): + 
"""Test fetch_access_token returns cached token when available""" + cached_token = {"access_token": "cached_token", "expires_in": 3600} + + # Setup cache to return data + mock_cache.get.return_value = json.dumps(cached_token).encode("utf-8") + + result = await wfwx_client.fetch_access_token(3600) + + assert result == cached_token + mock_cache.get.assert_called_once() + wfwx_client.session.get.assert_not_called() + + @pytest.mark.anyio + async def test_fetch_access_token_with_cache_miss(self, wfwx_client, mock_cache): + """Test fetch_access_token fetches new token when not cached""" + token_response = {"access_token": "new_token", "expires_in": 7200} + + # Setup cache to return None (no cached data) + mock_cache.get.return_value = None + + # Setup the session.get to return our mock context manager + wfwx_client.session.get.return_value = MockAsyncContextManager(token_response) + + result = await wfwx_client.fetch_access_token(3600) + + assert result == token_response + # Verify the token was cached with min(expires_in, ttl) + mock_cache.set.assert_called_once() + call_args = mock_cache.set.call_args + assert call_args[1]["ex"] == 3600 # min(7200, 3600) + + @pytest.mark.anyio + async def test_fetch_access_token_without_cache(self, mock_session, mock_settings): + """Test fetch_access_token when no cache is provided""" + client = WfwxClient(mock_session, mock_settings) + token_response = {"access_token": "new_token", "expires_in": 3600} + + # Setup the session.get to return our mock context manager + mock_session.get.return_value = MockAsyncContextManager(token_response) + + result = await client.fetch_access_token(3600) + + assert result == token_response + # Verify HTTP request was made with correct auth + mock_session.get.assert_called_once() + call_args = mock_session.get.call_args + assert call_args[1]["auth"].login == "test_user" + assert call_args[1]["auth"].password == "test_secret" + + +class TestWfwxClientFetchPagedResponse: + """Test cases for the 
fetch_paged_response_generator method""" + + @pytest.fixture + def mock_query_builder(self): + """Mock BuildQuery""" + mock = MagicMock(spec=BuildQuery) + return mock + + @pytest.fixture + def wfwx_client(self, mock_session, mock_settings, mock_cache): + """Create a WfwxClient instance""" + return WfwxClient(mock_session, mock_settings, mock_cache) + + @pytest.mark.anyio + async def test_fetch_paged_response_generator_single_page( + self, wfwx_client, mock_query_builder + ): + """Test fetch_paged_response_generator with single page of results""" + headers = {"Authorization": "Bearer token"} + content_key = "items" + + # Setup query builder to return URL and params + mock_query_builder.query.return_value = ("https://test.example.com/api/items", {"page": 0}) + + # Setup response data + response_data = {"page": {"totalPages": 1}, "_embedded": {"items": [{"id": 1}, {"id": 2}]}} + + # Mock _get_json to return response data + wfwx_client._get_json = AsyncMock(return_value=response_data) + + # Test the generator + results = [] + async for item in wfwx_client.fetch_paged_response_generator( + headers, mock_query_builder, content_key + ): + results.append(item) + + assert len(results) == 2 + assert results[0] == {"id": 1} + assert results[1] == {"id": 2} + + @pytest.mark.anyio + async def test_fetch_paged_response_generator_multiple_pages( + self, wfwx_client, mock_query_builder + ): + """Test fetch_paged_response_generator with multiple pages""" + headers = {"Authorization": "Bearer token"} + content_key = "items" + + # Setup query builder to return different URLs for each page + mock_query_builder.query.side_effect = [ + ("https://test.example.com/api/items", {"page": 0}), + ("https://test.example.com/api/items", {"page": 1}), + ("https://test.example.com/api/items", {"page": 2}), + ] + + # Setup response data for each page + response_data_page_0 = { + "page": {"totalPages": 3}, + "_embedded": {"items": [{"id": 1}, {"id": 2}]}, + } + response_data_page_1 = { + "page": 
{"totalPages": 3}, + "_embedded": {"items": [{"id": 3}, {"id": 4}]}, + } + response_data_page_2 = {"page": {"totalPages": 3}, "_embedded": {"items": [{"id": 5}]}} + + # Setup the session.get to return different responses for each call + wfwx_client.session.get.side_effect = [ + MockAsyncContextManager(response_data_page_0), + MockAsyncContextManager(response_data_page_1), + MockAsyncContextManager(response_data_page_2), + ] + + # Test the generator + results = [] + async for item in wfwx_client.fetch_paged_response_generator( + headers, mock_query_builder, content_key + ): + results.append(item) + + assert len(results) == 5 + assert results == [{"id": 1}, {"id": 2}, {"id": 3}, {"id": 4}, {"id": 5}] + + +class TestWfwxClientFetchRawDailies: + """Test cases for the fetch_raw_dailies_for_all_stations method""" + + @pytest.fixture + def wfwx_client(self, mock_session, mock_settings): + """Create a WfwxClient instance""" + return WfwxClient(mock_session, mock_settings) + + @pytest.mark.anyio + async def test_fetch_raw_dailies_single_page(self, wfwx_client): + """Test fetch_raw_dailies_for_all_stations with single page""" + headers = {"Authorization": "Bearer token"} + time_of_interest = datetime(2023, 1, 1, 12, 0, 0) + timestamp = int(time_of_interest.timestamp() * 1000) + + # Setup response data + response_data = { + "page": {"totalPages": 1}, + "_embedded": {"dailies": [{"id": 1, "temp": 20}, {"id": 2, "temp": 22}]}, + } + + # Setup the session.get to return our mock context manager + wfwx_client.session.get.return_value = MockAsyncContextManager(response_data) + + result = await wfwx_client.fetch_raw_dailies_for_all_stations(headers, time_of_interest) + + assert len(result) == 2 + assert result[0] == {"id": 1, "temp": 20} + assert result[1] == {"id": 2, "temp": 22} + + # Verify the correct URL and parameters were used + expected_url = f"{wfwx_client.settings.base_url}/v1/dailies/rsql" + expected_params = { + "query": f"weatherTimestamp=={timestamp}", + "page": 0, + 
"size": wfwx_client.settings.max_page_size, + } + + wfwx_client.session.get.assert_called_once_with( + expected_url, params=expected_params, headers=headers + ) + + @pytest.mark.anyio + async def test_fetch_raw_dailies_multiple_pages(self, wfwx_client): + """Test fetch_raw_dailies_for_all_stations with multiple pages""" + headers = {"Authorization": "Bearer token"} + time_of_interest = datetime(2023, 1, 1, 12, 0, 0) + + # Setup response data for multiple pages + response_data_page_0 = {"page": {"totalPages": 2}, "_embedded": {"dailies": [{"id": 1}]}} + response_data_page_1 = { + "page": {"totalPages": 2}, + "_embedded": {"dailies": [{"id": 2}, {"id": 3}]}, + } + + # Setup the session.get to return different responses for each call + wfwx_client.session.get.side_effect = [ + MockAsyncContextManager(response_data_page_0), + MockAsyncContextManager(response_data_page_1), + ] + + result = await wfwx_client.fetch_raw_dailies_for_all_stations(headers, time_of_interest) + + assert len(result) == 3 + assert result == [{"id": 1}, {"id": 2}, {"id": 3}] + + +class TestWfwxClientFetchHourlies: + """Test cases for hourlies-related methods""" + + @pytest.fixture + def wfwx_client(self, mock_session, mock_settings, mock_cache): + """Create a WfwxClient instance""" + return WfwxClient(mock_session, mock_settings, mock_cache) + + def test_prepare_fetch_hourlies_query(self, wfwx_client): + """Test prepare_fetch_hourlies_query generates correct URL and parameters""" + raw_station = {"id": "station123"} + start_datetime = datetime(2023, 1, 1, 0, 0, 0) + end_datetime = datetime(2023, 1, 1, 23, 59, 59) + + start_ts = int(start_datetime.timestamp() * 1000) + end_ts = int(end_datetime.timestamp() * 1000) + + url, params = wfwx_client.prepare_fetch_hourlies_query( + raw_station, start_datetime, end_datetime + ) + + expected_url = f"{wfwx_client.settings.base_url}/v1/hourlies/search/findHourliesByWeatherTimestampBetweenAndStationIdEqualsOrderByWeatherTimestampAsc" + expected_params = { + 
"startTimestamp": start_ts, + "endTimestamp": end_ts, + "stationId": "station123", + } + + assert url == expected_url + assert params == expected_params + + @pytest.mark.anyio + async def test_fetch_hourlies(self, wfwx_client): + """Test fetch_hourlies calls _get_json with correct parameters""" + raw_station = {"id": "station123"} + headers = {"Authorization": "Bearer token"} + start_datetime = datetime(2023, 1, 1, 0, 0, 0) + end_datetime = datetime(2023, 1, 1, 23, 59, 59) + use_cache = True + ttl = 3600 + + response_data = {"hourlies": [{"temp": 20}, {"temp": 22}]} + wfwx_client._get_json = AsyncMock(return_value=response_data) + + result = await wfwx_client.fetch_hourlies( + raw_station, headers, start_datetime, end_datetime, use_cache, ttl + ) + + assert result == response_data + wfwx_client._get_json.assert_called_once() + + +class TestWfwxClientFetchStations: + """Test cases for the fetch_stations_by_group_id method""" + + @pytest.fixture + def wfwx_client(self, mock_session, mock_settings): + """Create a WfwxClient instance""" + return WfwxClient(mock_session, mock_settings) + + @pytest.mark.anyio + async def test_fetch_stations_by_group_id(self, wfwx_client): + """Test fetch_stations_by_group_id fetches stations for a group""" + headers = {"Authorization": "Bearer token"} + group_id = "group123" + response_data = {"_embedded": {"stations": [{"id": 1}, {"id": 2}]}} + + # Setup the session.get to return our mock context manager + wfwx_client.session.get.return_value = MockAsyncContextManager(response_data) + + result = await wfwx_client.fetch_stations_by_group_id(headers, group_id) + + assert result == response_data + + # Verify the correct URL was used + expected_url = f"{wfwx_client.settings.base_url}/v1/stationGroups/{group_id}/members" + wfwx_client.session.get.assert_called_once_with(expected_url, headers=headers) diff --git a/backend/packages/wps-shared/src/wps_shared/wildfire_one/util.py b/backend/packages/wps-wf1/src/wps_wf1/util.py similarity index 
76% rename from backend/packages/wps-shared/src/wps_shared/wildfire_one/util.py rename to backend/packages/wps-wf1/src/wps_wf1/util.py index 295c4a0a30..f4e3ebeebd 100644 --- a/backend/packages/wps-shared/src/wps_shared/wildfire_one/util.py +++ b/backend/packages/wps-wf1/src/wps_wf1/util.py @@ -1,6 +1,9 @@ """ Utility functions used in several places within the wildfire_one module""" +import math + + def is_station_valid(station) -> bool: """ Run through a set of conditions to check if the station is valid. @@ -42,3 +45,16 @@ def get_zone_code_prefix(fire_centre_id: int): 50: 'V' # Coastal Fire Centre } return fire_centre_to_zone_code_prefix.get(fire_centre_id, None) + + +def compute_dewpoint(temp, relative_humidity): + """ Computes dewpoint based on code from the legacy system. + See: https://chat.developer.gov.bc.ca/channel/wildfire-wfwx?msg=vzjt28hWCP9J5pZtK + """ + if temp is None or relative_humidity is None: + return None + return (temp - (14.55 + 0.114 * temp) * + (1 - (0.01 * relative_humidity)) - + math.pow(((2.5 + 0.007 * temp) * + (1 - (0.01 * relative_humidity))), 3) - (15.9 + 0.117 * temp) * + math.pow((1 - (0.01 * relative_humidity)), 14)) diff --git a/backend/packages/wps-shared/src/wps_shared/wildfire_one/validation.py b/backend/packages/wps-wf1/src/wps_wf1/validation.py similarity index 51% rename from backend/packages/wps-shared/src/wps_shared/wildfire_one/validation.py rename to backend/packages/wps-wf1/src/wps_wf1/validation.py index 8a9c0b9171..1f8ac0e94b 100644 --- a/backend/packages/wps-shared/src/wps_shared/wildfire_one/validation.py +++ b/backend/packages/wps-wf1/src/wps_wf1/validation.py @@ -1,23 +1,26 @@ -""" Validation functions that indicate sound response or clean them to our specific standards""" +"""Validation functions that indicate sound response or clean them to our specific standards""" + import math -from wps_shared.schemas.observations import WeatherReading -from wps_shared.schemas.forecasts import NoonForecast + +from 
wps_wf1.models import NoonForecast, WeatherReading def get_valid_flags(record: WeatherReading | NoonForecast): - """ Validate fields and return flags indiciating their validity """ + """Validate fields and return flags indiciating their validity""" temp_valid = record.temperature is not None rh_valid = record.relative_humidity is not None and validate_metric( - record.relative_humidity, 0, 100) - wspeed_valid = record.wind_speed is not None and validate_metric( - record.wind_speed, 0, math.inf) + record.relative_humidity, 0, 100 + ) + wspeed_valid = record.wind_speed is not None and validate_metric(record.wind_speed, 0, math.inf) wdir_valid = record.wind_direction is not None and validate_metric( - record.wind_direction, 0, 360) + record.wind_direction, 0, 360 + ) precip_valid = record.precipitation is not None and validate_metric( - record.precipitation, 0, math.inf) + record.precipitation, 0, math.inf + ) return temp_valid, rh_valid, wspeed_valid, wdir_valid, precip_valid def validate_metric(value, low, high): - """ Validate metric with it's range of accepted values """ + """Validate metric with it's range of accepted values""" return low <= value <= high diff --git a/backend/packages/wps-wf1/src/wps_wf1/wfwx_api.py b/backend/packages/wps-wf1/src/wps_wf1/wfwx_api.py new file mode 100644 index 0000000000..0aca73561c --- /dev/null +++ b/backend/packages/wps-wf1/src/wps_wf1/wfwx_api.py @@ -0,0 +1,535 @@ +import asyncio +import logging +import math +from datetime import datetime +from typing import AsyncGenerator, Dict, List, Optional, Tuple + +from aiohttp import ClientSession + +from wps_wf1.cache_protocol import CacheProtocol +from wps_wf1.ecodivisions.ecodivision_seasons import EcodivisionSeasons +from wps_wf1.models import ( + DetailedWeatherStationProperties, + FireCentre, + GeoJsonDetailedWeatherStation, + HourlyActual, + NoonForecast, + StationDailyFromWF1, + WeatherStation, + WeatherStationGeometry, + WeatherStationHourlyReadings, + WeatherVariables, + 
WF1PostForecast, + WFWXWeatherStation, +) +from wps_wf1.parsers import ( + WF1RecordTypeEnum, + dailies_list_mapper, + fire_center_mapper, + parse_hourly, + parse_hourly_actual, + parse_noon_forecast, + parse_station, + station_list_mapper, + unique_weather_stations_mapper, + weather_indeterminate_list_mapper, + weather_station_group_mapper, + wfwx_station_list_mapper, +) +from wps_wf1.query_builders import ( + BuildQuery, + BuildQueryAllForecastsByAfterStart, + BuildQueryAllHourliesByRange, + BuildQueryByStationCode, + BuildQueryDailiesByStationCode, + BuildQueryStationGroups, + BuildQueryStations, +) +from wps_wf1.util import is_station_valid +from wps_wf1.wfwx_client import WfwxClient +from wps_wf1.wfwx_settings import WfwxSettings + +DEFAULT_REDIS_AUTH_CACHE_EXPIRY = 600 +logger = logging.getLogger(__name__) + + +class WfwxApi: + def __init__( + self, + session: ClientSession, + wfwx_settings: WfwxSettings, + cache: Optional[CacheProtocol] = None, + ): + self.cache = cache + self.wfwx_settings = wfwx_settings + self.wfwx_client = WfwxClient(session, wfwx_settings, cache) + + async def _get_auth_header(self) -> dict: + """Get WFWX auth header""" + # Fetch access token + token = await self.wfwx_client.fetch_access_token(self.wfwx_settings.auth_cache_expiry) + # Construct the header. 
+ header = {"Authorization": f"Bearer {token['access_token']}"} + return header + + async def _get_no_cache_auth_header(self) -> dict: + """Get WFWX auth header with explicit no caching""" + # Fetch auth header + header = await self._get_auth_header() + # Add the cache control header + header["Cache-Control"] = "no-cache" + return header + + async def get_stations_by_codes(self, station_codes: List[int]) -> List[WeatherStation]: + """Get a list of stations by code, from WFWX Fireweather API.""" + logger.info("Using WFWX to retrieve stations by code") + with EcodivisionSeasons( + ",".join([str(code) for code in station_codes]), self.cache + ) as eco_division: + header = await self._get_auth_header() + stations = [] + # Iterate through "raw" station data. + iterator = self.wfwx_client.fetch_paged_response_generator( + header, + BuildQueryByStationCode(station_codes), + "stations", + use_cache=True, + ttl=self.wfwx_settings.station_cache_expiry, + ) + async for raw_station in iterator: + # If the station is valid, add it to our list of stations. + if is_station_valid(raw_station): + stations.append(parse_station(raw_station, eco_division)) + logger.debug("total stations: %d", len(stations)) + return stations + + async def get_station_data(self, mapper=station_list_mapper, use_no_cache_header: bool = False): + """Get list of stations from WFWX Fireweather API.""" + logger.info("Using WFWX to retrieve station list") + if use_no_cache_header: + header = await self._get_no_cache_auth_header() + else: + header = await self._get_auth_header() + # Iterate through "raw" station data. 
+ raw_stations = self.wfwx_client.fetch_paged_response_generator( + header, + BuildQueryStations(), + "stations", + use_cache=True, + ttl=self.wfwx_settings.station_cache_expiry, + ) + # Map list of stations into desired shape + stations = await mapper(raw_stations) + logger.debug("total stations: %d", len(stations)) + return stations + + async def get_detailed_geojson_stations( + self, query_builder: BuildQuery + ) -> Tuple[Dict[int, GeoJsonDetailedWeatherStation], Dict[str, int]]: + """Fetch and marshall geojson station data""" + stations = {} + id_to_code_map = {} + headers = await self._get_auth_header() + # Put the stations in a nice dictionary. + async for raw_station in self.wfwx_client.fetch_paged_response_generator( + headers, query_builder, "stations", True, self.wfwx_settings.station_cache_expiry + ): + station_code = raw_station.get("stationCode") + station_status = raw_station.get("stationStatus", {}).get("id") + # Because we can't filter on status in the RSQL, we have to manually exclude stations that are + # not active. + if is_station_valid(raw_station): + id_to_code_map[raw_station.get("id")] = station_code + geojson_station = GeoJsonDetailedWeatherStation( + properties=DetailedWeatherStationProperties( + code=station_code, name=raw_station.get("displayLabel") + ), + geometry=WeatherStationGeometry( + coordinates=[raw_station.get("longitude"), raw_station.get("latitude")] + ), + ) + stations[station_code] = geojson_station + else: + logger.debug("station %s, status %s", station_code, station_status) + + return stations, id_to_code_map + + async def get_detailed_stations(self, time_of_interest: datetime): + """ + We do two things in parallel. 
+ # 1) list of stations + # 2) list of noon values + Once we've collected them all, we merge them into one response + """ + + # Get the authentication header + header = await self._get_auth_header() + # Fetch the daily (noon) values for all the stations + dailies_task = asyncio.create_task( + self.wfwx_client.fetch_raw_dailies_for_all_stations(header, time_of_interest) + ) + # Fetch all the stations + stations_task = asyncio.create_task( + self.get_detailed_geojson_stations(BuildQueryStations()) + ) + + # Await completion of concurrent tasks. + dailies = await dailies_task + stations, id_to_code_map = await stations_task + + # Combine dailies and stations + for daily in dailies: + station_id = daily.get("stationId") + station_code = id_to_code_map.get(station_id, None) + if station_code: + station = stations[station_code] + weather_variable = WeatherVariables( + temperature=daily.get("temperature"), + relative_humidity=daily.get("relativeHumidity"), + ) + record_type = daily.get("recordType").get("id") + if record_type in ["ACTUAL", "MANUAL"]: + station.properties.observations = weather_variable + elif record_type == "FORECAST": + station.properties.forecasts = weather_variable + else: + logger.info("unexpected record type: %s", record_type) + else: + logger.debug("No station found for daily reading (%s)", station_id) + + return list(stations.values()) + + async def get_hourly_for_station( + self, raw_station, start_timestamp, end_timestamp, eco_division, use_cache, ttl + ): + headers = await self._get_auth_header() + hourlies_json = await self.wfwx_client.fetch_hourlies( + raw_station, headers, start_timestamp, end_timestamp, use_cache, ttl + ) + hourlies = [] + for hourly in hourlies_json["_embedded"]["hourlies"]: + # We only accept "ACTUAL" values + if hourly.get("hourlyMeasurementTypeCode", "").get("id") == "ACTUAL": + hourlies.append(parse_hourly(hourly)) + + return WeatherStationHourlyReadings( + values=hourlies, station=parse_station(raw_station, 
eco_division) + ) + + async def get_hourly_readings( + self, + station_codes: List[int], + start_timestamp: datetime, + end_timestamp: datetime, + use_cache: bool = False, + ) -> List[WeatherStationHourlyReadings]: + """Get the hourly readings for the list of station codes provided.""" + # Create a list containing all the tasks to run in parallel. + tasks = [] + # Iterate through "raw" station data. + headers = await self._get_auth_header() + iterator = self.wfwx_client.fetch_paged_response_generator( + headers, + BuildQueryByStationCode(station_codes), + "stations", + True, + self.wfwx_settings.station_cache_expiry, + ) + raw_stations = [] + eco_division_key = "" + # not ideal - we iterate through the stations twice. 1'st time to get the list of station codes, + # so that we can do an eco division lookup in cache. + station_codes = set() + async for raw_station in iterator: + raw_stations.append(raw_station) + station_codes.add(raw_station.get("stationCode")) + eco_division_key = ",".join(str(code) for code in station_codes) + with EcodivisionSeasons(eco_division_key, self.cache) as eco_division: + for raw_station in raw_stations: + task = asyncio.create_task( + self.get_hourly_for_station( + raw_station, + start_timestamp, + end_timestamp, + eco_division, + use_cache, + self.wfwx_settings.hourlies_by_station_code_expiry, + ) + ) + tasks.append(task) + + # Run the tasks concurrently, waiting for them all to complete. + return await asyncio.gather(*tasks) + + async def get_noon_forecasts_all_stations( + self, start_timestamp: datetime + ) -> List[NoonForecast]: + """Get the noon forecasts for all stations.""" + + noon_forecasts: List[NoonForecast] = [] + headers = await self._get_auth_header() + + # Iterate through "raw" forecast data. 
+ forecasts_iterator = self.wfwx_client.fetch_paged_response_generator( + headers, + BuildQueryAllForecastsByAfterStart(math.floor(start_timestamp.timestamp() * 1000)), + "dailies", + ) + + forecasts = [] + async for noon_forecast in forecasts_iterator: + forecasts.append(noon_forecast) + + stations: List[WFWXWeatherStation] = await self.get_station_data( + mapper=wfwx_station_list_mapper + ) + + station_code_dict = {station.wfwx_id: station.code for station in stations} + + for noon_forecast in forecasts: + try: + station_code = station_code_dict[(noon_forecast["stationId"])] + parsed_noon_forecast = parse_noon_forecast(station_code, noon_forecast) + if parsed_noon_forecast is not None: + noon_forecasts.append(parsed_noon_forecast) + except KeyError as exception: + logger.warning("Missing noon forecast for station code", exc_info=exception) + + return noon_forecasts + + async def get_hourly_actuals_all_stations( + self, start_timestamp: datetime, end_timestamp: datetime + ) -> List[HourlyActual]: + """Get the hourly actuals for all stations.""" + + hourly_actuals: List[HourlyActual] = [] + headers = await self._get_auth_header() + + # Iterate through "raw" hourlies data. 
+ hourlies_iterator = self.wfwx_client.fetch_paged_response_generator( + headers, + BuildQueryAllHourliesByRange( + math.floor(start_timestamp.timestamp() * 1000), + math.floor(end_timestamp.timestamp() * 1000), + ), + "hourlies", + ) + + hourlies = [] + async for hourly in hourlies_iterator: + hourlies.append(hourly) + + stations: List[WFWXWeatherStation] = await self.get_station_data( + mapper=wfwx_station_list_mapper + ) + + station_code_dict = {station.wfwx_id: station.code for station in stations} + + for hourly in hourlies: + if hourly.get("hourlyMeasurementTypeCode", "").get("id") == "ACTUAL": + try: + station_code = station_code_dict[(hourly["stationId"])] + hourly_actual = parse_hourly_actual(station_code, hourly) + if hourly_actual is not None: + hourly_actuals.append(hourly_actual) + except KeyError as exception: + logger.warning("Missing hourly for station code", exc_info=exception) + return hourly_actuals + + async def get_wfwx_stations_from_station_codes( + self, + station_codes: Optional[List[int]], + fire_centre_station_codes: List[int], + use_no_cache_header: bool = False, + ) -> List[WFWXWeatherStation]: + """Return the WFWX station ids from WFWX API given a list of station codes.""" + + # All WFWX stations are requested because WFWX returns a malformed JSON response when too + # many station codes are added as query parameters. + # IMPORTANT - the two calls below, cannot be made from within the lambda, as they will be + # be called multiple times! 
+ wfwx_stations = await self.get_station_data( + mapper=wfwx_station_list_mapper, use_no_cache_header=use_no_cache_header + ) + + # Default to all known WFWX station ids if no station codes are specified + if station_codes is None: + return list(filter(lambda x: (x.code in fire_centre_station_codes), wfwx_stations)) + requested_stations: List[WFWXWeatherStation] = [] + station_code_dict = {station.code: station for station in wfwx_stations} + for station_code in station_codes: + wfwx_station = station_code_dict.get(station_code) + if wfwx_station is not None: + requested_stations.append(wfwx_station) + else: + logger.error("No WFWX station id for station code: %s", station_code) + + return requested_stations + + async def get_raw_dailies_in_range_generator( + self, + wfwx_station_ids: List[str], + start_timestamp: int, + end_timestamp: int, + ) -> AsyncGenerator[dict, None]: + """Get the raw dailies in range for a list of WFWX station ids.""" + headers = await self._get_auth_header() + return self.wfwx_client.fetch_paged_response_generator( + headers, + BuildQueryDailiesByStationCode(start_timestamp, end_timestamp, wfwx_station_ids), + "dailies", + True, + 60, + ) + + async def get_dailies_generator( + self, + wfwx_stations: List[WFWXWeatherStation], + time_of_interest: datetime, + end_time_of_interest: Optional[datetime], + check_cache: bool = True, + use_no_cache_header: bool = False, + ) -> List[dict]: + """Get the daily actuals/forecasts for the given station ids.""" + # build a list of wfwx station id's + wfwx_station_ids = [wfwx_station.wfwx_id for wfwx_station in wfwx_stations] + + timestamp_of_interest = math.floor(time_of_interest.timestamp() * 1000) + if end_time_of_interest is not None: + end_timestamp_of_interest = math.floor(end_time_of_interest.timestamp() * 1000) + else: + end_timestamp_of_interest = timestamp_of_interest + + # for local dev, we can use redis to reduce load in prod, and generally just makes development faster. 
+ # for production, it's more tricky - we don't want to put too much load on the wf1 api, but we don't + # want stale values either. We default to 5 minutes, or 300 seconds. + use_cache = check_cache is True and self.wfwx_settings.use_cache + logger.info(f"Using cache: {use_cache}") + + if use_no_cache_header: + headers = await self._get_no_cache_auth_header() + else: + headers = await self._get_auth_header() + + dailies_iterator = self.wfwx_client.fetch_paged_response_generator( + headers, + BuildQueryDailiesByStationCode( + timestamp_of_interest, end_timestamp_of_interest, wfwx_station_ids + ), + "dailies", + use_cache=use_cache, + ttl=self.wfwx_settings.dailies_by_station_code_expiry, + ) + + return dailies_iterator + + async def get_fire_centers(self) -> List[FireCentre]: + """Get the fire centers from WFWX.""" + wfwx_fire_centers = await self.get_station_data(mapper=fire_center_mapper) + return list(wfwx_fire_centers.values()) + + async def get_dailies_for_stations_and_date( + self, + start_time_of_interest: datetime, + end_time_of_interest: datetime, + unique_station_codes: List[int], + fire_centre_station_codes: List[int], + mapper=dailies_list_mapper, + ): + # get station information from the wfwx api + wfwx_stations = await self.get_wfwx_stations_from_station_codes( + unique_station_codes, fire_centre_station_codes + ) + # get the dailies for all the stations + raw_dailies = await self.get_dailies_generator( + wfwx_stations, start_time_of_interest, end_time_of_interest + ) + + yesterday_dailies = await mapper(raw_dailies, WF1RecordTypeEnum.ACTUAL) + + return yesterday_dailies + + async def get_forecasts_for_stations_by_date_range( + self, + start_time_of_interest: datetime, + end_time_of_interest: datetime, + unique_station_codes: List[int], + fire_centre_station_codes: List[int], + check_cache=True, + mapper=dailies_list_mapper, + use_no_cache_header: bool = False, + ) -> List[StationDailyFromWF1]: + # get station information from the wfwx api + 
wfwx_stations = await self.get_wfwx_stations_from_station_codes( + unique_station_codes, fire_centre_station_codes, use_no_cache_header=use_no_cache_header + ) + # get the daily forecasts for all the stations in the date range + raw_dailies = await self.get_dailies_generator( + wfwx_stations=wfwx_stations, + time_of_interest=start_time_of_interest, + end_time_of_interest=end_time_of_interest, + check_cache=check_cache, + use_no_cache_header=use_no_cache_header, + ) + + forecast_dailies = await mapper(raw_dailies, WF1RecordTypeEnum.FORECAST) + + return forecast_dailies + + async def get_daily_determinates_for_stations_and_date( + self, + start_time_of_interest: datetime, + end_time_of_interest: datetime, + unique_station_codes: List[int], + fire_centre_station_codes: List[int], + mapper=weather_indeterminate_list_mapper, + check_cache: bool = True, + ): + # get station information from the wfwx api + wfwx_stations = await self.get_wfwx_stations_from_station_codes( + unique_station_codes, fire_centre_station_codes + ) + # get the dailies for all the stations + raw_dailies = await self.get_dailies_generator( + wfwx_stations, start_time_of_interest, end_time_of_interest, check_cache + ) + + weather_determinates_actuals, weather_determinates_forecasts = await mapper(raw_dailies) + + return weather_determinates_actuals, weather_determinates_forecasts + + async def get_station_groups(self, mapper=weather_station_group_mapper): + """Get the station groups created by all users from Wild Fire One internal API.""" + header = await self._get_auth_header() + all_station_groups = self.wfwx_client.fetch_paged_response_generator( + header, BuildQueryStationGroups(), "stationGroups" + ) + # Map list of stations into desired shape + mapped_station_groups = await mapper(all_station_groups) + logger.debug("total station groups: %d", len(mapped_station_groups)) + return mapped_station_groups + + async def get_stations_by_group_ids( + self, group_ids: List[str], 
mapper=unique_weather_stations_mapper + ): + """Get all the stations in the specified group from the Wild Fire One internal API.""" + stations_in_groups = [] + headers = await self._get_auth_header() + for group_id in group_ids: + stations = await self.wfwx_client.fetch_stations_by_group_id(headers, group_id) + stations_in_group = mapper(stations) + stations_in_groups.extend(stations_in_group) + return stations_in_groups + + async def post_forecasts(self, forecasts: List[WF1PostForecast]): + logger.info("Using WFWX to post/put forecasts") + wfwx_forecast_post_url = f"{self.wfwx_settings.base_url}/v1/dailies/daily-bulk" + forecasts_json = [forecast.model_dump() for forecast in forecasts] + headers = await self._get_auth_header() + await self.wfwx_client.post_forecasts(headers, forecasts_json) + + async with self.wfwx_client.session.post( + wfwx_forecast_post_url, json=forecasts_json, headers=headers + ) as response: + response.raise_for_status() + logger.info("submitted forecasts to wf1..") diff --git a/backend/packages/wps-wf1/src/wps_wf1/wfwx_client.py b/backend/packages/wps-wf1/src/wps_wf1/wfwx_client.py new file mode 100644 index 0000000000..8749c476df --- /dev/null +++ b/backend/packages/wps-wf1/src/wps_wf1/wfwx_client.py @@ -0,0 +1,160 @@ +import json +import logging +from datetime import datetime +from typing import Any, AsyncGenerator, Dict, Optional +from urllib.parse import urlencode + +from aiohttp import BasicAuth, ClientSession + +from wps_wf1.cache_protocol import CacheProtocol +from wps_wf1.query_builders import BuildQuery +from wps_wf1.wfwx_settings import WfwxSettings + +logger = logging.getLogger(__name__) + +DEFAULT_TTL = 86400 + + +def _cache_key(url: str, params: Dict[str, Any]) -> str: + """ + Generate a key to use for caching from the provided url and parameter dictionary + + :param url: The URL + :param params: The key-value pairs to include in the cache key. + :return: A string representing the derived cache key. 
+ """ + return f"{url}?{urlencode(params)}" + + +class WfwxClient: + def __init__( + self, session: ClientSession, settings: WfwxSettings, cache: Optional[CacheProtocol] = None + ): + self.session = session + self.settings = settings + self.cache = cache + + async def _get_json( + self, + url: str, + headers: Dict[str, Any], + params: Dict[str, Any], + use_cache: bool = True, + ttl: int = DEFAULT_TTL, + ) -> Dict[str, Any]: + key = _cache_key(url, params) + if use_cache and self.cache: + cached = self.cache.get(key) + if cached: + return json.loads(cached.decode("utf-8")) + + async with self.session.get(url, headers=headers, params=params) as resp: + resp.raise_for_status() + data = await resp.json() + + if use_cache and self.cache: + self.cache.set(key, json.dumps(data).encode("utf-8"), ex=ttl) + + return data + + async def fetch_access_token(self, ttl: int) -> Dict[str, Any]: + url = self.settings.auth_url + params = {"user": self.settings.user} + key = _cache_key(url, params) + + if self.cache: + cached = self.cache.get(key) + if cached: + return json.loads(cached.decode("utf-8")) + + async with self.session.get( + url, auth=BasicAuth(self.settings.user, self.settings.secret) + ) as resp: + resp.raise_for_status() + data = await resp.json() + + expires = min(data.get("expires_in", ttl), ttl) + if self.cache: + self.cache.set(key, json.dumps(data).encode("utf-8"), ex=expires) + + return data + + async def fetch_paged_response_generator( + self, + headers: Dict[str, Any], + query_builder: BuildQuery, + content_key: str, + use_cache: bool = False, + ttl: int = DEFAULT_TTL, + ) -> AsyncGenerator[Dict[str, Any], None]: + total_pages = 1 + page_count = 0 + while page_count < total_pages: + # Build up the request URL. 
+ url, params = query_builder.query(page_count) + logger.debug("loading page %d...", page_count) + data = await self._get_json(url, headers, params, use_cache, ttl) + total_pages = data.get("page", {}).get("totalPages", 1) + for obj in data["_embedded"][content_key]: + yield obj + page_count += 1 + + async def fetch_raw_dailies_for_all_stations( + self, headers: Dict[str, Any], time_of_interest: datetime + ) -> list: + timestamp = int(time_of_interest.timestamp() * 1000) + params = { + "query": f"weatherTimestamp=={timestamp}", + "page": 0, + "size": self.settings.max_page_size, + } + url = f"{self.settings.base_url}/v1/dailies/rsql" + + total_pages = 1 + page_count = 0 + results = [] + while page_count < total_pages: + p = {**params, "page": page_count} + async with self.session.get(url, params=p, headers=headers) as resp: + resp.raise_for_status() + data = await resp.json() + total_pages = data["page"]["totalPages"] + results.extend(data["_embedded"]["dailies"]) + page_count += 1 + return results + + def prepare_fetch_hourlies_query( + self, raw_station: dict, start_datetime: datetime, end_datetime: datetime + ): + start_ts = int(start_datetime.timestamp() * 1000) + end_ts = int(end_datetime.timestamp() * 1000) + params = { + "startTimestamp": start_ts, + "endTimestamp": end_ts, + "stationId": raw_station["id"], + } + url = f"{self.settings.base_url}/v1/hourlies/search/findHourliesByWeatherTimestampBetweenAndStationIdEqualsOrderByWeatherTimestampAsc" + return url, params + + async def fetch_hourlies( + self, + raw_station: dict, + headers: Dict[str, Any], + start_datetime: datetime, + end_datetime: datetime, + use_cache: bool, + ttl: int, + ) -> dict: + url, params = self.prepare_fetch_hourlies_query(raw_station, start_datetime, end_datetime) + return await self._get_json(url, headers, params, use_cache, ttl) + + async def fetch_stations_by_group_id(self, headers: Dict[str, Any], group_id: str) -> dict: + url = 
f"{self.settings.base_url}/v1/stationGroups/{group_id}/members" + async with self.session.get(url, headers=headers) as resp: + resp.raise_for_status() + return await resp.json() + + async def post_forecasts(self, headers, forecasts_json): + url = f"{self.settings.base_url}/v1/dailies/daily-bulk" + async with self.session.post(url, json=forecasts_json, headers=headers) as response: + response.raise_for_status() diff --git a/backend/packages/wps-wf1/src/wps_wf1/wfwx_settings.py b/backend/packages/wps-wf1/src/wps_wf1/wfwx_settings.py new file mode 100644 index 0000000000..dfa0787d54 --- /dev/null +++ b/backend/packages/wps-wf1/src/wps_wf1/wfwx_settings.py @@ -0,0 +1,16 @@ +from dataclasses import dataclass +from typing import Optional + + +@dataclass(frozen=True) +class WfwxSettings: + base_url: str + auth_url: str + user: str + secret: str + max_page_size: Optional[int] = 1000 + auth_cache_expiry: Optional[int] = 600 + station_cache_expiry: Optional[int] = 604800 + hourlies_by_station_code_expiry: Optional[int] = 300 + dailies_by_station_code_expiry: Optional[int] = 300 + use_cache: Optional[bool] = True # flag to optionally disable caching diff --git a/backend/pytest.ini b/backend/pytest.ini index a8d45f2bc7..1ffc72d217 100644 --- a/backend/pytest.ini +++ b/backend/pytest.ini @@ -3,6 +3,7 @@ testpaths = packages/wps-api/src packages/wps-jobs/src packages/wps-shared/src + packages/wps-wf1/src packages/wps-tools/tests python_files = test_*.py *_test.py python_classes = Test* @@ -13,3 +14,4 @@ pythonpath = packages/wps-jobs/src packages/wps-shared/src packages/wps-tools/src + packages/wps-wf1/src diff --git a/backend/uv.lock b/backend/uv.lock index 8e45f79033..2415ad896a 100644 --- a/backend/uv.lock +++ b/backend/uv.lock @@ -13,6 +13,7 @@ members = [ "wps-shared", "wps-tools", "wps-weather", + "wps-wf1", ] [manifest.dependency-groups] @@ -4273,6 +4274,7 @@ dependencies = [ { name = "sqlalchemy" }, { name = "uvicorn" }, { name = "wps-shared" }, + { name = "wps-wf1" }, ] 
[package.optional-dependencies] @@ -4337,6 +4339,7 @@ requires-dist = [ { name = "sqlalchemy", specifier = ">=2,<3" }, { name = "uvicorn", specifier = ">=0,<1" }, { name = "wps-shared", editable = "packages/wps-shared" }, + { name = "wps-wf1", editable = "packages/wps-wf1" }, ] provides-extras = ["dev"] @@ -4356,6 +4359,7 @@ dependencies = [ { name = "scipy" }, { name = "sqlalchemy" }, { name = "wps-shared" }, + { name = "wps-wf1" }, { name = "xarray" }, ] @@ -4378,6 +4382,7 @@ requires-dist = [ { name = "scipy", specifier = ">=1.15.2,<2" }, { name = "sqlalchemy", specifier = ">=2.0.38,<3" }, { name = "wps-shared", editable = "packages/wps-shared" }, + { name = "wps-wf1", editable = "packages/wps-wf1" }, { name = "xarray", specifier = ">=2025.3.1,<2026" }, ] provides-extras = ["dev"] @@ -4404,6 +4409,7 @@ dependencies = [ { name = "sentry-sdk" }, { name = "shapely" }, { name = "sqlalchemy" }, + { name = "wps-wf1" }, ] [package.optional-dependencies] @@ -4431,6 +4437,7 @@ requires-dist = [ { name = "shapely", specifier = ">=2.0.5,<3" }, { name = "sqlalchemy", specifier = ">=2,<3" }, { name = "testcontainers", extras = ["postgres"], marker = "extra == 'dev'", specifier = ">=4.10.0,<5" }, + { name = "wps-wf1", editable = "packages/wps-wf1" }, ] provides-extras = ["dev"] @@ -4490,6 +4497,17 @@ requires-dist = [ { name = "xarray", specifier = ">=2025.3.1,<2026" }, ] +[[package]] +name = "wps-wf1" +version = "0.1.0" +source = { editable = "packages/wps-wf1" } +dependencies = [ + { name = "aiohttp" }, +] + +[package.metadata] +requires-dist = [{ name = "aiohttp", specifier = ">=3.13.2" }] + [[package]] name = "wrapt" version = "1.17.3"