diff --git a/.gitignore b/.gitignore index b6e4761..475c2b1 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,6 @@ +# Mac +.DS_Store + # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] @@ -127,3 +130,12 @@ dmypy.json # Pyre type checker .pyre/ + +# Downloads +*.grib +*.grib2 + +# Outputs +*.vtk +*.bin +*.png diff --git a/README.md b/README.md index 9425d50..c349e9e 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,24 @@ -# ERFtools -A collection of scripts for facilitating the usage of ERF. +# ERF Tools +A collection of Python-based modules and scripts to facilitate the use of ERF. + +## Installation + +Clone and create a conda (or mamba) environment: +```shell +git clone https://github.com/erf-model/erftools.git +cd erftools +conda create -n erftools python=3.9 +conda activate erftools +``` + +Then, install with: +```shell +pip install -e . # editable install +``` +or install with all optional dependencies: +```shell +pip install -e ".[all]" # quoting protects the brackets in shells like zsh +``` ## Examples @@ -31,6 +50,8 @@ print(log.ds) # data are stored in an xarray dataset Some notes and recommendations: -* An aspirational goal is to contribute code that can be used as in the examples above, with clear, intuitive naming and following PEP-8 style as a set of guidelines rather than gospel. -* To avoid duplication, model constants are defined in `erf.constants`, which should replicate `ERF/Source/ERF_Constants.H`. -* In the same vein, equation of state evaluations are defined in `erf.EOS`, which should replicate `ERF/Source/Utils/ERF_EOS.H`. +* An aspirational goal is to contribute code that can be used as in the examples above, with clear, intuitive naming. +* To avoid duplication, model constants are defined in `erftools.constants`, which should replicate `ERF/Source/ERF_Constants.H`. +* In the same vein, equation of state evaluations are defined in `erftools.utils.EOS`, which should replicate `ERF/Source/Utils/ERF_EOS.H`. +* Other utilities for calculating/deriving/diagnosing quantities of interest are also in `erftools.utils.*`. +* Please follow PEP-8 style (as a set of guidelines rather than gospel) to facilitate code usage and maintenance by the community.
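As a companion to the notes above, here is a minimal sketch (not part of this PR) of how the reorganized modules are meant to be used. The import paths `erftools.constants` and `erftools.utils.EOS` and the names `R_d`, `Cp_d`, `p_0`, and `getRhogivenThetaPress` all appear elsewhere in this diff; `rho_from_theta_p` is a hypothetical stand-in that spells out the ideal-gas/Exner relation such an EOS helper encapsulates:

```python
# Sketch only -- exercises the reorganized erftools package layout.
from erftools.constants import R_d, Cp_d, p_0  # dry gas constant, specific heat, reference pressure
from erftools.utils.EOS import getRhogivenThetaPress  # the actual implementation lives here

def rho_from_theta_p(theta, p):
    """Hypothetical illustration: dry density from potential temperature and pressure."""
    T = theta * (p / p_0)**(R_d / Cp_d)  # invert theta = T * (p_0/p)^(R_d/Cp_d)
    return p / (R_d * T)                 # ideal gas law

print(rho_from_theta_p(300.0, p_0))  # ~1.16 kg/m^3 for theta = 300 K at p = p_0
```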
diff --git a/erftools/HSE.py b/erftools/HSE.py index f7680d4..1537915 100644 --- a/erftools/HSE.py +++ b/erftools/HSE.py @@ -1,7 +1,7 @@ import numpy as np from .constants import R_d, Cp_d, CONST_GRAV -from .EOS import getRhogivenThetaPress +from .utils.EOS import getRhogivenThetaPress def Newton_Raphson_hse(p,F,dz,C,T,qv, qt=None, diff --git a/notebooks/era5/StateBordersCoordinates.txt b/erftools/data/US_state_borders_coordinates.txt similarity index 100% rename from notebooks/era5/StateBordersCoordinates.txt rename to erftools/data/US_state_borders_coordinates.txt diff --git a/erftools/data/__init__.py b/erftools/data/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/erftools/data/typical_atmosphere/__init__.py b/erftools/data/typical_atmosphere/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/notebooks/era5/TypicalAtmosphereData/pressure_vs_z_actual.txt b/erftools/data/typical_atmosphere/pressure_vs_z_actual.txt similarity index 100% rename from notebooks/era5/TypicalAtmosphereData/pressure_vs_z_actual.txt rename to erftools/data/typical_atmosphere/pressure_vs_z_actual.txt diff --git a/notebooks/era5/TypicalAtmosphereData/temp_vs_z_actual.txt b/erftools/data/typical_atmosphere/temp_vs_z_actual.txt similarity index 100% rename from notebooks/era5/TypicalAtmosphereData/temp_vs_z_actual.txt rename to erftools/data/typical_atmosphere/temp_vs_z_actual.txt diff --git a/notebooks/era5/TypicalAtmosphereData/theta_vs_z_actual.txt b/erftools/data/typical_atmosphere/theta_vs_z_actual.txt similarity index 100% rename from notebooks/era5/TypicalAtmosphereData/theta_vs_z_actual.txt rename to erftools/data/typical_atmosphere/theta_vs_z_actual.txt diff --git a/erftools/input_sounding.py b/erftools/input_sounding.py index f33f64e..2d60fbe 100644 --- a/erftools/input_sounding.py +++ b/erftools/input_sounding.py @@ -4,7 +4,7 @@ from .constants import R_d, Cp_d, Gamma, CONST_GRAV, p_0 from .wrf.constants import rvovrd, cvpm -from .EOS import getRhogivenThetaPress +from .utils.EOS import getRhogivenThetaPress from .HSE import Newton_Raphson_hse diff --git a/erftools/io/__init__.py b/erftools/io/__init__.py new file mode 100644 index 0000000..f5c8df8 --- /dev/null +++ b/erftools/io/__init__.py @@ -0,0 +1,4 @@ +from .simple import write_binary_simple_erf +from .vtk import (write_binary_vtk_on_native_grid, + write_binary_vtk_on_cartesian_grid, + write_vtk_map) diff --git a/erftools/io/simple.py b/erftools/io/simple.py new file mode 100644 index 0000000..0399ad0 --- /dev/null +++ b/erftools/io/simple.py @@ -0,0 +1,49 @@ +import numpy as np +import struct + +def write_binary_simple_erf(output_binary, + x_grid, y_grid, z_grid, + lat_erf=None, lon_erf=None, + point_data=None): + + x_grid = np.asarray(x_grid) + y_grid = np.asarray(y_grid) + + nrow, ncol = x_grid.shape + nz = len(z_grid[0,0,:]) + + # TODO: rewrite without nested loops for efficiency? 
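+    # (Editor's note, a sketch rather than part of this PR) One possible
+    # vectorized answer to the TODO above: each [i,j,k]-indexed field can be
+    # written in a single call that matches the k/j/i loop order used below,
+    # e.g. file.write(np.ascontiguousarray(data.transpose(2, 1, 0), dtype='<f4').tobytes()),
+    # assuming float32 output and a little-endian host (struct's native 'f').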
+ with open(output_binary, "wb") as file: + file.write(struct.pack('iiii', ncol, nrow, nz, len(point_data) if point_data else 0)) + + if lat_erf is not None: + for j in range(nrow): # Iterate over the y-dimension + for i in range(ncol): # Iterate over the x-dimension + file.write(struct.pack('f', lat_erf[i,j,0])) + + if lon_erf is not None: + for j in range(nrow): # Iterate over the y-dimension + for i in range(ncol): # Iterate over the x-dimension + file.write(struct.pack('f', lon_erf[i,j,0])) + + # Write grid points using a nested for loop + for i in range(ncol): + x = x_grid[0, i] + file.write(struct.pack('f', x)) + + for j in range(nrow): + y = y_grid[j, 0] + file.write(struct.pack('f', y)) + + for k in range(nz): + zavg = np.mean(z_grid[:,:,k]) + file.write(struct.pack('f', zavg)) + + # Write point data (if any) + if point_data: + for name, data in point_data.items(): + for k in range(nz): # Iterate over the z-dimension + for j in range(nrow): # Iterate over the y-dimension + for i in range(ncol): # Iterate over the x-dimension + value = data[i, j, k] + file.write(struct.pack('f', value)) diff --git a/erftools/io/vtk.py b/erftools/io/vtk.py new file mode 100644 index 0000000..c126ee0 --- /dev/null +++ b/erftools/io/vtk.py @@ -0,0 +1,229 @@ +import numpy as np +import struct + + +def write_binary_structured_vtk(filename, + x_grid, y_grid, z_grid, + point_data=None, + velocity=None, + indexing='xy', + k_to_delete=[], + mirrored_x=False, + top_down=False, + zoffset=0.0, + skip_latlon=False): + """ + General function to write a binary VTK file on a structured grid. + + Arguments + --------- + filename : str + Name of the VTK file to write. + x_grid, y_grid, z_grid : numpy.ndarray + 2D, 2D, and 3D arrays of grid coordinates, respectively. + point_data : dict, optional + Dictionary of output scalar data associated with the grid points. + velocity : numpy.ndarray, optional + 4D array (nx,ny,nz,3) of output velocity data. + indexing : {'xy', 'ij'}, optional + Input grid indexing, following the numpy naming convention. + k_to_delete : list, optional + Indices of vertical levels to exclude. + mirrored_x : bool, optional + Output has the first index reversed. + top_down : bool, optional + Output has the last index reversed. + zoffset : float, optional + Offset to apply to output grid in z direction. + skip_latlon : bool, optional + If point_data are given, skip writing 'latitude' and + 'longitude' fields. + """ + assert indexing in ('xy','ij') # Cartesian or matrix (row,col) + + x_grid = np.asarray(x_grid) + y_grid = np.asarray(y_grid) + + # Get grid extents + if indexing == 'xy': + nx, ny = x_grid.shape + elif indexing == 'ij': + ny, nx = x_grid.shape + nz = z_grid.shape[2] + print('nx, ny, nz =', nx, ny, nz) + + nzval = nz - len(k_to_delete) + + # Setup loop indices + irange = range(nx-1,-1,-1) if mirrored_x else range(nx) + krange = range(nz-1,-1,-1) if top_down else range(nz) + + with open(filename, 'wb') as f: + # Write the VTK header + f.write(b'# vtk DataFile Version 3.0\n') + f.write(b'Generated by Python script\n') + f.write(b'BINARY\n') + f.write(b'DATASET STRUCTURED_GRID\n') + f.write(f'DIMENSIONS {nx} {ny} {nzval}\n'.encode()) + f.write(f'POINTS {nx * ny * nzval} float\n'.encode()) + + # TODO: rewrite without nested loops for efficiency?
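+        # (Editor's note, a sketch rather than part of this PR) For this TODO,
+        # remember that legacy-VTK BINARY payloads must be big-endian: the
+        # commented-out np.stack() attempt below could be finished with
+        # points.astype('>f4').tofile(f), provided the points are assembled in
+        # the same k/j/i order as the loops and k_to_delete levels are dropped.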
+ + # Write grid points + #points = np.stack((x_grid.ravel(), y_grid.ravel(), z_grid.ravel()), axis=-1) + #f.write(struct.pack('>' + 'f' * points.size, *points.ravel())) + + # Write grid points using a nested for loop + for k in krange: # loop may be reversed + if k in k_to_delete: + print("Skipping level", k) + continue + + z = np.mean(z_grid[:, :, k]) + zoffset + + if indexing == 'xy': + for j in range(ny): + for i in range(nx): + x = x_grid[i, j] + y = y_grid[i, j] + f.write(struct.pack('>fff', x, y, z)) + elif indexing == 'ij': + for j in range(ny): + for i in range(nx): + x = x_grid[j, i] # indices flipped + y = y_grid[j, i] + f.write(struct.pack('>fff', x, y, z)) + + # Write point data (if any) + if point_data is not None: + f.write(f'POINT_DATA {nx * ny * nzval}\n'.encode()) + for name, data in point_data.items(): + if skip_latlon and (name == "latitude" or name == "longitude"): + continue + f.write(f'SCALARS {name} float 1\n'.encode()) + f.write(b'LOOKUP_TABLE default\n') + for k in krange: # Iterate over the z-dimension (may be reversed) + if k in k_to_delete: + continue + for j in range(ny): # Iterate over the y-dimension + for i in irange: # Iterate over the x-dimension (may be reversed) + value = data[i, j, k] + f.write(struct.pack('>f', value)) + + # Write velocity vector field + if velocity is not None: + f.write("VECTORS velocity float\n".encode()) + for k in krange: # may be reversed + if k in k_to_delete: + continue + for j in range(ny): + for i in irange: # may be reversed + vx, vy, vz = velocity[i, j, k] + f.write(struct.pack('>fff', vx, vy, vz)) + + +# formerly `write_binary_vtk_structured_grid` +def write_binary_vtk_on_native_grid(filename, + x_grid, y_grid, z_grid, + k_to_delete=[], + point_data=None, + velocity=None, + **kwargs): + if point_data is not None: + # don't write out lat,lon + point_data = { + k:v for k,v in point_data.items() + if k not in ['latitude','longitude'] + } + return write_binary_structured_vtk( + filename, + x_grid, y_grid, z_grid, + point_data=point_data, + velocity=velocity, + indexing='xy', + k_to_delete=k_to_delete, + mirrored_x=True, + top_down=True, + **kwargs) + +# formerly `write_binary_vtk_cartesian_file` +def write_binary_vtk_on_cartesian_grid(filename, + x_grid, y_grid, z_grid, + point_data=None, + **kwargs): + return write_binary_structured_vtk( + filename, + x_grid, y_grid, z_grid, + point_data=point_data, + indexing='ij', + mirrored_x=False, + top_down=False, + **kwargs) + + +def write_vtk_map(x, y, ids, filename, zlo=1e-12, + point_data=None): + """ + Write an ASCII polydata VTK file containing borders for specified + regions. + + Parameters: + x (list or ndarray): x coordinates + y (list or ndarray): y coordinates + ids (list or ndarray): state/country/region indices + corresponding to each (x, y) + filename (str): Name of the output VTK file.
zlo (float): constant elevation value + point_data (dict): scalar data associated with points, optional + """ + x = np.asarray(x) + y = np.asarray(y) + ids = np.asarray(ids) + + if len(x) != len(y) or len(x) != len(ids): + raise ValueError("The length of x, y, and ids must be the same.") + + # Open VTK file for writing + with open(filename, 'w') as vtk_file: + # Write VTK header + vtk_file.write("# vtk DataFile Version 3.0\n") + vtk_file.write("State borders\n") + vtk_file.write("ASCII\n") + vtk_file.write("DATASET POLYDATA\n") + + # Group points by region + unique_ids = np.unique(ids) + points = [] # List of all points + lines = [] # List of all lines + + # Process each region + for region in unique_ids: + region_indices = np.where(ids == region)[0] + region_points = [(x[i], y[i]) for i in region_indices] + start_idx = len(points) # Starting index for this region's points + points.extend(region_points) + + # Create line segments connecting adjacent points + for i in range(len(region_points) - 1): + lines.append((start_idx + i, start_idx + i + 1)) + + # Write points + vtk_file.write(f"POINTS {len(points)} float\n") + for px, py in points: + vtk_file.write(f"{px} {py} {zlo}\n") + + # Write lines + vtk_file.write(f"LINES {len(lines)} {3 * len(lines)}\n") + for p1, p2 in lines: + vtk_file.write(f"2 {p1} {p2}\n") + + # Write point data (optional) + if point_data is not None: + vtk_file.write(f"POINT_DATA {len(points)}\n") + + for name, values in point_data.items(): + assert len(values) == len(points) + vtk_file.write(f"SCALARS {name} double\n") + vtk_file.write("LOOKUP_TABLE default\n") + valstr = '\n'.join([f"{val:.14g}" for val in values]) + vtk_file.write(valstr+'\n') diff --git a/erftools/preprocessing/__init__.py b/erftools/preprocessing/__init__.py index 2e7de1c..0b7a5ad 100644 --- a/erftools/preprocessing/__init__.py +++ b/erftools/preprocessing/__init__.py @@ -1,33 +1,26 @@ from .wrf_inputs import WRFInputDeck from .grids import LambertConformalGrid -# ERA5 related funrcions -from .era5.IO import calculate_utm_zone -from .era5.IO import write_binary_simple_ERF -from .era5.IO import write_binary_vtk_structured_grid -from .era5.IO import write_binary_vtk_cartesian_file -from .era5.IO import find_latlon_indices -from .era5.IO import find_erf_domain_extents -from .era5.IO import write_binary_vtk_cartesian -from .era5.Plot_1D import plot_1d -from .era5.ReadERA5DataAndWriteERF_IC import ReadERA5_3DData -from .era5.ReadERA5DataAndWriteERF_SurfBC import Download_ERA5_SurfaceData -from .era5.ReadERA5DataAndWriteERF_SurfBC import Download_ERA5_ForecastSurfaceData -from .era5.ReadERA5DataAndWriteERF_SurfBC import ReadERA5_SurfaceData -from .era5.Download_ERA5Data import Download_ERA5_Data -from .era5.Download_ERA5Data import Download_ERA5_ForecastData +from .plotting import plot_1d + +try: + import cdsapi +except ModuleNotFoundError: + print("Note: Need to install package 'cdsapi' to work with reanalysis data") +else: + # ERA5 related functions + from .era5.IO import write_binary_vtk_cartesian + from .era5.ReadERA5DataAndWriteERF_IC import ReadERA5_3DData + from .era5.ReadERA5DataAndWriteERF_SurfBC import Download_ERA5_SurfaceData + from .era5.ReadERA5DataAndWriteERF_SurfBC import Download_ERA5_ForecastSurfaceData + from .era5.ReadERA5DataAndWriteERF_SurfBC import ReadERA5_SurfaceData + from .era5.Download_ERA5Data import Download_ERA5_Data + from .era5.Download_ERA5Data import Download_ERA5_ForecastData # GFS related funrcions from .gfs.Download_GFSData import Download_GFS_Data from
.gfs.Download_GFSData import Download_GFS_ForecastData -from .gfs.IO import calculate_utm_zone -from .gfs.IO import write_binary_simple_ERF -from .gfs.IO import write_binary_vtk_structured_grid -from .gfs.IO import write_binary_vtk_cartesian_file -from .gfs.IO import find_latlon_indices -from .gfs.IO import find_erf_domain_extents from .gfs.IO import write_binary_vtk_cartesian -from .gfs.Plot_1D import plot_1d from .gfs.ReadGFSDataAndWriteERF_IC import ReadGFS_3DData from .gfs.ReadGFSDataAndWriteERF_IC_FourCastNetGFS import ReadGFS_3DData_FourCastNetGFS from .gfs.ReadGFSDataAndWriteERF_IC_OnlyUVW import ReadGFS_3DData_UVW @@ -35,6 +28,6 @@ try: from herbie import Herbie except ModuleNotFoundError: - print('Note: Need to install herbie to work with HRRR data') + print("Note: Need to install package 'herbie-data' to work with HRRR data") else: from .hrrr import NativeHRRR, hrrr_projection diff --git a/erftools/preprocessing/era5/Download_ERA5Data.py b/erftools/preprocessing/era5/Download_ERA5Data.py index 8103bb8..c084725 100644 --- a/erftools/preprocessing/era5/Download_ERA5Data.py +++ b/erftools/preprocessing/era5/Download_ERA5Data.py @@ -75,7 +75,7 @@ def Download_ERA5_ForecastData(inputs_file, forecast_time, interval): timestamps = generate_timestamps(start_time, forecast_time, interval) - # MPI setup + # MPI setup comm = MPI.COMM_WORLD rank = comm.Get_rank() size = comm.Get_size() @@ -157,10 +157,17 @@ def Download_ERA5_Data(inputs): assert area[0] > area[2], "Latitude order invalid: North (1st) must be greater than South (3rd)" assert area[3] > area[1], "Longitude order invalid: East (4th) must be greater than West (2nd)" + # Instantiate Copernicus client + client = cdsapi.Client() + result = client.retrieve(dataset, request) + filename = result.location.split('/')[-1] + if os.path.isfile(filename): + print(f"File already downloaded: {filename}") + else: + print(f"Downloading {filename}") + downloaded = result.download() + print(f"Downloaded file: {downloaded}") + assert downloaded == filename - # Download data - client = cdsapi.Client() - filename = client.retrieve(dataset, request).download() - print(f"Downloaded file: {filename}") return filename, area diff --git a/erftools/preprocessing/era5/IO.py b/erftools/preprocessing/era5/IO.py index b745a09..061b6b0 100644 --- a/erftools/preprocessing/era5/IO.py +++ b/erftools/preprocessing/era5/IO.py @@ -2,292 +2,25 @@ import struct import os from pyproj import Proj, Transformer, CRS -from math import * - -def calculate_utm_zone(longitude): - """ - Calculate the UTM zone for a given longitude. 
- """ - return int((longitude + 180) // 6) + 1 - -def write_binary_simple_ERF(output_binary, lat_erf, lon_erf, x_grid, y_grid, z_grid, point_data): - - x_grid = np.asarray(x_grid) - y_grid = np.asarray(y_grid) - # Ensure grids are consistent - nrow, ncol = x_grid.shape - nz = len(z_grid[0,0,:]) - - #print(nrow, ncol) - #print(x_grid[0,:]) - - with open(output_binary, "wb") as file: - file.write(struct.pack('iiii', ncol, nrow, nz, len(point_data))) - - for j in range(nrow): # Iterate over the y-dimension - for i in range(ncol): # Iterate over the x-dimension - file.write(struct.pack('f', lat_erf[i,j,0])) - - for j in range(nrow): # Iterate over the y-dimension - for i in range(ncol): # Iterate over the x-dimension - file.write(struct.pack('f', lon_erf[i,j,0])) - - - # Write grid points using a nested for loop - for i in range(ncol): - x = x_grid[0, i] - file.write(struct.pack('f', x)) - - for j in range(nrow): - y = y_grid[j, 0] - file.write(struct.pack('f', y)) - - for k in range(nz): - zavg = np.mean(z_grid[:,:,k]) - file.write(struct.pack('f', zavg)) - - # Write point data (if any) - if point_data: - for name, data in point_data.items(): - for k in range(nz): # Iterate over the z-dimension - for j in range(nrow): # Iterate over the y-dimension - for i in range(ncol): # Iterate over the x-dimension - value = data[i, j, k] - file.write(struct.pack('f', value)) - - -def write_binary_vtk_structured_grid(filename, x_grid, y_grid, z_grid, - nz, k_to_delete, is_shift, - point_data=None, velocity=None): - """ - Writes a binary VTK file for a structured grid. - - Args: - filename (str): Name of the VTK file to write. - x_grid, y_grid, z_grid (numpy.ndarray): 3D arrays of grid coordinates. - point_data (dict, optional): Dictionary of scalar data associated with the grid points. 
- """ - - x_grid = np.asarray(x_grid) - y_grid = np.asarray(y_grid) - # Ensure grids are consistent - nx, ny = x_grid.shape - nzval = nz - len(k_to_delete) - - print(nx, ny) - - with open(filename, 'wb') as f: - # Write the VTK header - f.write(b'# vtk DataFile Version 3.0\n') - f.write(b'Generated by Python script\n') - f.write(b'BINARY\n') - f.write(b'DATASET STRUCTURED_GRID\n') - f.write(f'DIMENSIONS {nx} {ny} {nzval}\n'.encode()) - f.write(f'POINTS {nx * ny * nzval} float\n'.encode()) - - # Write grid points - #points = np.stack((x_grid.ravel(), y_grid.ravel(), z_grid.ravel()), axis=-1) - #f.write(struct.pack('>' + 'f' * points.size, *points.ravel())) - - # Write grid points using a nested for loop - for k in range(nz): - z = np.mean(z_grid[:, :, nz-1-k]) - if nz-1-k in k_to_delete: - print("Val is ", nz - 1 - k) - continue; - for j in range(ny): - for i in range(nx): - x = x_grid[i, j] - y = y_grid[i, j] - f.write(struct.pack('>fff', x, y, z)) - - - # Write point data (if any) - if point_data: - f.write(f'POINT_DATA {nx * ny * nzval}\n'.encode()) - for name, data in point_data.items(): - if(name == "latitude" or name=="longitude"): - continue; - f.write(f'SCALARS {name} float 1\n'.encode()) - f.write(b'LOOKUP_TABLE default\n') - for k in range(nz): # Iterate over the z-dimension - if nz-1-k in k_to_delete: - continue; - for j in range(ny): # Iterate over the y-dimension - for i in range(nx): # Iterate over the x-dimension - value = data[nx-1-i, j, nz-1-k] - f.write(struct.pack('>f', value)) - - # Write velocity vector field - if velocity is not None: - f.write("VECTORS velocity float\n".encode()) - for k in range(nz): - if nz-1-k in k_to_delete: - continue; - for j in range(ny): - for i in range(nx): - vx, vy, vz = velocity[nx-1-i, j, nz-1-k] - f.write(struct.pack('>fff',vx, vy, vz)) - - -def write_binary_vtk_cartesian_file(filename, x_grid, y_grid, z_grid, - nz, k_to_delete, is_shift, - point_data=None, velocity=None): - """ - Writes a binary VTK file for a structured grid. - - Args: - filename (str): Name of the VTK file to write. - x_grid, y_grid, z_grid (numpy.ndarray): 3D arrays of grid coordinates. - point_data (dict, optional): Dictionary of scalar data associated with the grid points. 
- """ - - x_grid = np.asarray(x_grid) - y_grid = np.asarray(y_grid) - # Ensure grids are consistent - nrow, ncol = x_grid.shape - nzval = nz - - print(ncol, nrow) - - with open(filename, 'wb') as f: - # Write the VTK header - f.write(b'# vtk DataFile Version 3.0\n') - f.write(b'Generated by Python script\n') - f.write(b'BINARY\n') - f.write(b'DATASET STRUCTURED_GRID\n') - f.write(f'DIMENSIONS {ncol} {nrow} {nzval}\n'.encode()) - f.write(f'POINTS {ncol * nrow * nzval} float\n'.encode()) - - # Write grid points - #points = np.stack((x_grid.ravel(), y_grid.ravel(), z_grid.ravel()), axis=-1) - #f.write(struct.pack('>' + 'f' * points.size, *points.ravel())) - - # Write grid points using a nested for loop - for k in range(nz): - z = np.mean(z_grid[:, :, k]) - for j in range(nrow): - for i in range(ncol): - x = x_grid[j, i] - y = y_grid[j, i] - f.write(struct.pack('>fff', x, y, z)) - - # Write point data (if any) - if point_data: - f.write(f'POINT_DATA {ncol * nrow * nzval}\n'.encode()) - for name, data in point_data.items(): - f.write(f'SCALARS {name} float 1\n'.encode()) - f.write(b'LOOKUP_TABLE default\n') - for k in range(nz): # Iterate over the z-dimension - for j in range(nrow): # Iterate over the y-dimension - for i in range(ncol): # Iterate over the x-dimension - value = data[i, j, k] - f.write(struct.pack('>f', value)) - - -def find_latlon_indices(domain_lons, domain_lats, lon, lat): - nlats = len(domain_lats) - nlons = len(domain_lons) - lat_idx = 0 - lon_idx = 0 - for i in range(0,nlats): - if(lat < domain_lats[i]): - lat_idx = i - break - - for j in range(0,nlons): - if(lon < domain_lons[j]): - lon_idx = j - break - - return lon_idx, lat_idx - -def find_erf_domain_extents(x_grid, y_grid, nx, ny): - - # Need to determine the box extents of the cartesian box - # (xmin, xmax), (y_min, y_max) that fits - # in this region. 
Currently this method only works for the - # northern hemisphere - - - # Top is [nx-1,:] - # Bpttom is [0,:] - # Left is [:,0] - # Right is [:,ny-1] - #print(x_grid[0,:]) - - ymax = min(y_grid[nx-1,:]) - 100e3; - #print("Value of ymin is ", ymax); - - # Intersect it with the leftmost longitude and - # rightmost longitude - - # Leftmost longitude is the line joined by - # x1,y1 = x_grid[0,-1], y_grid[0,-1] - # x2, y2 = x_grid[nx-1,-1], y_grid[nx-1,-1] - - #print("Values are ", x_grid[0,-1], y_grid[0,-1], x_grid[-1,-1], y_grid[-1,-1]) - - i1 = 0 - for i in range(0, nx-1): - if(y_grid[i,-1] < ymax and y_grid[i+1,-1] > ymax): - i1 = i - break - - xmax = min(x_grid[i1,-1], x_grid[0,-1]) - 100e3 - - for i in range(0, nx-1): - if(y_grid[i,0] < ymax and y_grid[i+1,0] > ymax): - i1 = i - break - xmin = max(x_grid[i1,0], x_grid[0,0]) + 100e3 - - for i in range(0, ny-1): - if(x_grid[0,i] < xmax and x_grid[0,i+1] > xmax): - i1 = i - break - y1 = y_grid[0,i1]; - - for i in range(0, ny-1): - if(x_grid[0,i] < xmin and x_grid[0,i+1] > xmin): - i1 = i - break - - y2 = y_grid[0,i1] - - ymin = max(y1,y2) + 100e3 - - filename = "Output/domain_extents.txt" +from math import sin, cos, atan2 - try: - # Use exclusive creation mode ("x") so only one rank succeeds - with open(filename, "x") as f: - print("geometry.prob_lo =", - np.ceil(xmin + 50e3), - np.ceil(ymin + 50e3), - 0.0, - file=f) - print("geometry.prob_hi =", - np.floor(xmax - 50e3), - np.floor(ymax - 50e3), - 25000.0, - file=f) - except FileExistsError: - # Another rank already wrote the file — just skip - pass +from erftools.utils.latlon import (find_erf_domain_extents, + find_latlon_indices) +from erftools.io import (write_binary_simple_erf, + write_binary_vtk_on_cartesian_grid) - return xmin, xmax, ymin, ymax +def write_binary_vtk_cartesian(date_time_forecast_str, + output_binary, + domain_lats, domain_lons, + x_grid, y_grid, z_grid, + nx, ny, nz, + k_to_delete, + lambert_conformal, + point_data=None): -def write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lats, domain_lons, - x_grid, y_grid, z_grid, nx, ny, nz, - k_to_delete, lambert_conformal, point_data=None): - - - - xmin, xmax, ymin, ymax = find_erf_domain_extents(x_grid, y_grid, nx, ny) - #print("xmin, xmax, ymin, ymax are ", xmin, xmax, ymin, ymax); - + xmin, xmax, ymin, ymax = find_erf_domain_extents(x_grid, y_grid, nx, ny, + outfile='Output/domain_extents.txt') print("Value of nx and ny are ", nx, ny) @@ -320,7 +53,7 @@ def write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lat qr_erf = np.zeros((nx_erf, ny_erf, nz_erf)) theta_erf = np.zeros((nx_erf, ny_erf, nz_erf)) - lat_erf = np.zeros((nx_erf,ny_erf,nz_erf)) + lat_erf = np.zeros((nx_erf,ny_erf,nz_erf)) lon_erf = np.zeros((nx_erf,ny_erf,nz_erf)) scalars = { @@ -365,7 +98,6 @@ def write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lat uvel_3d = point_data["uvel"] vvel_3d = point_data["vvel"] - print("Values of nx_erf and ny_erf are", nx_erf, ny_erf); print("Shapes of xgrid and ygrid are", x_grid.shape, y_grid.shape); print("Shapes of xgrid_erf and ygrid_erf are", x_grid_erf.shape, y_grid_erf.shape); @@ -387,36 +119,35 @@ def write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lat for j in range(ny_erf): for i in range(nx_erf): lon, lat = transformer.transform(x_grid_erf[j,i], y_grid_erf[j,i]) - lon_idx, lat_idx = find_latlon_indices(domain_lons, domain_lats, lon, lat) - lat_erf[i,j,0] = lat; - lon_erf[i,j,0] = lon; + lon_idx, lat_idx = 
find_latlon_indices(domain_lons, + domain_lats, + lon, lat) + lat_erf[i,j,0] = lat + lon_erf[i,j,0] = lon lat0 = domain_lats[lat_idx-1] lat1 = domain_lats[lat_idx] lon0 = domain_lons[lon_idx-1] - lon1 = domain_lons[lon_idx] - + lon1 = domain_lons[lon_idx] + # fractional distances fx = (lon - lon0) / (lon1 - lon0) fy = (lat - lat0) / (lat1 - lat0) - if(i < nx_erf-1): x1 = x_grid[j,i] y1 = y_grid[j,i] - + x2 = x_grid[j,i+1] y2 = y_grid[j,i+1] elif(i == nx_erf-1): x1 = x_grid[j,i-1] y1 = y_grid[j,i-1] - + x2 = x_grid[j,i] y2 = y_grid[j,i] - theta = atan2(y2-y1, x2-x1) - - #print("theta is ", x1, x2, y1, y2, theta) + theta = atan2(y2-y1, x2-x1) kcount = 1 for k in range(nz): # Iterate over the z-dimension @@ -426,18 +157,18 @@ def write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lat scalars_to_plot[name][i,j,kcount] = lat; #print("Reaching here lat", lat) elif(name == "longitude"): - scalars_to_plot[name][i,j,kcount] = lon; + scalars_to_plot[name][i,j,kcount] = lon; #print("Reaching here lon", lon) elif(name == "uvel" or name == "vvel"): u_tmp = (fx*fy*uvel_3d[nx-1-lat_idx, lon_idx, nz-1-k] + fx*(1-fy)*uvel_3d[nx-1-lat_idx, lon_idx-1, nz-1-k] + (1-fx)*(1-fy)*uvel_3d[nx-1-lat_idx+1, lon_idx-1, nz-1-k] + (1-fx)*fy*uvel_3d[nx-1-lat_idx+1, lon_idx, nz-1-k]) v_tmp = (fx*fy*vvel_3d[nx-1-lat_idx, lon_idx, nz-1-k] + fx*(1-fy)*vvel_3d[nx-1-lat_idx, lon_idx-1, nz-1-k] + (1-fx)*(1-fy)*vvel_3d[nx-1-lat_idx+1, lon_idx-1, nz-1-k] + (1-fx)*fy*vvel_3d[nx-1-lat_idx+1, lon_idx, nz-1-k]) - + if(name == "uvel"): scalars_to_plot[name][i,j,kcount] = u_tmp*cos(theta) - v_tmp*sin(theta) elif(name == "vvel"): - scalars_to_plot[name][i,j,kcount] = u_tmp*sin(theta) + v_tmp*cos(theta) + scalars_to_plot[name][i,j,kcount] = u_tmp*sin(theta) + v_tmp*cos(theta) else: scalars_to_plot[name][i,j,kcount] = (fx*fy*data[nx-1-lat_idx, lon_idx, nz-1-k] + fx*(1-fy)*data[nx-1-lat_idx, lon_idx-1, nz-1-k] + (1-fx)*(1-fy)*data[nx-1-lat_idx+1, lon_idx-1, nz-1-k] + (1-fx)*fy*data[nx-1-lat_idx+1, lon_idx, nz-1-k]) @@ -450,14 +181,20 @@ def write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lat scalars_to_plot[name][i,j,0] = scalars_to_plot[name][i,j,1] if(name != "latitude" and name != "longitude"): scalars[name][i,j,0] = scalars[name][i,j,1] - + else: print("Variable not found in scalars list", name) #sys.exit() + output_cart_vtk = "./Output/VTK/3D/ERFDomain/" + "ERF_IC_" + date_time_forecast_str +".vtk" - tmp = [] - print("Writing write_binary_vtk_cartesian_file") - write_binary_vtk_cartesian_file(output_cart_vtk, x_grid_erf, y_grid_erf, z_grid_erf, nz_erf, tmp, False, scalars_to_plot) - print("Writing write_binary_simple_ERF") - write_binary_simple_ERF(output_binary, lat_erf, lon_erf, x_grid_erf, y_grid_erf, z_grid_erf, scalars) + print("Writing write_binary_vtk_on_cartesian_grid") + write_binary_vtk_on_cartesian_grid(output_cart_vtk, + x_grid_erf, y_grid_erf, z_grid_erf, + point_data=scalars_to_plot) + + print("Writing write_binary_simple_erf") + write_binary_simple_erf(output_binary, + x_grid_erf, y_grid_erf, z_grid_erf, + lat_erf=lat_erf, lon_erf=lon_erf, + point_data=scalars) diff --git a/erftools/preprocessing/era5/Plot_1D.py b/erftools/preprocessing/era5/Plot_1D.py deleted file mode 100644 index 1765146..0000000 --- a/erftools/preprocessing/era5/Plot_1D.py +++ /dev/null @@ -1,95 +0,0 @@ -import matplotlib.pyplot as plt -import numpy as np - -def plot_1d(temp_3d, pressure_3d, theta_3d, qv_3d, qsat_3d, z_grid, k_to_delete): - nz = z_grid.shape[2] - plt.figure(1) - for k in np.arange(0, 
nz, 1): - if nz - 1 - k in k_to_delete: - continue - plt.plot(np.mean(temp_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'xk',label="mean" if k == 0 else "") - plt.plot(np.max(temp_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'or',label="max" if k == 0 else "") - plt.plot(np.min(temp_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), '^b',label="min" if k == 0 else "") - - plt.ylim([0, 20000]) - plt.xlabel('T (K)',fontsize=15) - plt.ylabel(r'$z$ (m)',fontsize=15) - - dirname = "./TypicalAtmosphereData/" - temperature_filename = dirname + "temp_vs_z_actual.txt" - - data = np.loadtxt(temperature_filename) - plt.plot(data[:,0]+8,data[:,1],'k',label='Typical atmos.') - plt.legend() - plt.savefig("./Images/temp_vs_z.png") - - plt.figure(2) - for k in np.arange(0, nz, 1): - if nz - 1 - k in k_to_delete: - continue - plt.plot(np.mean(pressure_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'xk', label="mean" if k == 0 else "") - plt.plot(np.max(pressure_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'or',label="max" if k == 0 else "") - plt.plot(np.min(pressure_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), '^b',label="min" if k == 0 else "") - - plt.ylim([0, 20000]) - plt.xlabel('p (mbar)',fontsize=15) - plt.ylabel(r'$z$ (m)',fontsize=15) - - dirname = "./TypicalAtmosphereData/" - pressure_filename = dirname + "pressure_vs_z_actual.txt" - - data = np.loadtxt(pressure_filename) - plt.plot(data[:,0],data[:,1],'k',label='Typical atmos.') - plt.legend() - plt.savefig("./Images/pressure_vs_z.png") - - plt.figure(3) - for k in np.arange(0, nz, 1): - if nz - 1 - k in k_to_delete: - continue - plt.plot(np.mean(theta_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'xk',label="mean" if k == 0 else "") - plt.plot(np.max(theta_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'or',label="max" if k == 0 else "") - plt.plot(np.min(theta_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), '^b',label="min" if k == 0 else "") - - plt.ylim([0, 20000]) - plt.xlim([280, 600]) - plt.xlabel(r'$\theta$ (K)',fontsize=15) - plt.ylabel(r'$z$ (m)',fontsize=15) - - dirname = "./TypicalAtmosphereData/" - theta_filename = dirname + "theta_vs_z_actual.txt" - - data = np.loadtxt(theta_filename) - plt.plot(data[:,0],data[:,1],'k',label='Typical atmos.') - plt.legend() - plt.savefig("./Images/theta_vs_z.png") - - plt.figure(4) - for k in np.arange(0, nz, 1): - if nz - 1 - k in k_to_delete: - continue - plt.plot(np.mean(qv_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'xk',label="mean" if k == 0 else "") - plt.plot(np.max(qv_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'or',label="max" if k == 0 else "") - plt.plot(np.min(qv_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), '^b',label="min" if k == 0 else "") - - plt.ylim([0, 20000]) - plt.xlabel(r'$q_v$ (kg/kg)',fontsize=15) - plt.ylabel(r'$z$ (m)',fontsize=15) - plt.legend() - plt.savefig("./Images/qv_vs_z.png") - - plt.figure(5) - for k in np.arange(0, nz, 1): - if nz - 1 - k in k_to_delete: - continue - plt.plot(np.mean(qsat_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'xk',label="mean" if k == 0 else "") - plt.plot(np.max(qsat_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'or',label="max" if k == 0 else "") - plt.plot(np.min(qsat_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), '^b',label="min" if k == 0 else "") - - plt.xlabel(r'$q_{sat}$ (kg/kg)',fontsize=15) - plt.ylabel(r'$z$ (m)',fontsize=15) - plt.legend() - plt.ylim([0, 20000]) - plt.xlim([0, 0.03]) - #plt.axis('tight') - plt.savefig("./Images/qsat_vs_z.png") diff --git a/erftools/preprocessing/era5/ReadERA5DataAndWriteERF_IC.py 
b/erftools/preprocessing/era5/ReadERA5DataAndWriteERF_IC.py index 328bc66..9849b33 100644 --- a/erftools/preprocessing/era5/ReadERA5DataAndWriteERF_IC.py +++ b/erftools/preprocessing/era5/ReadERA5DataAndWriteERF_IC.py @@ -1,43 +1,21 @@ +from importlib import resources import pygrib import numpy as np import struct -from pyproj import Proj, Transformer, CRS -import matplotlib.pyplot as plt +from pyproj import Transformer import sys import os from scipy.interpolate import interp1d -from erftools.preprocessing import calculate_utm_zone -from erftools.preprocessing import write_binary_vtk_structured_grid +from erftools.io import write_binary_vtk_on_native_grid from erftools.preprocessing import write_binary_vtk_cartesian from erftools.preprocessing import plot_1d +from erftools.constants import CONST_GRAV as const_g +from erftools.utils.projection import calculate_utm_zone +from erftools.utils.microphysics import p_sat -#from IO import * -#from Plot_1D import plot_1d -#from Download_ERA5Data import * - -const_g = 9.81 - -def p_sat(temp): - tC = temp - 273.15 # Convert temperature from Kelvin to Celsius - - # Create masks for conditions - mask_positive = tC > 0.0 - mask_negative = ~mask_positive - - # Initialize ps with zeros (same shape as temp) - ps = np.zeros_like(temp) - - # Compute ps for tC > 0 - ps[mask_positive] = 6.112 * np.exp(17.62 * tC[mask_positive] / (tC[mask_positive] + 243.12)) - - # Compute ps for tC <= 0 - ps[mask_negative] = 6.112 * np.exp(22.46 * tC[mask_negative] / (tC[mask_negative] + 272.62)) - - return ps - def ReadERA5_3DData(file_path, lambert_conformal): # Open the GRIB2 file pressure_levels = [] @@ -143,11 +121,11 @@ def ReadERA5_3DData(file_path, lambert_conformal): # Convert pressure levels to numpy array for indexing pressure_levels = np.array(pressure_levels) - nz = 37 + nz = len(pressure_levels) + assert nz == 37, 'Unexpected number of ERA5 pressure levels' print("The number of lats and lons are levels are %d, %d, %d"%(lats.shape[0], lats.shape[1], nz)); - # Extract unique latitude and longitude values unique_lats = np.unique(lats[:, 0]) # Take the first column for unique latitudes unique_lons = np.unique(lons[0, :]) # Take the first row for unique longitudes @@ -195,9 +173,6 @@ def ReadERA5_3DData(file_path, lambert_conformal): x_grid, y_grid = np.meshgrid(domain_lons, domain_lats) lon_grid, lat_grid = np.meshgrid(domain_lons, domain_lats) - #lambert_conformal = CRS.from_proj4( - #"+proj=lcc +lat_1=30 +lat_2=60 +lat_0=38.5 +lon_0=-97 +datum=WGS84 +units=m +no_defs") - transformer = Transformer.from_crs("EPSG:4326", lambert_conformal, always_xy=True) # Convert the entire grid to UTM @@ -207,14 +182,12 @@ def ReadERA5_3DData(file_path, lambert_conformal): print("size is ", len(qv_3d_hr3)) - dirname = "./TypicalAtmosphereData/" - pressure_filename = dirname + "pressure_vs_z_actual.txt" - - pressure_typical = np.loadtxt(pressure_filename) + with resources.open_text('erftools.data.typical_atmosphere', 'pressure_vs_z_actual.txt') as f: + pressure_typical = np.loadtxt(f) pressure_interp_func = interp1d(pressure_typical[:,1], pressure_typical[:,0], kind='linear', fill_value="extrapolate") # Find the index of the desired pressure level - for k in np.arange(36, -1, -1): + for k in np.arange(nz-1, -1, -1): # Extract temperature at the desired pressure level ght_at_lev = ght_3d_hr3[k] @@ -276,18 +249,18 @@ def ReadERA5_3DData(file_path, lambert_conformal): print("Max val is ", np.max(z_grid[:,:,k]), ) - - #pressure_3d[:, :, k] = (temp_3d[:, :, k]/theta_3d[:, :, 
k])**(1004.5/287.0)*1000.0 #pressure_3d[:, :, k] = 0.622*pv/qv_3d[:, :, k] + pv #pressure_3d[:, :, k] = 1000.0*np.exp(-const_g*(np.mean(z_grid[:,:,k])-0.0)/(287*temp_3d[:, :, k]*(1.0+1.6*qv_3d[:, :, k]))) # Assuming quantities at surface is same as the first cell - if(k==36): + if (k == nz-1): pressure_3d[:, :, k] = 1000.0 - 1000.0/(287*temp_3d[:, :, k]*(1.0+1.6*qv_3d[:, :, k]))*const_g*z_grid[:,:,k] else: pressure_3d[:, :, k] = pressure_3d[:, :, k+1] - pressure_3d[:, :, k+1]/(287*temp_3d[:, :, k+1]*(1.0+1.6*qv_3d[:, :, k+1]))*const_g*(z_grid[:,:,k]-z_grid[:,:,k+1]) + assert np.all(pressure_3d[:,:,k] > 0) + qsat_3d[:,:,k] = 0.622*ps/(pressure_3d[:, :, k]-ps) #pressure_typical_here = pressure_interp_func(np.mean(z_grid[:,:,k])); @@ -306,7 +279,6 @@ def ReadERA5_3DData(file_path, lambert_conformal): theta_3d[:,:,k] = temp_3d[:, :, k]*(1000.0/pressure_3d[:, :, k])**(287.0/1004.5) - # Find indices of elements that are zero or less #indices = np.argwhere(qv_3d[:, :, k] <= 0) indices = np.argwhere(rh_val <= 0) @@ -321,10 +293,10 @@ def ReadERA5_3DData(file_path, lambert_conformal): velocity[:,:,k,1] = vvel_at_lev velocity[:,:,k,2] = 0.0 - #print(f"Lat and lon are: {lat_grid[0,0]:.2f}, {lon_grid[0,0]:.2f}") #print(f"Temperature: {temp_3d[0,0,k]:.2f} K, Pressure: {pressure_3d[0,0,k]:.2f}, Geo height : {z_grid[0,0,k]:.2f} ") + #-- end of k-loop from top to bottom scalars = { "latitude": None, @@ -344,7 +316,6 @@ def ReadERA5_3DData(file_path, lambert_conformal): "qsat": qsat_3d, } - dir_path = "Images" os.makedirs(dir_path, exist_ok=True) @@ -355,9 +326,11 @@ def ReadERA5_3DData(file_path, lambert_conformal): output_binary = "./Output/ERA5Data_3D/ERF_IC_" + date_time_forecast_str + ".bin" - write_binary_vtk_structured_grid(output_vtk, x_grid, y_grid, z_grid, - nz, k_to_delete, True, - scalars, velocity) + write_binary_vtk_on_native_grid(output_vtk, + x_grid, y_grid, z_grid, + k_to_delete=k_to_delete, + point_data=scalars, + velocity=velocity) write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lats, domain_lons, x_grid, y_grid, z_grid, diff --git a/erftools/preprocessing/era5/ReadERA5DataAndWriteERF_SurfBC.py b/erftools/preprocessing/era5/ReadERA5DataAndWriteERF_SurfBC.py index df890d8..87a4492 100644 --- a/erftools/preprocessing/era5/ReadERA5DataAndWriteERF_SurfBC.py +++ b/erftools/preprocessing/era5/ReadERA5DataAndWriteERF_SurfBC.py @@ -13,27 +13,11 @@ from mpi4py import MPI import time -def CreateLCCMapping(area): - - lat1 = area[2] - lat2 = area[0] - lon1 = area[1] - lon2 = area[3] - - # Build CRS - delta = lat2 - lat1 - lon0 = (lon1 + lon2) / 2 - lat0 = (lat1 + lat2) / 2 - - lat_1 = lat1 + delta/6 - lat_2 = lat2 - delta/6 - - lambert_conformal = ( - f"+proj=lcc +lat_1={lat_1:.6f} +lat_2={lat_2:.6f} " - f"+lat_0={lat0:.6f} +lon_0={lon0:.6f} +datum=WGS84 +units=m +no_defs" - ) - - return lambert_conformal +from erftools.utils.latlon import (find_erf_domain_extents, + find_latlon_indices) +from erftools.io import (write_binary_vtk_on_native_grid, + write_binary_vtk_on_cartesian_grid, + write_binary_simple_erf) def read_user_input(filename): @@ -85,224 +69,6 @@ def Download_ERA5_SurfaceData(inputs): return [filename], area -def write_binary_simple_ERF(output_binary, x_grid, y_grid, z_grid, point_data): - - x_grid = np.asarray(x_grid) - y_grid = np.asarray(y_grid) - # Ensure grids are consistent - nrow, ncol = x_grid.shape - nz = len(z_grid[0,0,:]) - - with open(output_binary, "wb") as file: - file.write(struct.pack('iiii', ncol, nrow, nz, len(point_data))) - - - # Write grid 
points using a nested for loop - for i in range(ncol): - x = x_grid[0, i] - file.write(struct.pack('f', x)) - - for j in range(nrow): - y = y_grid[j, 0] - file.write(struct.pack('f', y)) - - for k in range(nz): - zavg = np.mean(z_grid[:,:,k]) - file.write(struct.pack('f', zavg)) - - # Write point data (if any) - if point_data: - for name, data in point_data.items(): - for k in range(nz): # Iterate over the z-dimension - for j in range(nrow): # Iterate over the y-dimension - for i in range(ncol): # Iterate over the x-dimension - value = data[i, j, k] - file.write(struct.pack('f', value)) - - -def write_binary_vtk_structured_grid(filename, x_grid, y_grid, z_grid, nz, k_to_delete, is_shift, point_data=None): - """ - Writes a binary VTK file for a structured grid. - - Args: - filename (str): Name of the VTK file to write. - x_grid, y_grid, z_grid (numpy.ndarray): 3D arrays of grid coordinates. - point_data (dict, optional): Dictionary of scalar data associated with the grid points. - """ - - x_grid = np.asarray(x_grid) - y_grid = np.asarray(y_grid) - # Ensure grids are consistent - nx, ny = x_grid.shape - nzval = nz - len(k_to_delete) - - print(nx, ny) - - with open(filename, 'wb') as f: - # Write the VTK header - f.write(b'# vtk DataFile Version 3.0\n') - f.write(b'Generated by Python script\n') - f.write(b'BINARY\n') - f.write(b'DATASET STRUCTURED_GRID\n') - f.write(f'DIMENSIONS {nx} {ny} {nzval}\n'.encode()) - f.write(f'POINTS {nx * ny * nzval} float\n'.encode()) - - # Write grid points - #points = np.stack((x_grid.ravel(), y_grid.ravel(), z_grid.ravel()), axis=-1) - #f.write(struct.pack('>' + 'f' * points.size, *points.ravel())) - - # Write grid points using a nested for loop - for k in range(nz): - for j in range(ny): - for i in range(nx): - x = x_grid[i, j] - y = y_grid[i, j] - z = np.mean(z_grid[:, :, k])+1e-6 - f.write(struct.pack('>fff', x, y, z)) - - # Write point data (if any) - if point_data: - f.write(f'POINT_DATA {nx * ny * nzval}\n'.encode()) - for name, data in point_data.items(): - f.write(f'SCALARS {name} float 1\n'.encode()) - f.write(b'LOOKUP_TABLE default\n') - for k in range(nz): # Iterate over the z-dimension - for j in range(ny): # Iterate over the y-dimension - for i in range(nx): # Iterate over the x-dimension - value = data[nx-1-i, j, k] - f.write(struct.pack('>f', value)) - -def write_binary_vtk_cartesian_file(filename, x_grid, y_grid, z_grid, nz, k_to_delete, is_shift, point_data=None, velocity=None): - """ - Writes a binary VTK file for a structured grid. - - Args: - filename (str): Name of the VTK file to write. - x_grid, y_grid, z_grid (numpy.ndarray): 3D arrays of grid coordinates. - point_data (dict, optional): Dictionary of scalar data associated with the grid points. 
- """ - - x_grid = np.asarray(x_grid) - y_grid = np.asarray(y_grid) - # Ensure grids are consistent - nrow, ncol = x_grid.shape - nzval = nz - - print(ncol, nrow) - - with open(filename, 'wb') as f: - # Write the VTK header - f.write(b'# vtk DataFile Version 3.0\n') - f.write(b'Generated by Python script\n') - f.write(b'BINARY\n') - f.write(b'DATASET STRUCTURED_GRID\n') - f.write(f'DIMENSIONS {ncol} {nrow} {nzval}\n'.encode()) - f.write(f'POINTS {ncol * nrow * nzval} float\n'.encode()) - - # Write grid points - #points = np.stack((x_grid.ravel(), y_grid.ravel(), z_grid.ravel()), axis=-1) - #f.write(struct.pack('>' + 'f' * points.size, *points.ravel())) - - # Write grid points using a nested for loop - for k in range(nz): - for j in range(nrow): - for i in range(ncol): - x = x_grid[j, i] - y = y_grid[j, i] - z = np.mean(z_grid[:, :, k]) - f.write(struct.pack('>fff', x, y, z)) - - # Write point data (if any) - if point_data: - f.write(f'POINT_DATA {ncol * nrow * nzval}\n'.encode()) - for name, data in point_data.items(): - f.write(f'SCALARS {name} float 1\n'.encode()) - f.write(b'LOOKUP_TABLE default\n') - for k in range(nz): # Iterate over the z-dimension - for j in range(nrow): # Iterate over the y-dimension - for i in range(ncol): # Iterate over the x-dimension - value = data[i, j, k] - f.write(struct.pack('>f', value)) - - -def find_latlon_indices(domain_lons, domain_lats, lon, lat): - nlats = len(domain_lats) - nlons = len(domain_lons) - lat_idx = 0 - lon_idx = 0 - for i in range(0,nlats): - if(lat < domain_lats[i]): - lat_idx = i - break - - for j in range(0,nlons): - if(lon < domain_lons[j]): - lon_idx = j - break - - return lon_idx, lat_idx - -def find_erf_domain_extents(x_grid, y_grid, nx, ny): - - # Need to determine the box extents of the cartesian box - # (xmin, xmax), (y_min, y_max) that fits - # in this region. 
Currently this method only works for the - # northern hemisphere - - - # Top is [nx-1,:] - # Bpttom is [0,:] - # Left is [:,0] - # Right is [:,ny-1] - #print(x_grid[0,:]) - - ymax = min(y_grid[nx-1,:]) - 100e3; - #print("Value of ymin is ", ymax); - - # Intersect it with the leftmost longitude and - # rightmost longitude - - # Leftmost longitude is the line joined by - # x1,y1 = x_grid[0,-1], y_grid[0,-1] - # x2, y2 = x_grid[nx-1,-1], y_grid[nx-1,-1] - - #print("Values are ", x_grid[0,-1], y_grid[0,-1], x_grid[-1,-1], y_grid[-1,-1]) - - i1 = 0 - for i in range(0, nx-1): - if(y_grid[i,-1] < ymax and y_grid[i+1,-1] > ymax): - i1 = i - break - - xmax = min(x_grid[i1,-1], x_grid[0,-1]) - 100e3 - - for i in range(0, nx-1): - if(y_grid[i,0] < ymax and y_grid[i+1,0] > ymax): - i1 = i - break - xmin = max(x_grid[i1,0], x_grid[0,0]) + 100e3 - - for i in range(0, ny-1): - if(x_grid[0,i] < xmax and x_grid[0,i+1] > xmax): - i1 = i - break - y1 = y_grid[0,i1]; - - for i in range(0, ny-1): - if(x_grid[0,i] < xmin and x_grid[0,i+1] > xmin): - i1 = i - break - - y2 = y_grid[0,i1] - - ymin = max(y1,y2) + 100e3 - - print("geometry.prob_lo = ", np.ceil(xmin+50e3), np.ceil(ymin+50e3), 0.0) - print("geometry.prob_hi = ", np.floor(xmax-50e3), np.floor(ymax-50e3), 25000.0) - - return xmin, xmax, ymin, ymax - - def write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lats, domain_lons, x_grid, y_grid, z_grid, nx, ny, nz, k_to_delete, lambert_conformal, point_data=None): @@ -373,9 +139,13 @@ def write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lat output_cart_vtk = "./Output/VTK/Surface/ERFDomain/" + "ERF_Surface_Cart_" + date_time_forecast_str +".vtk" - tmp = [] - write_binary_vtk_cartesian_file(output_cart_vtk, x_grid_erf, y_grid_erf, z_grid_erf, nz_erf, tmp, False, scalars) - write_binary_simple_ERF(output_binary, x_grid_erf, y_grid_erf, z_grid_erf, scalars) + write_binary_vtk_on_cartesian_grid(output_cart_vtk, + x_grid_erf, y_grid_erf, z_grid_erf, + scalars) + + write_binary_simple_erf(output_binary, + x_grid_erf, y_grid_erf, z_grid_erf, + point_data=scalars) def ReadERA5_SurfaceData(file_path, lambert_conformal): # Open the GRIB2 file @@ -518,7 +288,12 @@ def ReadERA5_SurfaceData(file_path, lambert_conformal): output_binary = "./Output/ERA5Data_Surface/ERF_Surface_" + date_time_forecast_str + ".bin" - write_binary_vtk_structured_grid(output_vtk, x_grid, y_grid, z_grid, nz, k_to_delete, True, scalars) + write_binary_vtk_on_native_grid(output_vtk, + x_grid, y_grid, z_grid, + k_to_delete=k_to_delete, + skip_latlon=False, + zoffset=1e-6, + point_data=scalars) write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lats, domain_lons, x_grid, y_grid, z_grid, diff --git a/erftools/preprocessing/gfs/Download_GFSData.py b/erftools/preprocessing/gfs/Download_GFSData.py index 59913d4..eb37bb4 100644 --- a/erftools/preprocessing/gfs/Download_GFSData.py +++ b/erftools/preprocessing/gfs/Download_GFSData.py @@ -6,6 +6,7 @@ from tqdm import tqdm import threading + def read_user_input(filename): inputs = {} with open(filename, 'r') as f: @@ -21,13 +22,13 @@ def read_user_input(filename): inputs[key] = [value] return inputs -from datetime import datetime, timedelta +from datetime import datetime -def generate_urls_for_24_hours(data): - year = data['year'][0] - month = data['month'][0].zfill(2) - day = data['day'][0].zfill(2) - hour = data['time'][0].split(':')[0].zfill(2) +def generate_urls_for_24_hours(inp): + year = inp['year'][0] + month = inp['month'][0].zfill(2) 
+ day = inp['day'][0].zfill(2) + hour = inp['time'][0].split(':')[0].zfill(2) yyyymmddhh = f"{year}{month}{day}{hour}" yyyymmdd = f"{year}{month}{day}" @@ -41,60 +42,55 @@ def generate_urls_for_24_hours(data): url = f"https://data-osdf.rda.ucar.edu/ncar/rda/d084001/{year}/{yyyymmdd}/{filename}" urls.append(url) filenames.append(filename) + return urls, filenames -def construct_url_filename(data): +def construct_url_filename(inp, product): - year = data['year'][0] # your read_user_input returns a list for year - month = data['month'][0].zfill(2) - day = data['day'][0].zfill(2) - hour = data['time'][0].split(':')[0].zfill(2) + year = inp['year'][0] # your read_user_input returns a list for year + month = inp['month'][0].zfill(2) + day = inp['day'][0].zfill(2) + hour = inp['time'][0].split(':')[0].zfill(2) yyyymmddhh = f"{year}{month}{day}{hour}" yyyymm = f"{year}{month}" yyyymmdd = f"{year}{month}{day}" - # Historical forecast data - filename = f"gfs.0p25.{yyyymmddhh}.f000.grib2" - url = f"https://data-osdf.rda.ucar.edu/ncar/rda/d084001/{year}/{yyyymmdd}/{filename}" + if str(product).lower() in ['forecast', 'd084001', '84.1']: + # Historical forecast data (0.25 deg x 0.25 deg grids, every 3h) + filename = f"gfs.0p25.{yyyymmddhh}.f000.grib2" + url = f"https://data-osdf.rda.ucar.edu/ncar/rda/d084001/{year}/{yyyymmdd}/{filename}" - # Final reanalaysis data - #filename = f"gdas1.fnl0p25.{yyyymmddhh}.f00.grib2" - #url = f"https://data-osdf.rda.ucar.edu/ncar/rda/d083003/{year}/{yyyymm}/{filename}" - - print("URL is ", url) + elif str(product).lower() in ['final', 'd083003', '83.3']: + # Final reanalysis data (0.25 deg x 0.25 deg grids, every 6h) + filename = f"gdas1.fnl0p25.{yyyymmddhh}.f00.grib2" + url = f"https://data-osdf.rda.ucar.edu/ncar/rda/d083003/{year}/{yyyymm}/{filename}" return url, filename -def Download_GFS_Data(inputs): - data = read_user_input(inputs) - lat_max, lon_min, lat_min, lon_max = data.get('area') +def Download_GFS_Data(inputs, product='forecast'): + inp = read_user_input(inputs) + lat_max, lon_min, lat_min, lon_max = inp.get('area') + area = [lat_max, lon_min, lat_min, lon_max] - url, filename = construct_url_filename(data) + url, filename = construct_url_filename(inp, product) + if os.path.isfile(filename): + print('GRIB file found:', filename) + return filename, area print("Download URL:", url) print("Filename:", filename) - - import urllib.request try: req = urllib.request.Request(url, headers={'User-Agent': 'Mozilla/5.0'}) with urllib.request.urlopen(req) as response, open(filename, 'wb') as out_file: out_file.write(response.read()) - print("Download complete.") except Exception as e: print("Download failed:", e) + else: + print("Download complete.") - area = [lat_max, lon_min, lat_min, lon_max] return filename, area -def reporthook(block_num, block_size, total_size): - downloaded = block_num * block_size - percent = min(downloaded * 100 / total_size, 100) - sys.stdout.write(f"\r Downloaded: {percent:.1f}%") - sys.stdout.flush() - if downloaded >= total_size: - print() # Newline at end - def download_one_with_progress(url, filename, position): def hook(block_num, block_size, total_size): downloaded = block_num * block_size @@ -111,10 +107,10 @@ def hook(block_num, block_size, total_size): def Download_GFS_ForecastData(inputs): - data = read_user_input(inputs) - lat_max, lon_min, lat_min, lon_max = data.get('area') + inp = read_user_input(inputs) + lat_max, lon_min, lat_min, lon_max = inp.get('area') - urls, filenames = generate_urls_for_24_hours(data) + urls, filenames = 
generate_urls_for_24_hours(inp) with ThreadPoolExecutor(max_workers=4) as executor: for i, (url, fname) in enumerate(zip(urls, filenames)): diff --git a/erftools/preprocessing/gfs/IO.py b/erftools/preprocessing/gfs/IO.py index 9da7430..c41bc13 100644 --- a/erftools/preprocessing/gfs/IO.py +++ b/erftools/preprocessing/gfs/IO.py @@ -2,276 +2,24 @@ import struct import os from pyproj import Proj, Transformer, CRS -from math import * - -def calculate_utm_zone(longitude): - """ - Calculate the UTM zone for a given longitude. - """ - return int((longitude + 180) // 6) + 1 - -def write_binary_simple_ERF(output_binary, lat_erf, lon_erf, x_grid, y_grid, z_grid, point_data): - - x_grid = np.asarray(x_grid) - y_grid = np.asarray(y_grid) - # Ensure grids are consistent - nrow, ncol = x_grid.shape - nz = len(z_grid[0,0,:]) - - #print(nrow, ncol) - #print(x_grid[0,:]) - - with open(output_binary, "wb") as file: - file.write(struct.pack('iiii', ncol, nrow, nz, len(point_data))) - - for j in range(nrow): # Iterate over the y-dimension - for i in range(ncol): # Iterate over the x-dimension - file.write(struct.pack('f', lat_erf[i,j,0])) - - for j in range(nrow): # Iterate over the y-dimension - for i in range(ncol): # Iterate over the x-dimension - file.write(struct.pack('f', lon_erf[i,j,0])) - - - # Write grid points using a nested for loop - for i in range(ncol): - x = x_grid[0, i] - file.write(struct.pack('f', x)) - - for j in range(nrow): - y = y_grid[j, 0] - file.write(struct.pack('f', y)) - - for k in range(nz): - zavg = np.mean(z_grid[:,:,k]) - file.write(struct.pack('f', zavg)) - - # Write point data (if any) - if point_data: - for name, data in point_data.items(): - for k in range(nz): # Iterate over the z-dimension - for j in range(nrow): # Iterate over the y-dimension - for i in range(ncol): # Iterate over the x-dimension - value = data[i, j, k] - file.write(struct.pack('f', value)) - - -def write_binary_vtk_structured_grid(filename, x_grid, y_grid, z_grid, - nz, k_to_delete, is_shift, - point_data=None, velocity=None): - """ - Writes a binary VTK file for a structured grid. - - Args: - filename (str): Name of the VTK file to write. - x_grid, y_grid, z_grid (numpy.ndarray): 3D arrays of grid coordinates. - point_data (dict, optional): Dictionary of scalar data associated with the grid points. 
- """ - - x_grid = np.asarray(x_grid) - y_grid = np.asarray(y_grid) - # Ensure grids are consistent - nx, ny = x_grid.shape - nzval = nz - len(k_to_delete) - - print(nx, ny) - - with open(filename, 'wb') as f: - # Write the VTK header - f.write(b'# vtk DataFile Version 3.0\n') - f.write(b'Generated by Python script\n') - f.write(b'BINARY\n') - f.write(b'DATASET STRUCTURED_GRID\n') - f.write(f'DIMENSIONS {nx} {ny} {nzval}\n'.encode()) - f.write(f'POINTS {nx * ny * nzval} float\n'.encode()) - - # Write grid points - #points = np.stack((x_grid.ravel(), y_grid.ravel(), z_grid.ravel()), axis=-1) - #f.write(struct.pack('>' + 'f' * points.size, *points.ravel())) - - # Write grid points using a nested for loop - for k in range(nz): - z = np.mean(z_grid[:, :, nz-1-k]) - if nz-1-k in k_to_delete: - print("Val is ", nz - 1 - k) - continue; - for j in range(ny): - for i in range(nx): - x = x_grid[i, j] - y = y_grid[i, j] - f.write(struct.pack('>fff', x, y, z)) - - - # Write point data (if any) - if point_data: - f.write(f'POINT_DATA {nx * ny * nzval}\n'.encode()) - for name, data in point_data.items(): - if(name == "latitude" or name=="longitude"): - continue; - f.write(f'SCALARS {name} float 1\n'.encode()) - f.write(b'LOOKUP_TABLE default\n') - for k in range(nz): # Iterate over the z-dimension - if nz-1-k in k_to_delete: - continue; - for j in range(ny): # Iterate over the y-dimension - for i in range(nx): # Iterate over the x-dimension - value = data[nx-1-i, j, nz-1-k] - f.write(struct.pack('>f', value)) - - # Write velocity vector field - if velocity is not None: - f.write("VECTORS velocity float\n".encode()) - for k in range(nz): - if nz-1-k in k_to_delete: - continue; - for j in range(ny): - for i in range(nx): - vx, vy, vz = velocity[nx-1-i, j, nz-1-k] - f.write(struct.pack('>fff',vx, vy, vz)) - - -def write_binary_vtk_cartesian_file(filename, x_grid, y_grid, z_grid, - nz, k_to_delete, is_shift, - point_data=None, velocity=None): - """ - Writes a binary VTK file for a structured grid. - - Args: - filename (str): Name of the VTK file to write. - x_grid, y_grid, z_grid (numpy.ndarray): 3D arrays of grid coordinates. - point_data (dict, optional): Dictionary of scalar data associated with the grid points. 
- """ - - x_grid = np.asarray(x_grid) - y_grid = np.asarray(y_grid) - # Ensure grids are consistent - nrow, ncol = x_grid.shape - nzval = nz - - print(ncol, nrow) - - with open(filename, 'wb') as f: - # Write the VTK header - f.write(b'# vtk DataFile Version 3.0\n') - f.write(b'Generated by Python script\n') - f.write(b'BINARY\n') - f.write(b'DATASET STRUCTURED_GRID\n') - f.write(f'DIMENSIONS {ncol} {nrow} {nzval}\n'.encode()) - f.write(f'POINTS {ncol * nrow * nzval} float\n'.encode()) - - # Write grid points - #points = np.stack((x_grid.ravel(), y_grid.ravel(), z_grid.ravel()), axis=-1) - #f.write(struct.pack('>' + 'f' * points.size, *points.ravel())) - - # Write grid points using a nested for loop - for k in range(nz): - z = np.mean(z_grid[:, :, k]) - for j in range(nrow): - for i in range(ncol): - x = x_grid[j, i] - y = y_grid[j, i] - f.write(struct.pack('>fff', x, y, z)) - - # Write point data (if any) - if point_data: - f.write(f'POINT_DATA {ncol * nrow * nzval}\n'.encode()) - for name, data in point_data.items(): - f.write(f'SCALARS {name} float 1\n'.encode()) - f.write(b'LOOKUP_TABLE default\n') - for k in range(nz): # Iterate over the z-dimension - for j in range(nrow): # Iterate over the y-dimension - for i in range(ncol): # Iterate over the x-dimension - value = data[i, j, k] - f.write(struct.pack('>f', value)) - - -def find_latlon_indices(domain_lons, domain_lats, lon, lat): - nlats = len(domain_lats) - nlons = len(domain_lons) - lat_idx = 0 - lon_idx = 0 - for i in range(0,nlats): - if(lat < domain_lats[i]): - lat_idx = i - break - - for j in range(0,nlons): - if(lon < domain_lons[j]): - lon_idx = j - break - - return lon_idx, lat_idx - -def find_erf_domain_extents(x_grid, y_grid, nx, ny): - - # Need to determine the box extents of the cartesian box - # (xmin, xmax), (y_min, y_max) that fits - # in this region. 
Currently this method only works for the - # northern hemisphere - - - # Top is [nx-1,:] - # Bpttom is [0,:] - # Left is [:,0] - # Right is [:,ny-1] - #print(x_grid[0,:]) - - ymax = min(y_grid[nx-1,:]) - 100e3; - #print("Value of ymin is ", ymax); - - # Intersect it with the leftmost longitude and - # rightmost longitude - - # Leftmost longitude is the line joined by - # x1,y1 = x_grid[0,-1], y_grid[0,-1] - # x2, y2 = x_grid[nx-1,-1], y_grid[nx-1,-1] - - #print("Values are ", x_grid[0,-1], y_grid[0,-1], x_grid[-1,-1], y_grid[-1,-1]) +from math import sin, cos, atan2 - i1 = 0 - for i in range(0, nx-1): - if(y_grid[i,-1] < ymax and y_grid[i+1,-1] > ymax): - i1 = i - break - - xmax = min(x_grid[i1,-1], x_grid[0,-1]) - 100e3 - - for i in range(0, nx-1): - if(y_grid[i,0] < ymax and y_grid[i+1,0] > ymax): - i1 = i - break - xmin = max(x_grid[i1,0], x_grid[0,0]) + 100e3 - - for i in range(0, ny-1): - if(x_grid[0,i] < xmax and x_grid[0,i+1] > xmax): - i1 = i - break - y1 = y_grid[0,i1]; - - for i in range(0, ny-1): - if(x_grid[0,i] < xmin and x_grid[0,i+1] > xmin): - i1 = i - break - - y2 = y_grid[0,i1] - - ymin = max(y1,y2) + 100e3 - - print("geometry.prob_lo = ", np.ceil(xmin+50e3), np.ceil(ymin+50e3), 0.0) - print("geometry.prob_hi = ", np.floor(xmax-50e3), np.floor(ymax-50e3), 25000.0) - - return xmin, xmax, ymin, ymax - - -def write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lats, domain_lons, - x_grid, y_grid, z_grid, nx, ny, nz, - k_to_delete, lambert_conformal, point_data=None): +from erftools.utils.latlon import (find_erf_domain_extents, + find_latlon_indices) +from erftools.io import (write_binary_simple_erf, + write_binary_vtk_on_cartesian_grid) +def write_binary_vtk_cartesian(date_time_forecast_str, + output_binary, + domain_lats, domain_lons, + x_grid, y_grid, z_grid, + nx, ny, nz, + k_to_delete, + lambert_conformal, + point_data=None): xmin, xmax, ymin, ymax = find_erf_domain_extents(x_grid, y_grid, nx, ny) - #print("xmin, xmax, ymin, ymax are ", xmin, xmax, ymin, ymax); - print("Value of nx and ny are ", nx, ny) @@ -304,7 +52,7 @@ def write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lat qr_erf = np.zeros((nx_erf, ny_erf, nz_erf)) theta_erf = np.zeros((nx_erf, ny_erf, nz_erf)) - lat_erf = np.zeros((nx_erf,ny_erf,nz_erf)) + lat_erf = np.zeros((nx_erf,ny_erf,nz_erf)) lon_erf = np.zeros((nx_erf,ny_erf,nz_erf)) scalars = { @@ -345,7 +93,6 @@ def write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lat uvel_3d = point_data["uvel"] vvel_3d = point_data["vvel"] - print("Values of nx_erf and ny_erf are", nx_erf, ny_erf); print("Shapes of xgrid and ygrid are", x_grid.shape, y_grid.shape); print("Shapes of xgrid_erf and ygrid_erf are", x_grid_erf.shape, y_grid_erf.shape); @@ -362,14 +109,11 @@ def write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lat for j in range(ny_erf): for i in range(nx_erf): lon, lat = transformer.transform(x_grid_erf[j,i], y_grid_erf[j,i]) - lon_idx, lat_idx = find_latlon_indices(domain_lons, domain_lats, 360.0+lon, lat) - lat_erf[i,j,0] = lat; - lon_erf[i,j,0] = lon; - #if(lat_idx > 110): - # print("Lat value out of range", lat_idx, lon_idx, x_grid_erf[i,j], y_grid_erf[i,j]) - # sys.exit() - #print("The values of lat and lon are", x_grid_erf[i,j], y_grid_erf[i,j], lon, lat, lon_idx, lat_idx) - #sys.exit() + lon_idx, lat_idx = find_latlon_indices(domain_lons, + domain_lats, + 360.0+lon, lat) + lat_erf[i,j,0] = lat + lon_erf[i,j,0] = lon kcount = 1 for k in range(nz): # Iterate 
over the z-dimension if nz-1-k in k_to_delete: @@ -378,7 +122,7 @@ def write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lat scalars_to_plot[name][i,j,kcount] = lat; #print("Reaching here lat", lat) elif(name == "longitude"): - scalars_to_plot[name][i,j,kcount] = lon; + scalars_to_plot[name][i,j,kcount] = lon; #print("Reaching here lon", lon) else: scalars_to_plot[name][i,j,kcount] = (data[nx-1-lat_idx, lon_idx, nz-1-k] + data[nx-1-lat_idx, lon_idx-1, nz-1-k] + @@ -393,7 +137,6 @@ def write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lat fx = (360.0 + lon - lon0) / (lon1 - lon0) fy = (lat - lat0) / (lat1 - lat0) - if(i < nx_erf-1): x1 = x_grid[j,i] y1 = y_grid[j,i] @@ -417,13 +160,14 @@ def write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lat scalars_to_plot[name][i,j,kcount] = lat; #print("Reaching here lat", lat) elif(name == "longitude"): - scalars_to_plot[name][i,j,kcount] = lon; + scalars_to_plot[name][i,j,kcount] = lon; #print("Reaching here lon", lon) elif(name == "uvel" or name == "vvel"): u_tmp = (fx*fy*uvel_3d[nx-1-lat_idx, lon_idx, nz-1-k] + fx*(1-fy)*uvel_3d[nx-1-lat_idx, lon_idx-1, nz-1-k] + (1-fx)*(1-fy)*uvel_3d[nx-1-lat_idx+1, lon_idx-1, nz-1-k] + (1-fx)*fy*uvel_3d[nx-1-lat_idx+1, lon_idx, nz-1-k]) v_tmp = (fx*fy*vvel_3d[nx-1-lat_idx, lon_idx, nz-1-k] + fx*(1-fy)*vvel_3d[nx-1-lat_idx, lon_idx-1, nz-1-k] + (1-fx)*(1-fy)*vvel_3d[nx-1-lat_idx+1, lon_idx-1, nz-1-k] + (1-fx)*fy*vvel_3d[nx-1-lat_idx+1, lon_idx, nz-1-k]) + if(name == "uvel"): scalars_to_plot[name][i,j,kcount] = u_tmp*cos(theta) - v_tmp*sin(theta) elif(name == "vvel"): @@ -431,7 +175,7 @@ def write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lat else: scalars_to_plot[name][i,j,kcount] = (fx*fy*data[nx-1-lat_idx, lon_idx, nz-1-k] + fx*(1-fy)*data[nx-1-lat_idx, lon_idx-1, nz-1-k] + (1-fx)*(1-fy)*data[nx-1-lat_idx+1, lon_idx-1, nz-1-k] + (1-fx)*fy*data[nx-1-lat_idx+1, lon_idx, nz-1-k]) - + if(name != "latitude" and name != "longitude"): scalars[name][i,j,kcount] = scalars_to_plot[name][i,j,kcount] @@ -440,15 +184,20 @@ def write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lat scalars_to_plot[name][i,j,0] = scalars_to_plot[name][i,j,1] if(name != "latitude" and name != "longitude"): scalars[name][i,j,0] = scalars[name][i,j,1] - + else: print("Variable not found in scalars list", name) #sys.exit() output_cart_vtk = "./Output/" + "ERF_IC_" + date_time_forecast_str +".vtk" - tmp = [] - print("Writing write_binary_vtk_cartesian_file") - write_binary_vtk_cartesian_file(output_cart_vtk, x_grid_erf, y_grid_erf, z_grid_erf, nz_erf, tmp, False, scalars_to_plot) - print("Writing write_binary_simple_ERF") - write_binary_simple_ERF(output_binary, lat_erf, lon_erf, x_grid_erf, y_grid_erf, z_grid_erf, scalars) + print("Writing write_binary_vtk_on_cartesian_grid") + write_binary_vtk_on_cartesian_grid(output_cart_vtk, + x_grid_erf, y_grid_erf, z_grid_erf, + point_data=scalars_to_plot) + + print("Writing write_binary_simple_erf") + write_binary_simple_erf(output_binary, + x_grid_erf, y_grid_erf, z_grid_erf, + lat_erf=lat_erf, lon_erf=lon_erf, + point_data=scalars) diff --git a/erftools/preprocessing/gfs/Plot_1D.py b/erftools/preprocessing/gfs/Plot_1D.py deleted file mode 100644 index c5d2372..0000000 --- a/erftools/preprocessing/gfs/Plot_1D.py +++ /dev/null @@ -1,95 +0,0 @@ -import matplotlib.pyplot as plt -import numpy as np - -def plot_1d(temp_3d, pressure_3d, theta_3d, qv_3d, qsat_3d, z_grid, k_to_delete): - nz = 
z_grid.shape[2] - plt.figure(1) - for k in np.arange(0, nz, 1): - if nz - 1 - k in k_to_delete: - continue - plt.plot(np.mean(temp_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'xk',label="mean" if k == 0 else "") - plt.plot(np.max(temp_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'or',label="max" if k == 0 else "") - plt.plot(np.min(temp_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), '^b',label="min" if k == 0 else "") - - plt.ylim([0, 20000]) - plt.xlabel('T (K)',fontsize=15) - plt.ylabel(r'$z$ (m)',fontsize=15) - - dirname = "./TypicalAtmosphereData/" - temperature_filename = dirname + "temp_vs_z_actual.txt" - - data = np.loadtxt(temperature_filename) - plt.plot(data[:,0]+8,data[:,1],'k',label='Typical atmos.') - plt.legend() - plt.savefig("./Images/temp_vs_z.png") - - plt.figure(2) - for k in np.arange(0, nz, 1): - if nz - 1 - k in k_to_delete: - continue - plt.plot(np.mean(pressure_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'xk', label="mean" if k == 0 else "") - plt.plot(np.max(pressure_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'or',label="max" if k == 0 else "") - plt.plot(np.min(pressure_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), '^b',label="min" if k == 0 else "") - - plt.ylim([0, 20000]) - plt.xlabel('p (mbar)',fontsize=15) - plt.ylabel(r'$z$ (m)',fontsize=15) - - dirname = "./TypicalAtmosphereData/" - pressure_filename = dirname + "pressure_vs_z_actual.txt" - - data = np.loadtxt(pressure_filename) - plt.plot(data[:,0],data[:,1],'k',label='Typical atmos.') - plt.legend() - plt.savefig("./Images/pressure_vs_z.png") - - plt.figure(3) - for k in np.arange(0, nz, 1): - if nz - 1 - k in k_to_delete: - continue - plt.plot(np.mean(theta_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'xk',label="mean" if k == 0 else "") - plt.plot(np.max(theta_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'or',label="max" if k == 0 else "") - plt.plot(np.min(theta_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), '^b',label="min" if k == 0 else "") - - plt.ylim([0, 20000]) - plt.xlim([280, 600]) - plt.xlabel(r'$\theta$ (K)',fontsize=15) - plt.ylabel(r'$z$ (m)',fontsize=15) - - dirname = "./TypicalAtmosphereData/" - theta_filename = dirname + "theta_vs_z_actual.txt" - - data = np.loadtxt(theta_filename) - plt.plot(data[:,0],data[:,1],'k',label='Typical atmos.') - plt.legend() - plt.savefig("./Images/theta_vs_z.png") - - plt.figure(4) - for k in np.arange(0, nz, 1): - if nz - 1 - k in k_to_delete: - continue - plt.plot(np.mean(qv_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'xk',label="mean" if k == 0 else "") - plt.plot(np.max(qv_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'or',label="max" if k == 0 else "") - plt.plot(np.min(qv_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), '^b',label="min" if k == 0 else "") - - plt.ylim([0, 20000]) - plt.xlabel(r'$q_v$ (kg/kg)',fontsize=15) - plt.ylabel(r'$z$ (m)',fontsize=15) - plt.legend() - plt.savefig("./Images/qv_vs_z.png") - - plt.figure(5) - for k in np.arange(0, nz, 1): - if nz - 1 - k in k_to_delete: - continue - plt.plot(np.mean(qsat_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'xk',label="mean" if k == 0 else "") - plt.plot(np.max(qsat_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), 'or',label="max" if k == 0 else "") - plt.plot(np.min(qsat_3d[:,:,nz-1-k]), np.mean(z_grid[:,:,nz-1-k]), '^b',label="min" if k == 0 else "") - - plt.xlabel(r'$q_{sat}$ (kg/kg)',fontsize=15) - plt.ylabel(r'$z$ (m)',fontsize=15) - plt.legend() - plt.ylim([0, 20000]) - plt.xlim([0, 0.03]) - #plt.axis('tight') - plt.savefig("./Images/qsat_vs_z.png") diff --git 
a/erftools/preprocessing/gfs/ReadGFSDataAndWriteERF_IC.py b/erftools/preprocessing/gfs/ReadGFSDataAndWriteERF_IC.py index 22cf966..e013d1c 100644 --- a/erftools/preprocessing/gfs/ReadGFSDataAndWriteERF_IC.py +++ b/erftools/preprocessing/gfs/ReadGFSDataAndWriteERF_IC.py @@ -1,43 +1,21 @@ +from importlib import resources import pygrib import numpy as np import struct -from pyproj import Proj, Transformer, CRS -import matplotlib.pyplot as plt +from pyproj import Transformer import sys import os from scipy.interpolate import interp1d -from erftools.preprocessing import calculate_utm_zone -from erftools.preprocessing import write_binary_vtk_structured_grid +from erftools.io import write_binary_vtk_on_native_grid from erftools.preprocessing import write_binary_vtk_cartesian from erftools.preprocessing import plot_1d +from erftools.constants import CONST_GRAV as const_g +from erftools.utils.projection import calculate_utm_zone +from erftools.utils.microphysics import p_sat -#from IO import * -#from Plot_1D import plot_1d -#from Download_GFSData import * - -const_g = 9.81 - -def p_sat(temp): - tC = temp - 273.15 # Convert temperature from Kelvin to Celsius - - # Create masks for conditions - mask_positive = tC > 0.0 - mask_negative = ~mask_positive - - # Initialize ps with zeros (same shape as temp) - ps = np.zeros_like(temp) - - # Compute ps for tC > 0 - ps[mask_positive] = 6.112 * np.exp(17.62 * tC[mask_positive] / (tC[mask_positive] + 243.12)) - - # Compute ps for tC <= 0 - ps[mask_negative] = 6.112 * np.exp(22.46 * tC[mask_negative] / (tC[mask_negative] + 272.62)) - - return ps - def ReadGFS_3DData(file_path, area, lambert_conformal): # Open the GRIB2 file pressure_levels = [] @@ -133,9 +111,6 @@ def ReadGFS_3DData(file_path, area, lambert_conformal): temp_3d_hr3 = np.stack(temp_3d_hr3, axis=0) vort_3d_hr3 = np.stack(vort_3d_hr3, axis=0) - - - #pressure_3d_hr3 = np.stack(pressure_3d_hr3, axis=0) # Get the size of each dimension dim1, dim2, dim3 = ght_3d_hr3.shape @@ -155,7 +130,6 @@ def ReadGFS_3DData(file_path, area, lambert_conformal): print("Min max lat lons are ", unique_lats[0], unique_lats[-1], unique_lons[0], unique_lons[-1]); - nlats = len(unique_lats) nlons = len(unique_lons) @@ -198,7 +172,6 @@ def ReadGFS_3DData(file_path, area, lambert_conformal): print("Size of rh_3d_hr3 is ", rh_3d_hr3.shape[0]) - prev_mean = np.mean(ght_3d_hr3[0]) # start from the top level for k in range(1, ght_3d_hr3.shape[0]): current_mean = np.mean(ght_3d_hr3[k]) @@ -220,8 +193,6 @@ def ReadGFS_3DData(file_path, area, lambert_conformal): print("The number of lats and lons are levels are %d, %d, %d"%(lats.shape[0], lats.shape[1], nz)); - #sys.exit("Stopping the script here.") - z_grid = np.zeros((nx, ny, nz)) rhod_3d = np.zeros((nx, ny, nz)) uvel_3d = np.zeros((nx, ny, nz)) @@ -241,7 +212,6 @@ def ReadGFS_3DData(file_path, area, lambert_conformal): pressure_3d = np.zeros((nx, ny, nz)) theta_3d = np.zeros((nx, ny, nz)) - # Create meshgrid x_grid, y_grid = np.meshgrid(domain_lons, domain_lats) lon_grid, lat_grid = np.meshgrid(domain_lons, domain_lats) @@ -255,10 +225,8 @@ def ReadGFS_3DData(file_path, area, lambert_conformal): print("size is ", len(qv_3d_hr3)) - dirname = "./TypicalAtmosphereData/" - pressure_filename = dirname + "pressure_vs_z_actual.txt" - - pressure_typical = np.loadtxt(pressure_filename) + with resources.open_text('erftools.data.typical_atmosphere', 'pressure_vs_z_actual.txt') as f: + pressure_typical = np.loadtxt(f) pressure_interp_func = interp1d(pressure_typical[:,1], 
pressure_typical[:,0], kind='linear', fill_value="extrapolate") # Find the index of the desired pressure level @@ -324,26 +292,19 @@ def ReadGFS_3DData(file_path, area, lambert_conformal): print("Avg val is ", k, np.mean(z_grid[:,:,k]), ) - - #pressure_3d[:, :, k] = (temp_3d[:, :, k]/theta_3d[:, :, k])**(1004.5/287.0)*1000.0 #pressure_3d[:, :, k] = 0.622*pv/qv_3d[:, :, k] + pv #pressure_3d[:, :, k] = 1000.0*np.exp(-const_g*(np.mean(z_grid[:,:,k])-0.0)/(287*temp_3d[:, :, k]*(1.0+1.6*qv_3d[:, :, k]))) # Assuming quantities at surface is same as the first cell - if(k==nz-1): + if (k == nz-1): pressure_3d[:, :, k] = 1000.0 - 1000.0/(287*temp_3d[:, :, k]*(1.0+1.6*qv_3d[:, :, k]))*const_g*z_grid[:,:,k] else: pressure_3d[:, :, k] = pressure_3d[:, :, k+1] - pressure_3d[:, :, k+1]/(287*temp_3d[:, :, k+1]*(1.0+1.6*qv_3d[:, :, k+1]))*const_g*(z_grid[:,:,k]-z_grid[:,:,k+1]) - qsat_3d[:,:,k] = 0.622*ps/(pressure_3d[:, :, k]-ps) + assert np.all(pressure_3d[:,:,k] > 0) - if(k==5): - for i in np.arange(0,nx,1): - for j in np.arange(0,ny,1): - if(pressure_3d[i, j, k] <= 0.0): - print("Value here problematic ", i, j, pressure_3d[i, j, k],pressure_3d[i, j, k+1],temp_3d[i, j, k+1],qv_3d[i, j, k+1],z_grid[i,j,k],z_grid[i,j,k+1]) - + qsat_3d[:,:,k] = 0.622*ps/(pressure_3d[:, :, k]-ps) #pressure_typical_here = pressure_interp_func(np.mean(z_grid[:,:,k])); #indices = np.argwhere( (pressure_3d[:,:,k] <= 0.9*pressure_typical_here) | (pressure_3d[:,:,k] >= 1.02*pressure_typical_here)) @@ -361,7 +322,6 @@ def ReadGFS_3DData(file_path, area, lambert_conformal): theta_3d[:,:,k] = temp_3d[:, :, k]*(1000.0/pressure_3d[:, :, k])**(287.0/1004.5) - # Find indices of elements that are zero or less #indices = np.argwhere(qv_3d[:, :, k] <= 0) indices = np.argwhere(rh_val <= 0) @@ -376,10 +336,10 @@ def ReadGFS_3DData(file_path, area, lambert_conformal): velocity[:,:,k,1] = vvel_at_lev velocity[:,:,k,2] = 0.0 - #print(f"Lat and lon are: {lat_grid[0,0]:.2f}, {lon_grid[0,0]:.2f}") #print(f"Temperature: {temp_3d[0,0,k]:.2f} K, Pressure: {pressure_3d[0,0,k]:.2f}, Geo height : {z_grid[0,0,k]:.2f} ") + #-- end of k-loop from top to bottom scalars = { #"latitude": None, @@ -399,7 +359,6 @@ def ReadGFS_3DData(file_path, area, lambert_conformal): "qsat": qsat_3d, } - dir_path = "Images" os.makedirs(dir_path, exist_ok=True) @@ -415,9 +374,11 @@ def ReadGFS_3DData(file_path, area, lambert_conformal): output_binary = "./Output/ERF_IC_" + date_time_forecast_str + ".bin" - write_binary_vtk_structured_grid(output_vtk, x_grid, y_grid, z_grid, - nz, k_to_delete, True, - scalars, velocity) + write_binary_vtk_on_native_grid(output_vtk, + x_grid, y_grid, z_grid, + k_to_delete=k_to_delete, + point_data=scalars, + velocity=velocity) write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lats, domain_lons, x_grid, y_grid, z_grid, diff --git a/erftools/preprocessing/gfs/ReadGFSDataAndWriteERF_IC_FourCastNetGFS.py b/erftools/preprocessing/gfs/ReadGFSDataAndWriteERF_IC_FourCastNetGFS.py index d8358b4..4389ac7 100644 --- a/erftools/preprocessing/gfs/ReadGFSDataAndWriteERF_IC_FourCastNetGFS.py +++ b/erftools/preprocessing/gfs/ReadGFSDataAndWriteERF_IC_FourCastNetGFS.py @@ -1,341 +1,314 @@ +from importlib import resources import pygrib import numpy as np import struct -from pyproj import Proj, Transformer, CRS +from pyproj import Transformer, CRS import matplotlib.pyplot as plt import sys import os from scipy.interpolate import interp1d -from erftools.preprocessing import calculate_utm_zone -from erftools.preprocessing import 
write_binary_vtk_structured_grid +from erftools.io import write_binary_vtk_on_native_grid from erftools.preprocessing import write_binary_vtk_cartesian from erftools.preprocessing import plot_1d +from erftools.constants import CONST_GRAV as const_g +from erftools.utils.projection import calculate_utm_zone -#from IO import * -#from Plot_1D import plot_1d -#from Download_GFSData import * - -const_g = 9.81 - -def p_sat(temp): - tC = temp - 273.15 # Convert temperature from Kelvin to Celsius - - # Create masks for conditions - mask_positive = tC > 0.0 - mask_negative = ~mask_positive - - # Initialize ps with zeros (same shape as temp) - ps = np.zeros_like(temp) - - # Compute ps for tC > 0 - ps[mask_positive] = 6.112 * np.exp(17.62 * tC[mask_positive] / (tC[mask_positive] + 243.12)) - - # Compute ps for tC <= 0 - ps[mask_negative] = 6.112 * np.exp(22.46 * tC[mask_negative] / (tC[mask_negative] + 272.62)) - - return ps - def ReadGFS_3DData_FourCastNetGFS(file_path, area, is_IC): - # Open the GRIB2 file - pressure_levels = [] - ght_3d_hr3 = [] - temp_3d_hr3 = [] - uvel_3d_hr3 = [] - vvel_3d_hr3 = [] - wvel_3d_hr3 = [] - qv_3d_hr3 = [] - qc_3d_hr3 = [] - qr_3d_hr3 = [] - rh_3d_hr3 = [] - theta_3d_hr3 = [] - vort_3d_hr3 = [] - pressure_3d_hr3 = [] - lats, lons = None, None # To store latitude and longitude grids - - printed_time = False - - datetime_str = "" - - with pygrib.open(file_path) as grbs: - for grb in grbs: - - if not printed_time: - year = grb.year - month = grb.month - day = grb.day - hour = grb.hour - minute = grb.minute if hasattr(grb, 'minute') else 0 - print(f"Date: {year}-{month:02d}-{day:02d}, Time: {hour:02d}:{minute:02d} UTC") - datetime_str = f"{year:04d}_{month:02d}_{day:02d}_{hour:02d}_{minute:02d}" - print(f"Datetime string: {datetime_str}") - printed_time = True - - #print(f"Variable: {grb.name}, Level: {grb.level}, Units: {grb.parameterUnits}") - if "Temperature" in grb.name: - # Append temperature values - temp_3d_hr3.append(grb.values) - - # Append pressure level - pressure_levels.append(grb.level) - - if "Geopotential height" in grb.name: - ght_3d_hr3.append(grb.values) + # Open the GRIB2 file + pressure_levels = [] + ght_3d_hr3 = [] + temp_3d_hr3 = [] + uvel_3d_hr3 = [] + vvel_3d_hr3 = [] + wvel_3d_hr3 = [] + qv_3d_hr3 = [] + qc_3d_hr3 = [] + qr_3d_hr3 = [] + rh_3d_hr3 = [] + theta_3d_hr3 = [] + vort_3d_hr3 = [] + pressure_3d_hr3 = [] + lats, lons = None, None # To store latitude and longitude grids + + printed_time = False + + datetime_str = "" + + with pygrib.open(file_path) as grbs: + for grb in grbs: - if "Potential temperature" in grb.name: - theta_3d_hr3.append(grb.values) + if not printed_time: + year = grb.year + month = grb.month + day = grb.day + hour = grb.hour + minute = grb.minute if hasattr(grb, 'minute') else 0 + print(f"Date: {year}-{month:02d}-{day:02d}, Time: {hour:02d}:{minute:02d} UTC") + datetime_str = f"{year:04d}_{month:02d}_{day:02d}_{hour:02d}_{minute:02d}" + print(f"Datetime string: {datetime_str}") + printed_time = True - if "Pressure" in grb.name: - pressure_3d_hr3.append(grb.values) + #print(f"Variable: {grb.name}, Level: {grb.level}, Units: {grb.parameterUnits}") + if "Temperature" in grb.name: + # Append temperature values + temp_3d_hr3.append(grb.values) - if "U component of wind" in grb.name: - uvel_3d_hr3.append(grb.values) + # Append pressure level + pressure_levels.append(grb.level) - if "V component of wind" in grb.name: - vvel_3d_hr3.append(grb.values) + if "Geopotential height" in grb.name: + ght_3d_hr3.append(grb.values) - if 
"Geometric vertical velocity" in grb.name: - wvel_3d_hr3.append(grb.values) + if "Potential temperature" in grb.name: + theta_3d_hr3.append(grb.values) - if "Specific humidity" in grb.name: - qv_3d_hr3.append(grb.values) + if "Pressure" in grb.name: + pressure_3d_hr3.append(grb.values) - if "Cloud mixing ratio" in grb.name: - qc_3d_hr3.append(grb.values) + if "U component of wind" in grb.name: + uvel_3d_hr3.append(grb.values) - if "Rain mixing ratio" in grb.name: - qr_3d_hr3.append(grb.values) + if "V component of wind" in grb.name: + vvel_3d_hr3.append(grb.values) - if "Relative humidity" in grb.name: - rh_3d_hr3.append(grb.values) + if "Geometric vertical velocity" in grb.name: + wvel_3d_hr3.append(grb.values) + + if "Specific humidity" in grb.name: + qv_3d_hr3.append(grb.values) - if "Absolute vorticity" in grb.name: - vort_3d_hr3.append(grb.values) + if "Cloud mixing ratio" in grb.name: + qc_3d_hr3.append(grb.values) - # Retrieve latitude and longitude grids (once) - if lats is None or lons is None: - lats, lons = grb.latlons() + if "Rain mixing ratio" in grb.name: + qr_3d_hr3.append(grb.values) - # Stack into a 3D array (level, lat, lon) - ght_3d_hr3 = np.stack(ght_3d_hr3, axis=0) - uvel_3d_hr3 = np.stack(uvel_3d_hr3, axis=0) - vvel_3d_hr3 = np.stack(vvel_3d_hr3, axis=0) - #wvel_3d_hr3 = np.stack(wvel_3d_hr3, axis=0) - #theta_3d_hr3 = np.stack(theta_3d_hr3, axis=0) - #qv_3d_hr3 = np.stack(qv_3d_hr3, axis=0) - #qc_3d_hr3 = np.stack(qc_3d_hr3, axis=0) - #qr_3d_hr3 = np.stack(qr_3d_hr3, axis=0) - rh_3d_hr3 = np.stack(rh_3d_hr3, axis=0) - temp_3d_hr3 = np.stack(temp_3d_hr3, axis=0) - #vort_3d_hr3 = np.stack(vort_3d_hr3, axis=0) + if "Relative humidity" in grb.name: + rh_3d_hr3.append(grb.values) + if "Absolute vorticity" in grb.name: + vort_3d_hr3.append(grb.values) - + # Retrieve latitude and longitude grids (once) + if lats is None or lons is None: + lats, lons = grb.latlons() + + # Stack into a 3D array (level, lat, lon) + ght_3d_hr3 = np.stack(ght_3d_hr3, axis=0) + uvel_3d_hr3 = np.stack(uvel_3d_hr3, axis=0) + vvel_3d_hr3 = np.stack(vvel_3d_hr3, axis=0) + #wvel_3d_hr3 = np.stack(wvel_3d_hr3, axis=0) + #theta_3d_hr3 = np.stack(theta_3d_hr3, axis=0) + #qv_3d_hr3 = np.stack(qv_3d_hr3, axis=0) + #qc_3d_hr3 = np.stack(qc_3d_hr3, axis=0) + #qr_3d_hr3 = np.stack(qr_3d_hr3, axis=0) + rh_3d_hr3 = np.stack(rh_3d_hr3, axis=0) + temp_3d_hr3 = np.stack(temp_3d_hr3, axis=0) + #vort_3d_hr3 = np.stack(vort_3d_hr3, axis=0) + + #pressure_3d_hr3 = np.stack(pressure_3d_hr3, axis=0) + # Get the size of each dimension + dim1, dim2, dim3 = ght_3d_hr3.shape - #pressure_3d_hr3 = np.stack(pressure_3d_hr3, axis=0) - # Get the size of each dimension - dim1, dim2, dim3 = ght_3d_hr3.shape + # Print the sizes + print(f"Size of dimension 1: {dim1}") + print(f"Size of dimension 2: {dim2}") + print(f"Size of dimension 3: {dim3}") - # Print the sizes - print(f"Size of dimension 1: {dim1}") - print(f"Size of dimension 2: {dim2}") - print(f"Size of dimension 3: {dim3}") + # Convert pressure levels to numpy array for indexing + pressure_levels = np.array(pressure_levels) - # Convert pressure levels to numpy array for indexing - pressure_levels = np.array(pressure_levels) + # Extract unique latitude and longitude values + unique_lats = np.unique(lats[:, 0]) # Take the first column for unique latitudes + unique_lons = np.unique(lons[0, :]) # Take the first row for unique longitudes - # Extract unique latitude and longitude values - unique_lats = np.unique(lats[:, 0]) # Take the first column for unique latitudes - 
unique_lons = np.unique(lons[0, :]) # Take the first row for unique longitudes + print("Min max lat lons are ", unique_lats[0], unique_lats[-1], unique_lons[0], unique_lons[-1]); - print("Min max lat lons are ", unique_lats[0], unique_lats[-1], unique_lons[0], unique_lons[-1]); + nlats = len(unique_lats) + nlons = len(unique_lons) - nlats = len(unique_lats) - nlons = len(unique_lons) + lat_max = area[0] + lon_min = 360.0 + area[1] + lat_min = area[2] + lon_max = 360.0 + area[3] - lat_max = area[0] - lon_min = 360.0 + area[1] - lat_min = area[2] - lon_max = 360.0 + area[3] + print("Lat/lon min/max are ", lat_min, lat_max, lon_min, lon_max) - print("Lat/lon min/max are ", lat_min, lat_max, lon_min, lon_max) + # Example: regular grid + lat_resolution = unique_lats[1] - unique_lats[0] + lon_resolution = unique_lons[1] - unique_lons[0] - # Example: regular grid - lat_resolution = unique_lats[1] - unique_lats[0] - lon_resolution = unique_lons[1] - unique_lons[0] + lat_start = int((lat_min - unique_lats[0]) / lat_resolution) + lat_end = int((lat_max - unique_lats[0]) / lat_resolution) + lon_start = int((lon_min - unique_lons[0]) / lon_resolution) + lon_end = int((lon_max - unique_lons[0]) / lon_resolution) - lat_start = int((lat_min - unique_lats[0]) / lat_resolution) - lat_end = int((lat_max - unique_lats[0]) / lat_resolution) - lon_start = int((lon_min - unique_lons[0]) / lon_resolution) - lon_end = int((lon_max - unique_lons[0]) / lon_resolution) + domain_lats = unique_lats[lat_start:lat_end+1] + domain_lons = unique_lons[lon_start:lon_end+1] - domain_lats = unique_lats[lat_start:lat_end+1] - domain_lons = unique_lons[lon_start:lon_end+1] + print("The min max are",(lat_start, lat_end, lon_start, lon_end)); - print("The min max are",(lat_start, lat_end, lon_start, lon_end)); + nx = domain_lats.shape[0] + ny = domain_lons.shape[0] - nx = domain_lats.shape[0] - ny = domain_lons.shape[0] + print("nx and ny here are ", nx, ny) - print("nx and ny here are ", nx, ny) + ght_3d_hr3 = ght_3d_hr3[:, nlats-lat_end-1:nlats-lat_start, lon_start:lon_end+1] + uvel_3d_hr3 = uvel_3d_hr3[:, nlats-lat_end-1:nlats-lat_start, lon_start:lon_end+1] + vvel_3d_hr3 = vvel_3d_hr3[:, nlats-lat_end-1:nlats-lat_start, lon_start:lon_end+1] + rh_3d_hr3 = rh_3d_hr3[:, nlats-lat_end-1:nlats-lat_start, lon_start:lon_end+1] + temp_3d_hr3 = temp_3d_hr3[:, nlats-lat_end-1:nlats-lat_start, lon_start:lon_end+1] - + print("Size of rh_3d_hr3 is ", rh_3d_hr3.shape[0]) - ght_3d_hr3 = ght_3d_hr3[:, nlats-lat_end-1:nlats-lat_start, lon_start:lon_end+1] - uvel_3d_hr3 = uvel_3d_hr3[:, nlats-lat_end-1:nlats-lat_start, lon_start:lon_end+1] - vvel_3d_hr3 = vvel_3d_hr3[:, nlats-lat_end-1:nlats-lat_start, lon_start:lon_end+1] - rh_3d_hr3 = rh_3d_hr3[:, nlats-lat_end-1:nlats-lat_start, lon_start:lon_end+1] - temp_3d_hr3 = temp_3d_hr3[:, nlats-lat_end-1:nlats-lat_start, lon_start:lon_end+1] - print("Size of rh_3d_hr3 is ", rh_3d_hr3.shape[0]) + prev_mean = np.mean(ght_3d_hr3[0]) # start from the top level + for k in range(1, ght_3d_hr3.shape[0]): + current_mean = np.mean(ght_3d_hr3[k]) + print("Val is", k, current_mean) + if current_mean >= prev_mean: + nz_admissible = k + print(f"Mean starts increasing at index {k}") + break + prev_mean = current_mean + else: + print("Means are strictly decreasing through all levels.") + #nz = nz_admissible + nz = ght_3d_hr3.shape[0]; - prev_mean = np.mean(ght_3d_hr3[0]) # start from the top level - for k in range(1, ght_3d_hr3.shape[0]): - current_mean = np.mean(ght_3d_hr3[k]) - print("Val is", k, current_mean) - 
if current_mean >= prev_mean: - nz_admissible = k - print(f"Mean starts increasing at index {k}") - break - prev_mean = current_mean - else: - print("Means are strictly decreasing through all levels.") + print("The number of lats and lons are levels are %d, %d, %d"%(lats.shape[0], lats.shape[1], nz)); - #nz = nz_admissible - nz = ght_3d_hr3.shape[0]; + #sys.exit("Stopping the script here.") - print("The number of lats and lons are levels are %d, %d, %d"%(lats.shape[0], lats.shape[1], nz)); + z_grid = np.zeros((nx, ny, nz)) + rhod_3d = np.zeros((nx, ny, nz)) + uvel_3d = np.zeros((nx, ny, nz)) + vvel_3d = np.zeros((nx, ny, nz)) + #wvel_3d = np.zeros((nx, ny, nz)) + #theta_3d = np.zeros((nx, ny, nz)) + #qv_3d = np.zeros((nx, ny, nz)) + #qc_3d = np.zeros((nx, ny, nz)) + #qr_3d = np.zeros((nx, ny, nz)) + rh_3d = np.zeros((nx, ny, nz)) + temp_3d = np.zeros((nx, ny, nz)) + #qsat_3d = np.zeros((nx, ny, nz)) - #sys.exit("Stopping the script here.") + velocity = np.zeros((nx, ny, nz, 3)) - z_grid = np.zeros((nx, ny, nz)) - rhod_3d = np.zeros((nx, ny, nz)) - uvel_3d = np.zeros((nx, ny, nz)) - vvel_3d = np.zeros((nx, ny, nz)) - #wvel_3d = np.zeros((nx, ny, nz)) - #theta_3d = np.zeros((nx, ny, nz)) - #qv_3d = np.zeros((nx, ny, nz)) - #qc_3d = np.zeros((nx, ny, nz)) - #qr_3d = np.zeros((nx, ny, nz)) - rh_3d = np.zeros((nx, ny, nz)) - temp_3d = np.zeros((nx, ny, nz)) - #qsat_3d = np.zeros((nx, ny, nz)) + #vort_3d = np.zeros((nx, ny, nz)) + #pressure_3d = np.zeros((nx, ny, nz)) + #theta_3d = np.zeros((nx, ny, nz)) - velocity = np.zeros((nx, ny, nz, 3)) - #vort_3d = np.zeros((nx, ny, nz)) - #pressure_3d = np.zeros((nx, ny, nz)) - #theta_3d = np.zeros((nx, ny, nz)) + # Create meshgrid + x_grid, y_grid = np.meshgrid(domain_lons, domain_lats) + lon_grid, lat_grid = np.meshgrid(domain_lons, domain_lats) + lambert_conformal = CRS.from_proj4( + "+proj=lcc +lat_1=30 +lat_2=60 +lat_0=38.5 +lon_0=-97 +datum=WGS84 +units=m +no_defs") - # Create meshgrid - x_grid, y_grid = np.meshgrid(domain_lons, domain_lats) - lon_grid, lat_grid = np.meshgrid(domain_lons, domain_lats) + transformer = Transformer.from_crs("EPSG:4326", lambert_conformal, always_xy=True) - lambert_conformal = CRS.from_proj4( - "+proj=lcc +lat_1=30 +lat_2=60 +lat_0=38.5 +lon_0=-97 +datum=WGS84 +units=m +no_defs") + # Convert the entire grid to UTM + x_grid, y_grid = transformer.transform(lon_grid, lat_grid) - transformer = Transformer.from_crs("EPSG:4326", lambert_conformal, always_xy=True) + k_to_delete = [] - # Convert the entire grid to UTM - x_grid, y_grid = transformer.transform(lon_grid, lat_grid) + print("size is ", len(qv_3d_hr3)) - k_to_delete = [] + with resources.open_text('erftools.data.typical_atmosphere', 'pressure_vs_z_actual.txt') as f: + pressure_typical = np.loadtxt(f) + pressure_interp_func = interp1d(pressure_typical[:,1], pressure_typical[:,0], kind='linear', fill_value='extrapolate') - print("size is ", len(qv_3d_hr3)) - - dirname = "./TypicalAtmosphereData/" - pressure_filename = dirname + "pressure_vs_z_actual.txt" + # Find the index of the desired pressure level + for k in np.arange(nz-1, -1, -1): - pressure_typical = np.loadtxt(pressure_filename) - pressure_interp_func = interp1d(pressure_typical[:,1], pressure_typical[:,0], kind='linear', fill_value="extrapolate") + # Extract temperature at the desired pressure level + ght_at_lev = ght_3d_hr3[k] + #pressure_at_lev = pressure_3d_hr3[k] + uvel_at_lev = uvel_3d_hr3[k] + vvel_at_lev = vvel_3d_hr3[k] + rh_at_lev = rh_3d_hr3[k] + temp_at_lev = temp_3d_hr3[k] + # Print temperature 
for the specified level using nested loops + #for i in range(lats.shape[0]): # Latitude index + #for j in range(lats.shape[1]): # Longitude index + #lat = lats[i, j] + #lon = lons[i, j] + temp_3d[:, :, k] = temp_at_lev + z_grid[:,:,k] = ght_at_lev + #pressure_3d[:,:,k] = pressure_at_lev - # Find the index of the desired pressure level - for k in np.arange(nz-1, -1, -1): + uvel_3d[:, :, k] = uvel_at_lev + vvel_3d[:, :, k] = vvel_at_lev + rh_3d[:, :, k] = rh_at_lev + rh_val = rh_at_lev - # Extract temperature at the desired pressure level - ght_at_lev = ght_3d_hr3[k] - #pressure_at_lev = pressure_3d_hr3[k] - uvel_at_lev = uvel_3d_hr3[k] - vvel_at_lev = vvel_3d_hr3[k] - rh_at_lev = rh_3d_hr3[k] - temp_at_lev = temp_3d_hr3[k] - # Print temperature for the specified level using nested loops - #for i in range(lats.shape[0]): # Latitude index - #for j in range(lats.shape[1]): # Longitude index - #lat = lats[i, j] - #lon = lons[i, j] - temp_3d[:, :, k] = temp_at_lev - z_grid[:,:,k] = ght_at_lev - #pressure_3d[:,:,k] = pressure_at_lev + print("Avg val is ", k, np.mean(z_grid[:,:,k]), ) - uvel_3d[:, :, k] = uvel_at_lev - vvel_3d[:, :, k] = vvel_at_lev - rh_3d[:, :, k] = rh_at_lev - rh_val = rh_at_lev + # Find indices of elements that are zero or less + #indices = np.argwhere(qv_3d[:, :, k] <= 0) + indices = np.argwhere(rh_val <= 0) - print("Avg val is ", k, np.mean(z_grid[:,:,k]), ) + # Print indices and values + for index in indices: + row, col = index + value = rh_val[row, col] + #print(f"Element at index ({row}, {col}, {k}) is zero or less: {value}") - # Find indices of elements that are zero or less - #indices = np.argwhere(qv_3d[:, :, k] <= 0) - indices = np.argwhere(rh_val <= 0) + velocity[:,:,k,0] = uvel_at_lev + velocity[:,:,k,1] = vvel_at_lev + velocity[:,:,k,2] = 0.0 - # Print indices and values - for index in indices: - row, col = index - value = rh_val[row, col] - #print(f"Element at index ({row}, {col}, {k}) is zero or less: {value}") - - velocity[:,:,k,0] = uvel_at_lev - velocity[:,:,k,1] = vvel_at_lev - velocity[:,:,k,2] = 0.0 + #print(f"Lat and lon are: {lat_grid[0,0]:.2f}, {lon_grid[0,0]:.2f}") + #print(f"Temperature: {temp_3d[0,0,k]:.2f} K, Pressure: {pressure_3d[0,0,k]:.2f}, Geo height : {z_grid[0,0,k]:.2f} ") - #print(f"Lat and lon are: {lat_grid[0,0]:.2f}, {lon_grid[0,0]:.2f}") - #print(f"Temperature: {temp_3d[0,0,k]:.2f} K, Pressure: {pressure_3d[0,0,k]:.2f}, Geo height : {z_grid[0,0,k]:.2f} ") + scalars = { + "uvel": uvel_3d, + "vvel": vvel_3d, + "rh": rh_3d, + "temperature": temp_3d, + } - scalars = { - "uvel": uvel_3d, - "vvel": vvel_3d, - "rh": rh_3d, - "temperature": temp_3d, - } + dir_path = "Images" + os.makedirs(dir_path, exist_ok=True) - dir_path = "Images" - os.makedirs(dir_path, exist_ok=True) + dir_path = "Output" + os.makedirs(dir_path, exist_ok=True) - dir_path = "Output" - os.makedirs(dir_path, exist_ok=True) + # Extract the filename from the full path + filename = os.path.basename(file_path) - # Extract the filename from the full path - filename = os.path.basename(file_path) - - # Extract the forecast hour string, e.g., 'f024' - forecast_hour = filename.split('.')[-1] + # Extract the forecast hour string, e.g., 'f024' + forecast_hour = filename.split('.')[-1] - output_vtk = "./Output/GFS_" + datetime_str + "_" + forecast_hour + ".vtk" + output_vtk = "./Output/GFS_" + datetime_str + "_" + forecast_hour + ".vtk" - output_binary = "./Output/ERF_IC_" + datetime_str + "_" + forecast_hour + ".bin" + output_binary = "./Output/ERF_IC_" + datetime_str + "_" + 
forecast_hour + ".bin" - write_binary_vtk_structured_grid(output_vtk, x_grid, y_grid, z_grid, - nz, k_to_delete, True, - scalars, velocity) + write_binary_vtk_on_native_grid(output_vtk, + x_grid, y_grid, z_grid, + k_to_delete=k_to_delete, + point_data=scalars, + velocity=velocity) - write_binary_vtk_cartesian(output_binary, domain_lats, domain_lons, - x_grid, y_grid, z_grid, - nx, ny, nz, k_to_delete, scalars) + write_binary_vtk_cartesian(output_binary, domain_lats, domain_lons, + x_grid, y_grid, z_grid, + nx, ny, nz, k_to_delete, scalars) - scalars_for_ERF = { - "uvel": uvel_3d, - "vvel": vvel_3d, - } + scalars_for_ERF = { + "uvel": uvel_3d, + "vvel": vvel_3d, + } - #plot_1d(temp_3d, pressure_3d, theta_3d, qv_3d, qsat_3d, z_grid, k_to_delete) + #plot_1d(temp_3d, pressure_3d, theta_3d, qv_3d, qsat_3d, z_grid, k_to_delete) diff --git a/erftools/preprocessing/gfs/ReadGFSDataAndWriteERF_IC_OnlyUVW.py b/erftools/preprocessing/gfs/ReadGFSDataAndWriteERF_IC_OnlyUVW.py index d3d353e..f19ba1b 100644 --- a/erftools/preprocessing/gfs/ReadGFSDataAndWriteERF_IC_OnlyUVW.py +++ b/erftools/preprocessing/gfs/ReadGFSDataAndWriteERF_IC_OnlyUVW.py @@ -1,43 +1,21 @@ +from importlib import resources import pygrib import numpy as np import struct -from pyproj import Proj, Transformer, CRS +from pyproj import Transformer import matplotlib.pyplot as plt import sys import os from scipy.interpolate import interp1d -from erftools.preprocessing import calculate_utm_zone -from erftools.preprocessing import write_binary_vtk_structured_grid +from erftools.constants import CONST_GRAV as const_g +from erftools.utils.projection import calculate_utm_zone +from erftools.utils.microphysics import p_sat +from erftools.io import write_binary_vtk_on_native_grid from erftools.preprocessing import write_binary_vtk_cartesian from erftools.preprocessing import plot_1d - -#from IO import * -#from Plot_1D import plot_1d -#from Download_GFSData import * - -const_g = 9.81 - -def p_sat(temp): - tC = temp - 273.15 # Convert temperature from Kelvin to Celsius - - # Create masks for conditions - mask_positive = tC > 0.0 - mask_negative = ~mask_positive - - # Initialize ps with zeros (same shape as temp) - ps = np.zeros_like(temp) - - # Compute ps for tC > 0 - ps[mask_positive] = 6.112 * np.exp(17.62 * tC[mask_positive] / (tC[mask_positive] + 243.12)) - - # Compute ps for tC <= 0 - ps[mask_negative] = 6.112 * np.exp(22.46 * tC[mask_negative] / (tC[mask_negative] + 272.62)) - - return ps - def ReadGFS_3DData_UVW(file_path, area, lambert_conformal): # Open the GRIB2 file pressure_levels = [] @@ -253,10 +231,8 @@ def ReadGFS_3DData_UVW(file_path, area, lambert_conformal): print("size is ", len(qv_3d_hr3)) - dirname = "./TypicalAtmosphereData/" - pressure_filename = dirname + "pressure_vs_z_actual.txt" - - pressure_typical = np.loadtxt(pressure_filename) + with resources.open_text('erftools.data.typical_atmosphere', 'pressure_vs_z_actual.txt') as f: + pressure_typical = np.loadtxt(f) pressure_interp_func = interp1d(pressure_typical[:,1], pressure_typical[:,0], kind='linear', fill_value="extrapolate") # Find the index of the desired pressure level @@ -413,9 +389,11 @@ def ReadGFS_3DData_UVW(file_path, area, lambert_conformal): output_binary = "./Output/ERF_IC_" + date_time_forecast_str + ".bin" - write_binary_vtk_structured_grid(output_vtk, x_grid, y_grid, z_grid, - nz, k_to_delete, True, - scalars, velocity) + write_binary_vtk_on_native_grid(output_vtk, + x_grid, y_grid, z_grid, + k_to_delete=k_to_delete, + point_data=scalars, + 
velocity=velocity) write_binary_vtk_cartesian(date_time_forecast_str, output_binary, domain_lats, domain_lons, x_grid, y_grid, z_grid, diff --git a/erftools/preprocessing/hrrr.py b/erftools/preprocessing/hrrr.py index 8defd87..89177fc 100644 --- a/erftools/preprocessing/hrrr.py +++ b/erftools/preprocessing/hrrr.py @@ -5,10 +5,11 @@ import cartopy.crs as ccrs from herbie import Herbie -from ..constants import R_d, R_v, Cp_d, Cp_v, CONST_GRAV, p_0 -from ..EOS import getPgivenRTh, getThgivenRandT, getThgivenPandT -from ..utils import get_hi_faces, get_lo_faces, get_w_from_omega -from ..wrf.real import RealInit +from erftools.constants import R_d, R_v, Cp_d, Cp_v, CONST_GRAV, p_0 +from erftools.utils.EOS import getPgivenRTh, getThgivenRandT, getThgivenPandT +from erftools.utils.diag import get_w_from_omega +from erftools.utils.xarray import get_hi_faces, get_lo_faces +from erftools.wrf.real import RealInit hrrr_projection = ccrs.LambertConformal( @@ -549,8 +550,8 @@ def to_wrfbdy(self,bdy_width,dtype=float): ds = xr.Dataset() # interpolate staggered velocity fields - Ugrid = self.interpxy('U', self.xg_u[*idxs], self.yg_u[*idxs], dtype=dtype) - Vgrid = self.interpxy('V', self.xg_v[*idxs], self.yg_v[*idxs], dtype=dtype) + Ugrid = self.interpxy('U', self.xg_u[idxs], self.yg_u[idxs], dtype=dtype) + Vgrid = self.interpxy('V', self.xg_v[idxs], self.yg_v[idxs], dtype=dtype) ds['U'] = Ugrid.rename(west_east='west_east_stag') ds['V'] = Vgrid.rename(south_north='south_north_stag') @@ -567,12 +568,12 @@ def to_wrfbdy(self,bdy_width,dtype=float): 'QRAIN', ] for varn in unstag_interp_vars: - ds[varn] = self.interpxy(varn, self.xg[*idxs], self.yg[*idxs], dtype=dtype) + ds[varn] = self.interpxy(varn, self.xg[idxs], self.yg[idxs], dtype=dtype) # setup map scale factors sn_ew_idxs = idxs[::-1] - msf_u = self.grid.calc_msf(lat_u[*sn_ew_idxs]) - msf_v = self.grid.calc_msf(lat_v[*sn_ew_idxs]) + msf_u = self.grid.calc_msf(lat_u[sn_ew_idxs]) + msf_v = self.grid.calc_msf(lat_v[sn_ew_idxs]) ds['MAPFAC_U'] = (('south_north', 'west_east_stag'), msf_u.astype(dtype)) ds['MAPFAC_V'] = (('south_north_stag', 'west_east'), msf_v.astype(dtype)) diff --git a/erftools/preprocessing/plotting.py b/erftools/preprocessing/plotting.py new file mode 100644 index 0000000..df3f279 --- /dev/null +++ b/erftools/preprocessing/plotting.py @@ -0,0 +1,92 @@ +from importlib import resources +import os +import matplotlib.pyplot as plt +import numpy as np + +def plot_1d(temp_3d, pressure_3d, theta_3d, qv_3d, qsat_3d, z_grid, + k_to_delete=[], figdir='./Images'): + """Plot vertical profiles of atmospheric quantities""" + os.makedirs(figdir, exist_ok=True) + + nz = z_grid.shape[2] + + plt.figure(1) + for k in np.arange(nz-1, -1, -1): + if k in k_to_delete: + continue + plt.plot(np.mean(temp_3d[:,:,k]), np.mean(z_grid[:,:,k]), 'xk', label="mean" if k == 0 else "") + plt.plot(np.max( temp_3d[:,:,k]), np.mean(z_grid[:,:,k]), 'or', label="max" if k == 0 else "") + plt.plot(np.min( temp_3d[:,:,k]), np.mean(z_grid[:,:,k]), '^b', label="min" if k == 0 else "") + plt.ylim([0, 20000]) + plt.xlabel('T (K)',fontsize=15) + plt.ylabel(r'$z$ (m)',fontsize=15) + + with resources.open_text('erftools.data.typical_atmosphere', 'temp_vs_z_actual.txt') as f: + data = np.loadtxt(f) + plt.plot(data[:,0]+8,data[:,1],'k',label='Typical atmos.') + plt.legend() + plt.savefig(f"{figdir}/temp_vs_z.png") + + plt.figure(2) + for k in np.arange(nz-1, -1, -1): + if k in k_to_delete: + continue + plt.plot(np.mean(pressure_3d[:,:,k]), np.mean(z_grid[:,:,k]), 'xk', label="mean" if 
k == 0 else "") + plt.plot(np.max( pressure_3d[:,:,k]), np.mean(z_grid[:,:,k]), 'or', label="max" if k == 0 else "") + plt.plot(np.min( pressure_3d[:,:,k]), np.mean(z_grid[:,:,k]), '^b', label="min" if k == 0 else "") + plt.ylim([0, 20000]) + plt.xlabel('p (mbar)',fontsize=15) + plt.ylabel(r'$z$ (m)',fontsize=15) + + with resources.open_text('erftools.data.typical_atmosphere', 'pressure_vs_z_actual.txt') as f: + data = np.loadtxt(f) + plt.plot(data[:,0],data[:,1],'k',label='Typical atmos.') + plt.legend() + plt.savefig(f"{figdir}/pressure_vs_z.png") + + plt.figure(3) + for k in np.arange(nz-1, -1, -1): + if k in k_to_delete: + continue + plt.plot(np.mean(theta_3d[:,:,k]), np.mean(z_grid[:,:,k]), 'xk', label="mean" if k == 0 else "") + plt.plot(np.max( theta_3d[:,:,k]), np.mean(z_grid[:,:,k]), 'or', label="max" if k == 0 else "") + plt.plot(np.min( theta_3d[:,:,k]), np.mean(z_grid[:,:,k]), '^b', label="min" if k == 0 else "") + plt.ylim([0, 20000]) + plt.xlim([280, 600]) + plt.xlabel(r'$\theta$ (K)',fontsize=15) + plt.ylabel(r'$z$ (m)',fontsize=15) + + with resources.open_text('erftools.data.typical_atmosphere', 'theta_vs_z_actual.txt') as f: + data = np.loadtxt(f) + plt.plot(data[:,0],data[:,1],'k',label='Typical atmos.') + plt.legend() + plt.savefig(f"{figdir}/theta_vs_z.png") + + plt.figure(4) + for k in np.arange(nz-1, -1, -1): + if k in k_to_delete: + continue + plt.plot(np.mean(qv_3d[:,:,k]), np.mean(z_grid[:,:,k]), 'xk', label="mean" if k == 0 else "") + plt.plot(np.max( qv_3d[:,:,k]), np.mean(z_grid[:,:,k]), 'or', label="max" if k == 0 else "") + plt.plot(np.min( qv_3d[:,:,k]), np.mean(z_grid[:,:,k]), '^b', label="min" if k == 0 else "") + + plt.ylim([0, 20000]) + plt.xlabel(r'$q_v$ (kg/kg)',fontsize=15) + plt.ylabel(r'$z$ (m)',fontsize=15) + plt.legend() + plt.savefig(f"{figdir}/qv_vs_z.png") + + plt.figure(5) + for k in np.arange(nz-1, -1, -1): + if k in k_to_delete: + continue + plt.plot(np.mean(qsat_3d[:,:,k]), np.mean(z_grid[:,:,k]), 'xk', label="mean" if k == 0 else "") + plt.plot(np.max( qsat_3d[:,:,k]), np.mean(z_grid[:,:,k]), 'or', label="max" if k == 0 else "") + plt.plot(np.min( qsat_3d[:,:,k]), np.mean(z_grid[:,:,k]), '^b', label="min" if k == 0 else "") + + plt.xlabel(r'$q_{sat}$ (kg/kg)',fontsize=15) + plt.ylabel(r'$z$ (m)',fontsize=15) + plt.legend() + plt.ylim([0, 20000]) + plt.xlim([0, 0.03]) + plt.savefig(f"{figdir}/qsat_vs_z.png") diff --git a/erftools/EOS.py b/erftools/utils/EOS.py similarity index 97% rename from erftools/EOS.py rename to erftools/utils/EOS.py index 1207db0..98c4f6f 100644 --- a/erftools/EOS.py +++ b/erftools/utils/EOS.py @@ -1,5 +1,5 @@ import numpy as np -from .constants import Gamma, p_0, R_d, R_v, Cp_d, Cp_v +from ..constants import Gamma, p_0, R_d, R_v, Cp_d, Cp_v def getThgivenPandT(T, P, rdOcp=R_d/Cp_d): return T * (p_0/P)**rdOcp diff --git a/erftools/utils.py b/erftools/utils/diag.py similarity index 62% rename from erftools/utils.py rename to erftools/utils/diag.py index 655edcd..c6230ce 100644 --- a/erftools/utils.py +++ b/erftools/utils/diag.py @@ -1,21 +1,7 @@ import numpy as np import xarray as xr -from .constants import CONST_GRAV - -def get_stag_dims(ds_cc): - stag_dims = {dim if dim != 'bottom_top' else 'bottom_top_stag': size - for dim,size in ds_cc.sizes.items()} - stag_dims['bottom_top_stag'] += 1 - return stag_dims - -def get_lo_faces(da,dim='bottom_top_stag'): - assert dim.endswith('_stag') - return da.isel({dim:slice(0,-1)}).rename({dim:dim[:-5]}) - -def get_hi_faces(da,dim='bottom_top_stag'): - assert dim.endswith('_stag') 
-    return da.isel({dim:slice(1,None)}).rename({dim:dim[:-5]})
+from ..constants import CONST_GRAV
 
 def get_w_from_omega(omega_cc, rho_cc, stag_dims=None):
     if stag_dims is None:
diff --git a/erftools/utils/latlon.py b/erftools/utils/latlon.py
new file mode 100644
index 0000000..c54ea50
--- /dev/null
+++ b/erftools/utils/latlon.py
@@ -0,0 +1,98 @@
+import numpy as np
+
+
+def find_latlon_indices(domain_lons, domain_lats, lon, lat):
+    nlats = len(domain_lats)
+    nlons = len(domain_lons)
+    lat_idx = 0
+    lon_idx = 0
+    for i in range(0,nlats):
+        if(lat < domain_lats[i]):
+            lat_idx = i
+            break
+
+    for j in range(0,nlons):
+        if(lon < domain_lons[j]):
+            lon_idx = j
+            break
+
+    return lon_idx, lat_idx
+
+
+def find_erf_domain_extents(x_grid, y_grid, nx, ny,
+                            outfile=None,
+                            offset=100e3):
+    """Determine the extents (xmin, xmax), (ymin, ymax) of the Cartesian
+    box that fits within the projected native-grid region.
+
+    NOTE: Currently this method only works for the northern hemisphere
+    """
+
+    # Top    is [nx-1,:]
+    # Bottom is [0,:]
+    # Left   is [:,0]
+    # Right  is [:,ny-1]
+
+    ymax = min(y_grid[nx-1,:]) - offset
+
+    # Intersect it with the leftmost longitude and rightmost longitude
+
+    # Leftmost longitude is the line joined by
+    #   x1, y1 = x_grid[0   ,-1], y_grid[0   ,-1]
+    #   x2, y2 = x_grid[nx-1,-1], y_grid[nx-1,-1]
+
+    #print("Values are ", x_grid[0,-1], y_grid[0,-1], x_grid[-1,-1], y_grid[-1,-1])
+
+    i1 = 0
+    for i in range(0, nx-1):
+        if(y_grid[i,-1] < ymax and y_grid[i+1,-1] > ymax):
+            i1 = i
+            break
+    xmax = min(x_grid[i1,-1], x_grid[0,-1]) - offset
+
+    for i in range(0, nx-1):
+        if(y_grid[i,0] < ymax and y_grid[i+1,0] > ymax):
+            i1 = i
+            break
+    xmin = max(x_grid[i1,0], x_grid[0,0]) + offset
+
+    j1 = 0
+    for j in range(0, ny-1):
+        if(x_grid[0,j] < xmax and x_grid[0,j+1] > xmax):
+            j1 = j
+            break
+    y1 = y_grid[0,j1]
+
+    for j in range(0, ny-1):
+        if(x_grid[0,j] < xmin and x_grid[0,j+1] > xmin):
+            j1 = j
+            break
+    y2 = y_grid[0,j1]
+
+    ymin = max(y1,y2) + offset
+
+    if outfile is not None:
+        try:
+            # Use exclusive creation mode ("x") so only one rank succeeds
+            with open(outfile, "x") as f:
+                print("geometry.prob_lo =",
+                      np.ceil(xmin + 50e3),
+                      np.ceil(ymin + 50e3),
+                      0.0,
+                      file=f)
+                print("geometry.prob_hi =",
+                      np.floor(xmax - 50e3),
+                      np.floor(ymax - 50e3),
+                      25000.0,
+                      file=f)
+        except FileExistsError:
+            # Another rank already wrote the file; just skip
+            pass
+    else:
+        print("geometry.prob_lo = ",
+              np.ceil(xmin+offset/2), np.ceil(ymin+offset/2), 0.0)
+        print("geometry.prob_hi = ",
+              np.floor(xmax-offset/2), np.floor(ymax-offset/2), 25000.0)
+
+    return xmin, xmax, ymin, ymax
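A minimal sketch of how the relocated domain-extent helper might be exercised; the projected grid below is synthetic, and a real caller would pass the pyproj-transformed GFS/ERA5 mesh, as in `write_binary_vtk_cartesian` above:

```python
# Hypothetical check of erftools.utils.latlon.find_erf_domain_extents
# on a made-up rectangular Lambert-projected grid (units of meters,
# northern hemisphere, as the helper requires).
import numpy as np
from erftools.utils.latlon import find_erf_domain_extents

nx, ny = 50, 60
x1d = np.linspace(-1.5e6, 1.5e6, ny)    # projected x (m)
y1d = np.linspace( 2.0e6, 4.0e6, nx)    # projected y (m)
x_grid, y_grid = np.meshgrid(x1d, y1d)  # both have shape (nx, ny)

# Shrinks the native footprint by ~offset per side and prints the
# suggested geometry.prob_lo / geometry.prob_hi lines for an ERF input
xmin, xmax, ymin, ymax = find_erf_domain_extents(x_grid, y_grid, nx, ny)
```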
diff --git a/erftools/utils/map.py b/erftools/utils/map.py
new file mode 100644
index 0000000..2ff4af4
--- /dev/null
+++ b/erftools/utils/map.py
@@ -0,0 +1,89 @@
+from importlib import resources
+import numpy as np
+import pandas as pd
+from pyproj import Transformer
+import click
+
+from erftools.utils.projection import create_lcc_mapping
+from erftools.io.vtk import write_vtk_map
+
+
+def create_map(coordinates, area, shift_to_origin=False):
+    """Calculate the Lambert conformal conic projection for the given
+    area and transform the specified coordinates.
+
+    The area is described by:
+        (lat_max, lon_min, lat_min, lon_max)
+    """
+    lambert_conformal = create_lcc_mapping(area)
+
+    # Create transformer FROM geographic (lon/lat) TO Lambert
+    transformer = Transformer.from_crs("EPSG:4326",
+                                       lambert_conformal,
+                                       always_xy=True)
+
+    # region_id is optional
+    if coordinates.shape[1] == 2:
+        npts = len(coordinates)
+        coordinates = np.column_stack((coordinates, np.zeros(npts)))
+
+    # Process each latitude and longitude, convert to projected coordinates
+    x_trans = []
+    y_trans = []
+    id_vec = []
+    for lon, lat, region_id in coordinates:
+        x, y = transformer.transform(lon, lat)
+        x_trans.append(x)
+        y_trans.append(y)
+        id_vec.append(region_id)
+
+    # Shift coordinates to ensure minimum x and y start at 0
+    if shift_to_origin:
+        x_trans = np.array(x_trans) - min(x_trans)
+        y_trans = np.array(y_trans) - min(y_trans)
+
+    return x_trans, y_trans, id_vec, lambert_conformal
+
+
+@click.command()
+@click.argument('output_file', type=click.Path(writable=True))
+@click.option('--area', nargs=4, type=float,
+              help='Bounding box: lat_max, lon_min, lat_min, lon_max')
+def write_US_map_vtk(output_file, area,
+                     mapfile='US_state_borders_coordinates.txt',
+                     **kwargs):
+    """Write out a map of the United States for visualization.
+
+    A map of the US within the specified area is transformed with the
+    Lambert conformal conic projection and then written to a VTK file in
+    ASCII polydata format.
+    """
+    with resources.open_text('erftools.data', mapfile) as f:
+        coords = np.loadtxt(f)
+    x_trans, y_trans, id_vec, _ = create_map(coords, area, **kwargs)
+    write_vtk_map(x_trans, y_trans, id_vec, output_file)
+
+
+@click.command()
+@click.argument('input_file', type=click.Path(exists=True, readable=True))
+@click.argument('output_file', type=click.Path(writable=True))
+@click.option('--area', nargs=4, type=float,
+              help='Bounding box: lat_max, lon_min, lat_min, lon_max')
+@click.option('--elev', type=float, default=5000.0,
+              help='Constant elevation (z) for all points')
+def write_map_region_vtk(input_file, output_file, area, elev):
+    """Write out a bounded region for visualization.
+
+    The bounded region within the specified area is transformed with the
+    Lambert conformal conic projection and then written to a VTK file in
+    ASCII polydata format.
+    """
+    if input_file.lower().endswith('.csv'):
+        coords = pd.read_csv(input_file).values
+    else:
+        coords = np.loadtxt(input_file)
+    x_trans, y_trans, id_vec, _ = create_map(coords, area)
+    write_vtk_map(x_trans, y_trans, id_vec, output_file, zlo=elev,
+                  point_data={"Longitude": coords[:,0],
+                              "Latitude": coords[:,1]})
diff --git a/erftools/utils/microphysics.py b/erftools/utils/microphysics.py
new file mode 100644
index 0000000..29862df
--- /dev/null
+++ b/erftools/utils/microphysics.py
@@ -0,0 +1,20 @@
+import numpy as np
+
+def p_sat(temp):
+    tC = temp - 273.15  # Convert temperature from Kelvin to Celsius
+
+    # Create masks for conditions
+    mask_positive = tC > 0.0
+    mask_negative = ~mask_positive
+
+    # Initialize ps with zeros (same shape as temp)
+    ps = np.zeros_like(temp)
+
+    # Compute ps for tC > 0
+    ps[mask_positive] = 6.112 * np.exp(17.62 * tC[mask_positive] / (tC[mask_positive] + 243.12))
+
+    # Compute ps for tC <= 0
+    ps[mask_negative] = 6.112 * np.exp(22.46 * tC[mask_negative] / (tC[mask_negative] + 272.62))
+
+    return ps
+
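A quick sanity check of the consolidated `p_sat` (previously duplicated in each of the GFS readers above); the sample temperatures are arbitrary:

```python
# Saturation vapor pressure from the Magnus-type fits in p_sat;
# input in K, output in hPa, consistent with the qsat expressions in
# ReadGFS_3DData (qsat = 0.622*ps/(p - ps) with p in mbar).
import numpy as np
from erftools.utils.microphysics import p_sat

temp = np.array([250.0, 273.15, 300.0])  # K
print(p_sat(temp))  # approx. [0.76, 6.112, 35.3] hPa
```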
diff --git a/erftools/utils/projection.py b/erftools/utils/projection.py
new file mode 100644
index 0000000..2ac5f9e
--- /dev/null
+++ b/erftools/utils/projection.py
@@ -0,0 +1,30 @@
+#from pyproj import CRS
+#default_lambert_conformal = CRS.from_proj4(
+#    "+proj=lcc +lat_1=30 +lat_2=60 +lat_0=38.5 +lon_0=-97 +datum=WGS84 +units=m +no_defs")
+
+def create_lcc_mapping(area):
+    """Create a PROJ string describing a Lambert conformal conic (LCC)
+    projection centered in the given area described by:
+        (lat_max, lon_min, lat_min, lon_max)
+    """
+    lat1 = area[2]
+    lat2 = area[0]
+    lon1 = area[1]
+    lon2 = area[3]
+
+    # Build CRS
+    delta = lat2 - lat1
+    lon0 = (lon1 + lon2) / 2
+    lat0 = (lat1 + lat2) / 2
+
+    lat_1 = lat1 + delta/6
+    lat_2 = lat2 - delta/6
+
+    return (
+        f"+proj=lcc +lat_1={lat_1:.6f} +lat_2={lat_2:.6f} "
+        f"+lat_0={lat0:.6f} +lon_0={lon0:.6f} +datum=WGS84 +units=m +no_defs"
+    )
+
+def calculate_utm_zone(longitude):
+    """Calculate the UTM zone for a given longitude."""
+    return int((longitude + 180) // 6) + 1
diff --git a/erftools/utils/xarray.py b/erftools/utils/xarray.py
new file mode 100644
index 0000000..de1f661
--- /dev/null
+++ b/erftools/utils/xarray.py
@@ -0,0 +1,15 @@
+import xarray as xr
+
+def get_stag_dims(ds_cc):
+    stag_dims = {dim if dim != 'bottom_top' else 'bottom_top_stag': size
+                 for dim,size in ds_cc.sizes.items()}
+    stag_dims['bottom_top_stag'] += 1
+    return stag_dims
+
+def get_lo_faces(da,dim='bottom_top_stag'):
+    assert dim.endswith('_stag')
+    return da.isel({dim:slice(0,-1)}).rename({dim:dim[:-5]})
+
+def get_hi_faces(da,dim='bottom_top_stag'):
+    assert dim.endswith('_stag')
+    return da.isel({dim:slice(1,None)}).rename({dim:dim[:-5]})
diff --git a/erftools/wrf/real.py b/erftools/wrf/real.py
index 41a9af7..f5f14da 100644
--- a/erftools/wrf/real.py
+++ b/erftools/wrf/real.py
@@ -4,7 +4,7 @@
 from scipy.optimize import root_scalar
 
 from ..constants import R_d, Cp_d, Gamma, CONST_GRAV, p_0
-from ..utils import get_lo_faces, get_hi_faces
+from ..utils.xarray import get_lo_faces, get_hi_faces
 
 
 class RealInit(object):
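A usage sketch for `create_lcc_mapping`; the area tuple matches the README example below, and the point being projected is arbitrary:

```python
# Build an LCC projection centered on the given bounding box, then
# project a lon/lat point into the resulting Cartesian coordinates.
from pyproj import Transformer
from erftools.utils.projection import create_lcc_mapping

area = (50.0, -130.0, 10.0, -50.0)  # (lat_max, lon_min, lat_min, lon_max)
proj_str = create_lcc_mapping(area)

transformer = Transformer.from_crs("EPSG:4326", proj_str, always_xy=True)
x, y = transformer.transform(-97.0, 30.0)  # lon, lat -> meters
```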
+ +## VTK Visualization + +To output a map of the US based on the Lambert conformal conic projection +centered over a specified area (corresponding to the input above): +``` +write_US_map USMap_LambertProj.vtk --area 50 -130 10 -50 +``` + +To output the NOAA forecast cone over the same region: +``` +write_map_region HurricaneLaura_NOAA.csv NOAAForecastCone.vtk --area 50 -130 10 -50 +``` diff --git a/notebooks/era5/WriteICFromERA5Data.py b/notebooks/era5/WriteICFromERA5Data.py index 4bf5796..e5b7fc5 100644 --- a/notebooks/era5/WriteICFromERA5Data.py +++ b/notebooks/era5/WriteICFromERA5Data.py @@ -1,8 +1,11 @@ import sys import os +import glob import argparse +import time +import numpy as np + from mpi4py import MPI -import glob sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../'))) @@ -13,32 +16,9 @@ from erftools.preprocessing import ReadERA5_3DData from erftools.preprocessing import ReadERA5_SurfaceData -from pyproj import CRS, Transformer -from numpy import * -import time - -def CreateLCCMapping(area): +from erftools.utils.projection import create_lcc_mapping - lat1 = area[2] - lat2 = area[0] - lon1 = area[1] - lon2 = area[3] - # Build CRS - delta = lat2 - lat1 - lon0 = (lon1 + lon2) / 2 - lat0 = (lat1 + lat2) / 2 - - lat_1 = lat1 + delta/6 - lat_2 = lat2 - delta/6 - - lambert_conformal = ( - f"+proj=lcc +lat_1={lat_1:.6f} +lat_2={lat_2:.6f} " - f"+lat_0={lat0:.6f} +lon_0={lon0:.6f} +datum=WGS84 +units=m +no_defs" - ) - - return lambert_conformal - if __name__ == "__main__": comm = MPI.COMM_WORLD @@ -96,7 +76,10 @@ def CreateLCCMapping(area): filenames, area = Download_ERA5_SurfaceData(input_filename) comm.Barrier(); - lambert_conformal = CreateLCCMapping(area) + #lambert_conformal = CRS.from_proj4( + # "+proj=lcc +lat_1=30 +lat_2=60 +lat_0=38.5 +lon_0=-97 +datum=WGS84 +units=m +no_defs") + + lambert_conformal = create_lcc_mapping(area) # Each rank scans the local directory for matching files filenames = sorted(glob.glob("era5_surf_*.grib")) diff --git a/notebooks/era5/WriteNOAAForecastCone.py b/notebooks/era5/WriteNOAAForecastCone.py deleted file mode 100644 index c4628d2..0000000 --- a/notebooks/era5/WriteNOAAForecastCone.py +++ /dev/null @@ -1,104 +0,0 @@ -import sys -import os -import argparse - -sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../'))) - -from erftools.preprocessing import Download_ERA5_Data -from erftools.preprocessing import Download_ERA5_ForecastData -from erftools.preprocessing import ReadERA5_3DData - -from pyproj import CRS, Transformer -from numpy import * -import pandas as pd -import pyvista as pv - -def read_user_input(filename): - data = {} - with open(filename, 'r') as f: - for line in f: - if ':' in line: - key, value = line.strip().split(':', 1) - key = key.strip().lower() - value = value.strip() - if key == 'area': - data[key] = [float(x) for x in value.split(',')] - elif key == 'time': - data[key] = value - else: - data[key] = int(value) - return data - - -def CreateLCCMapping(area): - - lat1 = area[2] - lat2 = area[0] - lon1 = area[1] - lon2 = area[3] - - # Build CRS - delta = lat2 - lat1 - lon0 = (lon1 + lon2) / 2 - lat0 = (lat1 + lat2) / 2 - - lat_1 = lat1 + delta/6 - lat_2 = lat2 - delta/6 - - lambert_conformal = ( - f"+proj=lcc +lat_1={lat_1:.6f} +lat_2={lat_2:.6f} " - f"+lat_0={lat0:.6f} +lon_0={lon0:.6f} +datum=WGS84 +units=m +no_defs" - ) - - return lambert_conformal - - -if __name__ == "__main__": - - # --- Parse arguments --- - parser = argparse.ArgumentParser(description="Write 
NOAA forecast cone to ASCII VTK") - parser.add_argument("input_filename", help="Some input file (not used here, can be metadata)") - parser.add_argument("input_csv_filename", help="CSV file with lon,lat points") - args = parser.parse_args() - - input_filename = args.input_filename - input_csv_filename = args.input_csv_filename - output_vtk_filename = "NOAAForecastCone.vtk" - - args = parser.parse_args() - - input_filename = args.input_filename - - user_inputs = read_user_input(input_filename) - print("User inputs:", user_inputs) - area = user_inputs.get("area", None) - - lambert_conformal = CreateLCCMapping(area) - - transformer = Transformer.from_crs("EPSG:4326", lambert_conformal, always_xy=True) - - # Read CSV - df = pd.read_csv(input_csv_filename) - - # Transform coordinates - lcc_coords = [transformer.transform(lon, lat) for lon, lat in zip(df.iloc[:,0], df.iloc[:,1])] - - # Create points array (z = 0) - points = [(x, y, 5000.0) for x, y in lcc_coords] - - # Create PolyData object - polydata = pv.PolyData(points) - - # Optionally connect the points into a polyline (the cone outline) - lines = [len(points)] + list(range(len(points))) # format: [n_points, p0, p1, ...] - polydata.lines = lines - - # Add original lon/lat as point data - polydata.point_data["Longitude"] = df.iloc[:, 0].to_numpy() - polydata.point_data["Latitude"] = df.iloc[:, 1].to_numpy() - - # Save to VTK - polydata.save(output_vtk_filename, binary=False) - - print(f"VTK file written to: {output_vtk_filename}") - diff --git a/notebooks/era5/WriteUSMapLambertProj.py b/notebooks/era5/WriteUSMapLambertProj.py deleted file mode 100644 index f99e0d7..0000000 --- a/notebooks/era5/WriteUSMapLambertProj.py +++ /dev/null @@ -1,155 +0,0 @@ -import sys -import os -import argparse - -sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../'))) - -from erftools.preprocessing import Download_ERA5_Data -from erftools.preprocessing import Download_ERA5_ForecastData -from erftools.preprocessing import ReadERA5_3DData - -from pyproj import CRS, Transformer -from numpy import * -import pandas as pd -import pyvista as pv - -def read_user_input(filename): - data = {} - with open(filename, 'r') as f: - for line in f: - if ':' in line: - key, value = line.strip().split(':', 1) - key = key.strip().lower() - value = value.strip() - if key == 'area': - data[key] = [float(x) for x in value.split(',')] - elif key == 'time': - data[key] = value - else: - data[key] = int(value) - return data - - -def CreateLCCMapping(area): - - lat1 = area[2] - lat2 = area[0] - lon1 = area[1] - lon2 = area[3] - - # Build CRS - delta = lat2 - lat1 - lon0 = (lon1 + lon2) / 2 - lat0 = (lat1 + lat2) / 2 - - lat_1 = lat1 + delta/6 - lat_2 = lat2 - delta/6 - - lambert_conformal = ( - f"+proj=lcc +lat_1={lat_1:.6f} +lat_2={lat_2:.6f} " - f"+lat_0={lat0:.6f} +lon_0={lon0:.6f} +datum=WGS84 +units=m +no_defs" - ) - - return lambert_conformal - -def write_vtk_states(x, y, count, filename): - """ - Write a VTK file containing borders for all states. - - Parameters: - x (list or ndarray): List or array of x-coordinates. - y (list or ndarray): List or array of y-coordinates. - count (list or ndarray): List or array of state indices corresponding to each (x, y). - filename (str): Name of the output VTK file. 
- """ - x = asarray(x) - y = asarray(y) - count = asarray(count) - - if len(x) != len(y) or len(x) != len(count): - raise ValueError("The length of x, y, and count must be the same.") - - # Open VTK file for writing - with open(filename, 'w') as vtk_file: - # Write VTK header - vtk_file.write("# vtk DataFile Version 3.0\n") - vtk_file.write("State borders\n") - vtk_file.write("ASCII\n") - vtk_file.write("DATASET POLYDATA\n") - - # Group points by state - unique_states = unique(count) - points = [] # List of all points - lines = [] # List of all lines - - # Process each state - check = 0 - for state in unique_states: - if(check >=0): - state_indices = where(count == state)[0] # Indices for this state - state_points = [(x[i], y[i]) for i in state_indices] - start_idx = len(points) # Starting index for this state's points - points.extend(state_points) - - # Create line segments connecting adjacent points - for i in range(len(state_points) - 1): - lines.append((start_idx + i, start_idx + i + 1)) - check = check+1; - - # Write points - vtk_file.write(f"POINTS {len(points)} float\n") - for px, py in points: - vtk_file.write(f"{px} {py} 1e-12\n") - - # Write lines - vtk_file.write(f"LINES {len(lines)} {3 * len(lines)}\n") - for p1, p2 in lines: - vtk_file.write(f"2 {p1} {p2}\n") - - -def WriteUSMapVTKFile(area): - # Main script to process coordinates - coordinates = loadtxt('StateBordersCoordinates.txt') # Load lon, lat from a file - utm_x = [] - utm_y = [] - - lambert_conformal = CreateLCCMapping(area) - - # Create transformer FROM geographic (lon/lat) TO Lambert - transformer = Transformer.from_crs("EPSG:4326", lambert_conformal, always_xy=True) - - # Process each latitude and longitude - utm_x = [] - utm_y = [] - count_vec = [] - - for lon, lat, count in coordinates: - x, y = transformer.transform(lon, lat) # Convert (lon, lat) to Lambert - utm_x.append(x) - utm_y.append(y) - count_vec.append(count) - - # Write the shifted UTM coordinates to a VTK file - write_vtk_states(utm_x, utm_y, count_vec, "USMap_LambertProj.vtk") - return lambert_conformal - - -if __name__ == "__main__": - - # --- Parse arguments --- - parser = argparse.ArgumentParser(description="Write USMap in Lambert projection coordinates to ASCII VTK") - parser.add_argument("input_filename", help="Some input file (not used here, can be metadata)") - args = parser.parse_args() - - input_filename = args.input_filename - - args = parser.parse_args() - - input_filename = args.input_filename - - user_inputs = read_user_input(input_filename) - print("User inputs:", user_inputs) - area = user_inputs.get("area", None) - - lambert_conformal = WriteUSMapVTKFile(area) - diff --git a/notebooks/gfs/TypicalAtmosphereData/pressure_vs_z_actual.txt b/notebooks/gfs/TypicalAtmosphereData/pressure_vs_z_actual.txt deleted file mode 100644 index 52ab56f..0000000 --- a/notebooks/gfs/TypicalAtmosphereData/pressure_vs_z_actual.txt +++ /dev/null @@ -1,67 +0,0 @@ -1010.13 201 -977.22 402 -958.65 503 -938.4 804 -920.68 1005 -901.27 1106 -881.86 1407 -862.45 1608 -845.57 1709 -825.32 1910 -807.59 2010 -790.72 2312 -772.15 2412 -753.59 2714 -740.93 2814 -718.99 3015 -702.11 3417 -685.23 3618 -666.67 3819 -648.95 4020 -631.22 4322 -614.35 4724 -594.09 5025 -577.22 5226 -557.81 5729 -540.08 6030 -519.83 6533 -494.51 6935 -475.95 7236 -455.7 7839 -440.51 7940 -424.47 8442 -407.59 8744 -394.94 9045 -377.22 9447 -362.03 9950 -345.99 10452 -330.8 10754 -313.08 11357 -295.36 11859 -275.95 12462 -259.92 12864 -240.51 13568 -221.1 13970 -206.75 14472 -190.72 14874 
-171.31 15678 -153.59 16683 -137.55 17588 -122.36 18291 -103.8 19397 -91.98 20101 -77.64 21307 -67.51 22312 -56.54 23518 -43.04 24824 -33.76 26332 -24.47 28141 -17.72 29849 -13.5 31859 -10.13 34573 -5.91 37085 -5.06 39397 -5.06 40905 -4.22 44121 -4.22 45930 -2.53 49246 diff --git a/notebooks/gfs/TypicalAtmosphereData/temp_vs_z_actual.txt b/notebooks/gfs/TypicalAtmosphereData/temp_vs_z_actual.txt deleted file mode 100644 index 3663d19..0000000 --- a/notebooks/gfs/TypicalAtmosphereData/temp_vs_z_actual.txt +++ /dev/null @@ -1,88 +0,0 @@ -288.818 97 -287.635 97 -286.453 388 -284.797 485 -283.142 777 -281.191 1068 -279.417 1359 -277.23 1748 -275.042 2039 -272.855 2330 -270.904 2621 -268.953 3010 -266.824 3204 -264.992 3495 -262.745 3981 -260.971 3981 -259.02 4466 -256.833 4563 -255.414 4951 -253.581 5340 -251.807 5631 -250.152 5922 -248.26 6117 -246.486 6408 -244.595 6602 -242.939 7087 -240.929 7282 -239.155 7573 -237.5 7864 -235.785 8155 -233.953 8350 -232.238 8641 -230.287 9029 -228.218 9223 -226.385 9515 -225.084 9709 -223.547 1e+04 -221.774 10291 -219.941 10583 -218.226 10777 -216.571 11262 -216.571 12816 -216.512 14369 -216.512 15825 -216.512 17476 -216.512 18835 -216.63 20000 -217.576 20971 -218.522 21942 -219.527 22913 -220.414 23981 -221.242 24757 -222.247 25534 -223.193 26408 -224.02 27476 -224.789 27864 -225.617 29029 -226.326 29709 -227.39 30583 -228.218 31748 -228.986 32330 -230.228 32718 -231.765 33301 -233.421 33981 -235.253 34563 -236.909 35243 -238.564 35922 -240.338 36505 -242.466 37379 -244.122 37864 -245.836 38350 -247.61 39029 -249.561 39709 -251.275 40388 -253.167 41165 -254.941 41748 -257.069 42427 -258.902 43010 -260.557 43786 -262.272 44272 -264.046 45049 -265.938 45534 -267.77 46408 -269.13 46893 -270.549 47476 -270.667 48350 -270.608 49320 -270.608 50291 diff --git a/notebooks/gfs/TypicalAtmosphereData/theta_vs_z_actual.txt b/notebooks/gfs/TypicalAtmosphereData/theta_vs_z_actual.txt deleted file mode 100644 index a0edebd..0000000 --- a/notebooks/gfs/TypicalAtmosphereData/theta_vs_z_actual.txt +++ /dev/null @@ -1,57 +0,0 @@ -290.352 46.6 -291.859 512.4 -293.97 1164.6 -296.08 1770.2 -298.492 2375.8 -300.905 3260.9 -303.92 3913 -305.729 4518.6 -308.141 5077.6 -310.854 5823 -314.472 6568.3 -316.884 7220.5 -319.598 7919.3 -322.613 8571.4 -325.628 9223.6 -328.945 9922.4 -331.96 10388.2 -334.673 10993.8 -340.402 11506.2 -346.734 11878.9 -353.367 12344.7 -360 12764 -366.935 13183.2 -373.266 13555.9 -379.296 13928.6 -385.93 14301.2 -392.261 14673.9 -398.894 15093.2 -406.131 15419.3 -412.161 15791.9 -419.698 16118 -427.538 16537.3 -433.266 16909.9 -440.804 17282.6 -448.342 17608.7 -457.688 18028 -466.734 18447.2 -476.382 18959.6 -484.523 19332.3 -493.266 19751.6 -502.01 20124.2 -511.055 20496.9 -519.497 20916.1 -528.543 21288.8 -537.588 21661.5 -545.729 21987.6 -553.266 22313.7 -560.201 22546.6 -567.739 22872.7 -575.578 23152.2 -582.513 23431.7 -589.146 23664.6 -596.683 23944.1 -603.92 24223.6 -610.854 24503.1 -617.186 24689.4 -623.518 24968.9 diff --git a/notebooks/gfs/WriteICFromGFSData.py b/notebooks/gfs/WriteICFromGFSData.py index e0ebc86..7638803 100644 --- a/notebooks/gfs/WriteICFromGFSData.py +++ b/notebooks/gfs/WriteICFromGFSData.py @@ -1,6 +1,8 @@ +from importlib import resources import sys import os import argparse +import numpy as np sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../'))) @@ -9,129 +11,7 @@ from erftools.preprocessing import ReadGFS_3DData from erftools.preprocessing import ReadGFS_3DData_UVW -from pyproj import CRS, 
Transformer -from numpy import * - -def CreateLCCMapping(area): - - lat1 = area[2] - lat2 = area[0] - lon1 = area[1] - lon2 = area[3] - - # Build CRS - delta = lat2 - lat1 - lon0 = (lon1 + lon2) / 2 - lat0 = (lat1 + lat2) / 2 - - lat_1 = lat1 + delta/6 - lat_2 = lat2 - delta/6 - - lambert_conformal = ( - f"+proj=lcc +lat_1={lat_1:.6f} +lat_2={lat_2:.6f} " - f"+lat_0={lat0:.6f} +lon_0={lon0:.6f} +datum=WGS84 +units=m +no_defs" - ) - - return lambert_conformal - - -def write_vtk_states(x, y, count, filename): - """ - Write a VTK file containing borders for all states. - - Parameters: - x (list or ndarray): List or array of x-coordinates. - y (list or ndarray): List or array of y-coordinates. - count (list or ndarray): List or array of state indices corresponding to each (x, y). - filename (str): Name of the output VTK file. - """ - x = asarray(x) - y = asarray(y) - count = asarray(count) - - if len(x) != len(y) or len(x) != len(count): - raise ValueError("The length of x, y, and count must be the same.") - - # Open VTK file for writing - with open(filename, 'w') as vtk_file: - # Write VTK header - vtk_file.write("# vtk DataFile Version 3.0\n") - vtk_file.write("State borders\n") - vtk_file.write("ASCII\n") - vtk_file.write("DATASET POLYDATA\n") - - # Group points by state - unique_states = unique(count) - points = [] # List of all points - lines = [] # List of all lines - - # Process each state - check = 0 - for state in unique_states: - if(check >=0): - state_indices = where(count == state)[0] # Indices for this state - state_points = [(x[i], y[i]) for i in state_indices] - start_idx = len(points) # Starting index for this state's points - points.extend(state_points) - - # Create line segments connecting adjacent points - for i in range(len(state_points) - 1): - lines.append((start_idx + i, start_idx + i + 1)) - check = check+1; - - # Write points - vtk_file.write(f"POINTS {len(points)} float\n") - for px, py in points: - vtk_file.write(f"{px} {py} 1e-12\n") - - # Write lines - vtk_file.write(f"LINES {len(lines)} {3 * len(lines)}\n") - for p1, p2 in lines: - vtk_file.write(f"2 {p1} {p2}\n") - - -def WriteUSMapVTKFile(area): - # Main script to process coordinates - coordinates = loadtxt('StateBordersCoordinates.txt') # Load lon, lat from a file - utm_x = [] - utm_y = [] - - lambert_conformal = CreateLCCMapping(area) - - #lambert_conformal = CRS.from_proj4( - # "+proj=lcc +lat_1=30 +lat_2=60 +lat_0=38.5 +lon_0=-97 +datum=WGS84 +units=m +no_defs" - #) - - # Create transformer FROM geographic (lon/lat) TO Lambert - transformer = Transformer.from_crs("EPSG:4326", lambert_conformal, always_xy=True) - - # Process each latitude and longitude - utm_x = [] - utm_y = [] - count_vec = [] - - for lon, lat, count in coordinates: - x, y = transformer.transform(lon, lat) # Convert (lon, lat) to Lambert - utm_x.append(x) - utm_y.append(y) - count_vec.append(count) - - #plt.scatter(utm_x, utm_y, s=10, c='blue', label='UTM Points') - #plt.xlabel('UTM X') - #plt.ylabel('UTM Y') - #plt.title('UTM Converted Points') - #plt.legend() - #plt.grid() - #plt.savefig("./Images/UTM_scatter.png") - #plt.show() - - # Shift coordinates to ensure minimum x and y start at 0 - #utm_x = array(utm_x) - min(utm_x) - #utm_y = array(utm_y) - min(utm_y) - - # Write the shifted UTM coordinates to a VTK file - write_vtk_states(utm_x, utm_y, count_vec, "USMap_LambertProj.vtk") - return lambert_conformal +from erftools.utils.map import write_US_map_vtk if __name__ == "__main__": @@ -151,7 +31,7 @@ def WriteUSMapVTKFile(area): if 
do_forecast: filenames, area = Download_GFS_ForecastData(input_filename) - lambert_conformal = WriteUSMapVTKFile(area) + lambert_conformal = write_US_map_vtk.callback("USMap_LambertProj.vtk", area) # call the wrapped function, not the CLI # Create the directory if it doesn't exist os.makedirs("Output", exist_ok=True) for filename in filenames: @@ -160,7 +40,7 @@ else: filename, area = Download_GFS_Data(input_filename) - lambert_conformal = WriteUSMapVTKFile(area) + lambert_conformal = write_US_map_vtk.callback("USMap_LambertProj.vtk", area) # call the wrapped function, not the CLI print("Filename is ", filename) print(f"Processing file: {filename}") ReadGFS_3DData(filename, area, lambert_conformal) diff --git a/notebooks/gfs_fourcastnet/TypicalAtmosphereData/pressure_vs_z_actual.txt b/notebooks/gfs_fourcastnet/TypicalAtmosphereData/pressure_vs_z_actual.txt deleted file mode 100644 index 52ab56f..0000000 --- a/notebooks/gfs_fourcastnet/TypicalAtmosphereData/pressure_vs_z_actual.txt +++ /dev/null @@ -1,67 +0,0 @@ -1010.13 201 -977.22 402 -958.65 503 -938.4 804 -920.68 1005 -901.27 1106 -881.86 1407 -862.45 1608 -845.57 1709 -825.32 1910 -807.59 2010 -790.72 2312 -772.15 2412 -753.59 2714 -740.93 2814 -718.99 3015 -702.11 3417 -685.23 3618 -666.67 3819 -648.95 4020 -631.22 4322 -614.35 4724 -594.09 5025 -577.22 5226 -557.81 5729 -540.08 6030 -519.83 6533 -494.51 6935 -475.95 7236 -455.7 7839 -440.51 7940 -424.47 8442 -407.59 8744 -394.94 9045 -377.22 9447 -362.03 9950 -345.99 10452 -330.8 10754 -313.08 11357 -295.36 11859 -275.95 12462 -259.92 12864 -240.51 13568 -221.1 13970 -206.75 14472 -190.72 14874 -171.31 15678 -153.59 16683 -137.55 17588 -122.36 18291 -103.8 19397 -91.98 20101 -77.64 21307 -67.51 22312 -56.54 23518 -43.04 24824 -33.76 26332 -24.47 28141 -17.72 29849 -13.5 31859 -10.13 34573 -5.91 37085 -5.06 39397 -5.06 40905 -4.22 44121 -4.22 45930 -2.53 49246 diff --git a/notebooks/gfs_fourcastnet/TypicalAtmosphereData/temp_vs_z_actual.txt b/notebooks/gfs_fourcastnet/TypicalAtmosphereData/temp_vs_z_actual.txt deleted file mode 100644 index 3663d19..0000000 --- a/notebooks/gfs_fourcastnet/TypicalAtmosphereData/temp_vs_z_actual.txt +++ /dev/null @@ -1,88 +0,0 @@ -288.818 97 -287.635 97 -286.453 388 -284.797 485 -283.142 777 -281.191 1068 -279.417 1359 -277.23 1748 -275.042 2039 -272.855 2330 -270.904 2621 -268.953 3010 -266.824 3204 -264.992 3495 -262.745 3981 -260.971 3981 -259.02 4466 -256.833 4563 -255.414 4951 -253.581 5340 -251.807 5631 -250.152 5922 -248.26 6117 -246.486 6408 -244.595 6602 -242.939 7087 -240.929 7282 -239.155 7573 -237.5 7864 -235.785 8155 -233.953 8350 -232.238 8641 -230.287 9029 -228.218 9223 -226.385 9515 -225.084 9709 -223.547 1e+04 -221.774 10291 -219.941 10583 -218.226 10777 -216.571 11262 -216.571 12816 -216.512 14369 -216.512 15825 -216.512 17476 -216.512 18835 -216.63 20000 -217.576 20971 -218.522 21942 -219.527 22913 -220.414 23981 -221.242 24757 -222.247 25534 -223.193 26408 -224.02 27476 -224.789 27864 -225.617 29029 -226.326 29709 -227.39 30583 -228.218 31748 -228.986 32330 -230.228 32718 -231.765 33301 -233.421 33981 -235.253 34563 -236.909 35243 -238.564 35922 -240.338 36505 -242.466 37379 -244.122 37864 -245.836 38350 -247.61 39029 -249.561 39709 -251.275 40388 -253.167 41165 -254.941 41748 -257.069 42427 -258.902 43010 -260.557 43786 -262.272 44272 -264.046 45049 -265.938 45534 -267.77 46408 -269.13 46893 -270.549 47476 -270.667 48350 -270.608 49320 -270.608 50291 diff --git a/notebooks/gfs_fourcastnet/TypicalAtmosphereData/theta_vs_z_actual.txt b/notebooks/gfs_fourcastnet/TypicalAtmosphereData/theta_vs_z_actual.txt
deleted file mode 100644 index a0edebd..0000000 --- a/notebooks/gfs_fourcastnet/TypicalAtmosphereData/theta_vs_z_actual.txt +++ /dev/null @@ -1,57 +0,0 @@ -290.352 46.6 -291.859 512.4 -293.97 1164.6 -296.08 1770.2 -298.492 2375.8 -300.905 3260.9 -303.92 3913 -305.729 4518.6 -308.141 5077.6 -310.854 5823 -314.472 6568.3 -316.884 7220.5 -319.598 7919.3 -322.613 8571.4 -325.628 9223.6 -328.945 9922.4 -331.96 10388.2 -334.673 10993.8 -340.402 11506.2 -346.734 11878.9 -353.367 12344.7 -360 12764 -366.935 13183.2 -373.266 13555.9 -379.296 13928.6 -385.93 14301.2 -392.261 14673.9 -398.894 15093.2 -406.131 15419.3 -412.161 15791.9 -419.698 16118 -427.538 16537.3 -433.266 16909.9 -440.804 17282.6 -448.342 17608.7 -457.688 18028 -466.734 18447.2 -476.382 18959.6 -484.523 19332.3 -493.266 19751.6 -502.01 20124.2 -511.055 20496.9 -519.497 20916.1 -528.543 21288.8 -537.588 21661.5 -545.729 21987.6 -553.266 22313.7 -560.201 22546.6 -567.739 22872.7 -575.578 23152.2 -582.513 23431.7 -589.146 23664.6 -596.683 23944.1 -603.92 24223.6 -610.854 24503.1 -617.186 24689.4 -623.518 24968.9 diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..053deee --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,74 @@ +[build-system] +requires = ["setuptools>=77", "wheel"] # PEP 639 SPDX license strings need setuptools >= 77 +build-backend = "setuptools.build_meta" + +[project] +name = "erftools" +version = "0.1.1" +description = "Python code to make life easier for users of the Energy Research & Forecasting model" +authors = [ + {name = "U.S. Department of Energy", email = "eliot.quon@nrel.gov"} +] +readme = "README.md" +license = "Apache-2.0" + +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Science/Research", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", +] + +requires-python = ">=3.9" + +dependencies = [ + "numpy>=2.0.2", + "scipy>=1.13.1", + "pandas>=2.3.2", + "xarray>=2024.7.0", + "netcdf4>=1.7.2", + "pydantic", + "f90nml", + "click", +] + +[project.optional-dependencies] +dev = [ + "jupyter", + "matplotlib", +] +performance = [ + "dask>=2024.8.0", + "mpi4py>=4.1.0", +] +grib = [ + "pygrib>=2.1.6", + "cdsapi>=0.7.6", + "herbie-data>=2024.8.0", + "cartopy>=0.23.0", + "pyproj>=3.6.1", +] +all = [ + "erftools[dev,performance,grib]" +] + +[tool.setuptools.packages.find] +include = ["erftools*"] # also picks up erftools.utils, erftools.io, erftools.data, ... + +[tool.setuptools.package-data] +"erftools.data" = ["*.txt"] +"erftools.data.typical_atmosphere" = ["*.txt"] + +# Console scripts -- command line tools +[project.scripts] +write_US_map = "erftools.utils.map:write_US_map_vtk" +write_map_region = "erftools.utils.map:write_map_region_vtk" + +[project.urls] +Homepage = "https://erf.readthedocs.io/en/latest/" +Repository = "https://github.com/erf-model/erftools" +Issues = "https://github.com/erf-model/erftools/issues" + diff --git a/setup.py b/setup.py deleted file mode 100644 index fd343f0..0000000 --- a/setup.py +++ /dev/null @@ -1,98 +0,0 @@ -# This setup file is based on https://github.com/a2e-mmc/mmctools/blob/master/setup.py -# accessed on April 3, 2020. - -# Note: To use the 'upload' functionality of this file, you must: -# $ pip install twine - -import io -import os - -from setuptools import find_packages, setup - -# Package meta-data. -NAME = 'erftools' -DESCRIPTION = 'Scripts to make life easier for users of the Energy Research & Forecasting model' -URL = 'https://github.com/erf-model/erftools' -EMAIL = 'eliot.quon@nrel.gov' -AUTHOR = 'U.S.
Department of Energy' -REQUIRES_PYTHON = '>=3.6.0' -VERSION = '0.1.1' - -# What packages are required for this module to be executed? -REQUIRED = [ - # core - 'numpy>=1.18.1', - 'pandas>=1.0.1', - 'xarray>=0.15.0', - 'netcdf4>=1.5.1', - 'pydantic', - 'f90nml', - 'cartopy', - #'matplotlib>=3', - #'scipy>=1.4.1', - #'dask>=2.10.1', - #'utm>=0.5.0', -] - -EXTRAS = { -} - -# The rest you shouldn't have to touch too much :) -# ------------------------------------------------ -# Except, perhaps the License and Trove Classifiers! -# If you do change the License, remember to change the Trove Classifier for that! - -here = os.path.abspath(os.path.dirname(__file__)) - -# Import the README and use it as the long-description. -# Note: this will only work if 'README.md' is present in your MANIFEST.in file! -try: - with io.open(os.path.join(here, 'README.md'), encoding='utf-8') as f: - long_description = '\n' + f.read() -except FileNotFoundError: - long_description = DESCRIPTION - -# Load the package's __version__.py module as a dictionary. -about = {} -if not VERSION: - project_slug = NAME.lower().replace("-", "_").replace(" ", "_") - with open(os.path.join(here, project_slug, '__version__.py')) as f: - exec(f.read(), about) -else: - about['__version__'] = VERSION - -# Where the magic happens: -setup( - name=NAME, - version=about['__version__'], - description=DESCRIPTION, - long_description=long_description, - long_description_content_type='text/markdown', - author=AUTHOR, - author_email=EMAIL, - python_requires=REQUIRES_PYTHON, - url=URL, - #packages=find_packages(exclude=["tests", "*.tests", "*.tests.*", "tests.*"]), - # If your package is a single module, use this instead of 'packages': - py_modules=[NAME], - # entry_points={ - # 'console_scripts': ['mycli=mymodule:cli'], - # }, - install_requires=REQUIRED, - extras_require=EXTRAS, - include_package_data=True, - license='Apache-2.0', - classifiers=[ - # Trove classifiers - # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers - 'License :: OSI Approved :: Apache Software License', - 'Topic :: Scientific/Engineering', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3', - 'Development Status :: 4 - Beta', - ], - # $ setup.py publish support. - #cmdclass={ - # 'upload': UploadCommand, - #}, -)
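Note that `write_US_map_vtk` and `write_map_region_vtk` are `click` commands (exposed as the `write_US_map` and `write_map_region` console scripts in `pyproject.toml`), so a `click.Command` object cannot be called like a plain function. Scripts that need the same functionality programmatically go through the command's `.callback`, as `WriteICFromGFSData.py` now does. A minimal sketch, assuming the editable install from the README:

```python
# A click.Command is not a plain function; its .callback attribute holds the
# wrapped Python function, which here also returns the PROJ mapping string.
from erftools.utils.map import write_US_map_vtk

area = (50.0, -130.0, 10.0, -50.0)  # (lat_max, lon_min, lat_min, lon_max)
lambert_conformal = write_US_map_vtk.callback("USMap_LambertProj.vtk", area)
print(lambert_conformal)  # e.g. "+proj=lcc +lat_1=... +datum=WGS84 +units=m +no_defs"
```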