diff --git a/src/mintpy/diff.py b/src/mintpy/diff.py
index ae82a222c..264e3e7d7 100644
--- a/src/mintpy/diff.py
+++ b/src/mintpy/diff.py
@@ -275,12 +275,13 @@ def diff_ifgram_stack(file1, file2, out_file):
 
         # consider reference pixel
         if 'unwrapphase' in ds_name.lower():
-            print(f'referencing to pixel ({obj1.refY},{obj1.refX}) ...')
-            ref1 = data1[:, obj1.refY, obj1.refX]
-            ref2 = data2[:, obj2.refY, obj2.refX]
-            for i in range(data1.shape[0]):
-                data1[i,:][data1[i, :] != 0.] -= ref1[i]
-                data2[i,:][data2[i, :] != 0.] -= ref2[i]
+            if all((obj1.refY is not None, obj1.refX is not None)):
+                print(f'referencing to pixel ({obj1.refY},{obj1.refX}) ...')
+                ref1 = data1[:, obj1.refY, obj1.refX]
+                ref2 = data2[:, obj2.refY, obj2.refX]
+                for i in range(data1.shape[0]):
+                    data1[i,:][data1[i, :] != 0.] -= ref1[i]
+                    data2[i,:][data2[i, :] != 0.] -= ref2[i]
 
         # operation and ignore zero values
         data1[data1 == 0] = np.nan
diff --git a/src/mintpy/objects/gnss.py b/src/mintpy/objects/gnss.py
index bba249480..dee625cf2 100644
--- a/src/mintpy/objects/gnss.py
+++ b/src/mintpy/objects/gnss.py
@@ -812,17 +812,17 @@ def __init__(self, site: str, data_dir=None, version='IGS20', url_prefix=None):
             self.file = os.path.join(self.data_dir, f'{self.site:s}.{version:s}.tenv3')
         elif version == 'IGS14' or version == 'IGS20':
             self.file = os.path.join(self.data_dir, f'{self.site:s}.tenv3')
         else:
             raise ValueError(f'Un-supported GNSS version: {version}!')
 
         # get url
         # examples: http://geodesy.unr.edu/gps_timeseries/tenv3/IGS08/1LSU.IGS08.tenv3
         #           http://geodesy.unr.edu/gps_timeseries/tenv3/IGS14/CASU.tenv3
-        #           https://geodesy.unr.edu/gps_timeseries/IGS20/tenv3/IGS20/CAKG.tenv3
+        #           https://geodesy.unr.edu/gps_timeseries/IGS20/tenv3/IGS20/AHUP.tenv3
+
         if not self.url_prefix:
-            if version in ['IGS08', 'IGS14']:
-                self.url_prefix = f'https://geodesy.unr.edu/gps_timeseries/tenv3/{self.version}'
             if version == 'IGS20':
-                self.url_prefix = 'https://geodesy.unr.edu/gps_timeseries/IGS20/tenv3/IGS20'
-
+                self.url_prefix = f'https://geodesy.unr.edu/gps_timeseries/{self.version}/tenv3/{self.version}'
+            else:
+                self.url_prefix = f'https://geodesy.unr.edu/gps_timeseries/tenv3/{self.version}'
         self.url = os.path.join(self.url_prefix, os.path.basename(self.file))
diff --git a/src/mintpy/prep_nisar.py b/src/mintpy/prep_nisar.py
index b95f22a81..04820f324 100644
--- a/src/mintpy/prep_nisar.py
+++ b/src/mintpy/prep_nisar.py
@@ -1,7 +1,9 @@
+#!/usr/bin/env python3
 ############################################################
 # Program is part of MintPy                                #
 # Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi         #
 # Author: Sara Mirzaee, Jul 2023                           #
+# Emre Havazli, Feb 2026                                   #
 ############################################################
 
 import datetime
-RADARGRID_ROOT = 'science/LSAR/GUNW/metadata/radarGrid' DATASETS = { - 'xcoord' : f"{DATASET_ROOT_UNW}/POL/xCoordinates", - 'ycoord' : f"{DATASET_ROOT_UNW}/POL/yCoordinates", - 'unw' : f"{DATASET_ROOT_UNW}/POL/unwrappedPhase", - 'cor' : f"{DATASET_ROOT_UNW}/POL/coherenceMagnitude", - 'connComp' : f"{DATASET_ROOT_UNW}/POL/connectedComponents", - #'mask' : f"{DATASET_ROOT_UNW}/mask", - 'epsg' : f"{DATASET_ROOT_UNW}/projection", - 'xSpacing' : f"{DATASET_ROOT_UNW}/xCoordinateSpacing", - 'ySpacing' : f"{DATASET_ROOT_UNW}/yCoordinateSpacing", - 'polarization' : "/science/LSAR/GUNW/grids/frequencyA/listOfPolarizations", - 'range_look' : f"{PARAMETERS}/numberOfRangeLooks", - 'azimuth_look' : f"{PARAMETERS}/numberOfAzimuthLooks", + "xcoord": f"{DATASET_ROOT_UNW}/POL/xCoordinates", + "ycoord": f"{DATASET_ROOT_UNW}/POL/yCoordinates", + "unw": f"{DATASET_ROOT_UNW}/POL/unwrappedPhase", + "cor": f"{DATASET_ROOT_UNW}/POL/coherenceMagnitude", + "connComp": f"{DATASET_ROOT_UNW}/POL/connectedComponents", + "ion": f"{DATASET_ROOT_UNW}/POL/ionospherePhaseScreen", + "epsg": f"{DATASET_ROOT_UNW}/POL/projection", + "xSpacing": f"{DATASET_ROOT_UNW}/POL/xCoordinateSpacing", + "ySpacing": f"{DATASET_ROOT_UNW}/POL/yCoordinateSpacing", + "polarization": "/science/LSAR/GUNW/grids/frequencyA/listOfPolarizations", + "range_look": f"{PARAMETERS}/numberOfRangeLooks", + "azimuth_look": f"{PARAMETERS}/numberOfAzimuthLooks", } + PROCESSINFO = { - 'centerFrequency': "/science/LSAR/GUNW/grids/frequencyA/centerFrequency", - 'orbit_direction': f"{IDENTIFICATION}/orbitPassDirection", - 'platform' : f"{IDENTIFICATION}/missionId", - 'start_time' : f"{IDENTIFICATION}/referenceZeroDopplerStartTime", - 'end_time' : f"{IDENTIFICATION}/referenceZeroDopplerEndTime", - 'rdr_xcoord' : f"{RADARGRID_ROOT}/xCoordinates", - 'rdr_ycoord' : f"{RADARGRID_ROOT}/yCoordinates", - 'rdr_slant_range': f"{RADARGRID_ROOT}/slantRange", - 'rdr_height' : f"{RADARGRID_ROOT}/heightAboveEllipsoid", - 'rdr_incidence' : 
f"{RADARGRID_ROOT}/incidenceAngle", - 'bperp' : f"{RADARGRID_ROOT}/perpendicularBaseline", + "centerFrequency": "/science/LSAR/GUNW/grids/frequencyA/centerFrequency", + "orbit_direction": f"{IDENTIFICATION}/orbitPassDirection", + "platform": f"{IDENTIFICATION}/missionId", + "start_time": f"{IDENTIFICATION}/referenceZeroDopplerStartTime", + "end_time": f"{IDENTIFICATION}/referenceZeroDopplerEndTime", + "rdr_xcoord": f"{RADARGRID_ROOT}/xCoordinates", + "rdr_ycoord": f"{RADARGRID_ROOT}/yCoordinates", + "rdr_slant_range": f"{RADARGRID_ROOT}/referenceSlantRange", + "rdr_height": f"{RADARGRID_ROOT}/heightAboveEllipsoid", + "rdr_incidence": f"{RADARGRID_ROOT}/incidenceAngle", + "rdr_los_x": f"{RADARGRID_ROOT}/losUnitVectorX", + "rdr_los_y": f"{RADARGRID_ROOT}/losUnitVectorY", + "rdr_wet_tropo": f"{RADARGRID_ROOT}/wetTroposphericPhaseScreen", + "rdr_hs_tropo": f"{RADARGRID_ROOT}/hydrostaticTroposphericPhaseScreen", + "rdr_SET": f"{RADARGRID_ROOT}/slantRangeSolidEarthTidesPhase", + "bperp": f"{RADARGRID_ROOT}/perpendicularBaseline", } -#################################################################################### +# --------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------- +def _datasets_for_pol(polarization: str) -> dict: + """Return a per-call datasets dict without mutating the global DATASETS.""" + out = {} + for k, v in DATASETS.items(): + out[k] = ( + v.replace("POL", polarization) if isinstance(v, str) and "POL" in v else v + ) + return out + + +def _grid_bounds_from_xy(xcoord: np.ndarray, ycoord: np.ndarray): + """ + Compute pixel-edge bounds aligned to the xcoord/ycoord grid (pixel centers). + Returns bounds in (minx, miny, maxx, maxy) and dx, dy (signed spacings). + """ + if xcoord.size < 2 or ycoord.size < 2: + raise ValueError( + "xcoord/ycoord must have at least 2 elements to infer spacing." 
+ ) + + dx = float(xcoord[1] - xcoord[0]) + dy = float(ycoord[1] - ycoord[0]) + + left = float(xcoord[0] - dx / 2.0) + right = float(xcoord[-1] + dx / 2.0) + + top = float(ycoord[0] - dy / 2.0) + bottom = float(ycoord[-1] + dy / 2.0) + + miny, maxy = (bottom, top) if bottom < top else (top, bottom) + minx, maxx = (left, right) if left < right else (right, left) + return (minx, miny, maxx, maxy), dx, dy + + +def _warp_to_grid_mem( + *, + src_path: str, + src_epsg: int, + dst_epsg: int, + xcoord: np.ndarray, + ycoord: np.ndarray, + resample_alg: str, +): + """ + Warp a raster to the exact xcoord/ycoord grid using MEM output. + Uses bounds derived from pixel-edge and xRes/yRes with targetAlignedPixels. + """ + bounds, dx, dy = _grid_bounds_from_xy(xcoord, ycoord) + + warp_opts = gdal.WarpOptions( + format="MEM", + outputBounds=bounds, + srcSRS=f"EPSG:{src_epsg}", + dstSRS=f"EPSG:{dst_epsg}", + xRes=abs(dx), + yRes=abs(dy), + targetAlignedPixels=True, + resampleAlg=resample_alg, + ) + dst = gdal.Warp("", src_path, options=warp_opts) + if dst is None: + raise RuntimeError(f"GDAL Warp failed for {src_path}") + arr = dst.ReadAsArray() + if arr is None: + raise RuntimeError(f"Failed reading warped array for {src_path}") + return arr + + +def _read_raster_epsg(path: str) -> int: + ds = gdal.Open(path, gdal.GA_ReadOnly) + if ds is None: + raise OSError(f"Cannot open raster: {path}") + srs = gdal.osr.SpatialReference(wkt=ds.GetProjection()) + epsg = srs.GetAttrValue("AUTHORITY", 1) + if epsg is None: + raise ValueError(f"Could not determine EPSG from raster projection: {path}") + return int(epsg) + + +def _make_rgi(grid_axes, values, method="linear"): + """ + RegularGridInterpolator wrapper: + - flips decreasing axes (SciPy requires increasing) + - bounds_error=False + fill_value=np.nan to avoid crashing + """ + axes = [np.asarray(a) for a in grid_axes] + vals = values + for dim, ax in enumerate(axes): + if ax[0] > ax[-1]: + axes[dim] = ax[::-1] + vals = np.flip(vals, 
axis=dim) + + return RegularGridInterpolator( + tuple(axes), + vals, + method=method, + bounds_error=False, + fill_value=np.nan, + ) + + +def _read_valid_unw_mask(gunw_file: str, xybbox, pol: str): + """ + Validity mask is ALWAYS based on finite unwrappedPhase (+ _FillValue check), + using: + /science/LSAR/GUNW/grids/frequencyA/unwrappedInterferogram/{pol}/unwrappedPhase + """ + path = f"{DATASET_ROOT_UNW}/{pol}/unwrappedPhase" + with h5py.File(gunw_file, "r") as ds: + dset = ds[path] + unw = dset[xybbox[1] : xybbox[3], xybbox[0] : xybbox[2]] + fill = dset.attrs.get("_FillValue", None) + + valid = np.isfinite(unw) + if fill is not None: + valid &= unw != fill + return valid + + +# --------------------------------------------------------------------- +# Primary workflow +# --------------------------------------------------------------------- def load_nisar(inps): """Prepare and load NISAR data and metadata into HDF5/MintPy format.""" - - print(f'update mode: {inps.update_mode}') + print(f"update mode: {inps.update_mode}") input_files = sorted(glob.glob(inps.input_glob)) print(f"Found {len(input_files)} unwrapped files") if inps.subset_lat: - bbox = (inps.subset_lon[0], inps.subset_lat[0], inps.subset_lon[1], inps.subset_lat[1]) + bbox = ( + inps.subset_lon[0], + inps.subset_lat[0], + inps.subset_lon[1], + inps.subset_lat[1], + ) else: bbox = None # extract metadata - metadata, bounds = extract_metadata(input_files, bbox) + pol = getattr(inps, "polarization", "HH") + metadata, bounds = extract_metadata(input_files, bbox=bbox, polarization=pol) # output filename stack_file = os.path.join(inps.out_dir, "inputs/ifgramStack.h5") geometry_file = os.path.join(inps.out_dir, "inputs/geometryGeo.h5") + ion_stack_file = os.path.join(inps.out_dir, "inputs/ionStack.h5") + tropo_stack_file = os.path.join(inps.out_dir, "inputs/tropoStack.h5") + set_stack_file = os.path.join(inps.out_dir, "inputs/setStack.h5") - # get date info: date12_list + # date pairs date12_list = 
_get_date_pairs(input_files) + # geometry metadata = prepare_geometry( outfile=geometry_file, metaFile=input_files[0], bbox=bounds, metadata=metadata, demFile=inps.dem_file, - maskFile=inps.mask_file + maskFile=inps.mask_file, + polarization=pol, + ) + + # standalone water mask (MintPy format) + if getattr(inps, "mask_file", None) not in [None, "None", "auto"]: + water_mask_file = os.path.join(inps.out_dir, "waterMask.h5") + prepare_water_mask( + outfile=water_mask_file, + metaFile=input_files[0], + metadata=metadata, + bbox=bounds, + maskFile=inps.mask_file, + polarization=pol, ) + # ifgram stack prepare_stack( outfile=stack_file, inp_files=input_files, metadata=metadata, + demFile=inps.dem_file, bbox=bounds, date12_list=date12_list, + polarization=pol, ) + # ionosphere stack + prepare_stack( + outfile=ion_stack_file, + inp_files=input_files, + metadata=metadata, + demFile=inps.dem_file, + bbox=bounds, + date12_list=date12_list, + polarization=pol, + ) + + # troposphere stack + prepare_stack( + outfile=tropo_stack_file, + inp_files=input_files, + metadata=metadata, + demFile=inps.dem_file, + bbox=bounds, + date12_list=date12_list, + polarization=pol, + ) print("Done.") + # SET stack + prepare_stack( + outfile=set_stack_file, + inp_files=input_files, + metadata=metadata, + demFile=inps.dem_file, + bbox=bounds, + date12_list=date12_list, + polarization=pol, + ) + print("Done.") return -def extract_metadata(input_files, bbox=None, polarization='HH'): +# --------------------------------------------------------------------- +# Metadata / subset utilities +# --------------------------------------------------------------------- +def extract_metadata(input_files, bbox=None, polarization="HH"): """Extract NISAR metadata for MintPy.""" meta_file = input_files[0] meta = {} - # update dataset - for key, value in DATASETS.items(): - if value: - value = value.replace("POL", polarization) - DATASETS[key] = value - - with h5py.File(meta_file, 'r') as ds: - pixel_height = 
ds[DATASETS['ySpacing']][()] - pixel_width = ds[DATASETS['xSpacing']][()] - x_origin = min(ds[DATASETS['xcoord']][()]) - y_origin = max(ds[DATASETS['ycoord']][()]) - xcoord = ds[DATASETS['xcoord']][()] - ycoord = ds[DATASETS['ycoord']][()] - meta["EPSG"] = ds[DATASETS['epsg']][()] - meta['WAVELENGTH'] = SPEED_OF_LIGHT / ds[PROCESSINFO['centerFrequency']][()] - meta["ORBIT_DIRECTION"] = ds[PROCESSINFO['orbit_direction']][()].decode('utf-8') - meta['POLARIZATION'] = polarization - meta["ALOOKS"] = ds[DATASETS['azimuth_look']][()] - meta["RLOOKS"] = ds[DATASETS['range_look']][()] - meta['PLATFORM'] = ds[PROCESSINFO['platform']][()].decode('utf-8') - meta['STARTING_RANGE'] = min(ds[PROCESSINFO['rdr_slant_range']][()].flatten()) - start_time = datetime.datetime.strptime(ds[PROCESSINFO['start_time']][()].decode('utf-8')[0:26], - "%Y-%m-%dT%H:%M:%S.%f") - end_time = datetime.datetime.strptime(ds[PROCESSINFO['end_time']][()].decode('utf-8')[0:26], - "%Y-%m-%dT%H:%M:%S.%f") + datasets = _datasets_for_pol(polarization) + + with h5py.File(meta_file, "r") as ds: + pixel_height = ds[datasets["ySpacing"]][()] + pixel_width = ds[datasets["xSpacing"]][()] + x_origin = float(np.min(ds[datasets["xcoord"]][()])) + y_origin = float(np.max(ds[datasets["ycoord"]][()])) + xcoord = ds[datasets["xcoord"]][()] + ycoord = ds[datasets["ycoord"]][()] + meta["EPSG"] = int(ds[datasets["epsg"]][()]) + meta["WAVELENGTH"] = SPEED_OF_LIGHT / ds[PROCESSINFO["centerFrequency"]][()] + meta["ORBIT_DIRECTION"] = ds[PROCESSINFO["orbit_direction"]][()].decode("utf-8") + meta["POLARIZATION"] = polarization + meta["ALOOKS"] = ds[datasets["azimuth_look"]][()] + meta["RLOOKS"] = ds[datasets["range_look"]][()] + meta["PLATFORM"] = ds[PROCESSINFO["platform"]][()].decode("utf-8") + meta["STARTING_RANGE"] = float( + np.min(ds[PROCESSINFO["rdr_slant_range"]][()].flatten()) + ) + + start_time = datetime.datetime.strptime( + ds[PROCESSINFO["start_time"]][()].decode("utf-8")[0:26], + "%Y-%m-%dT%H:%M:%S.%f", + ) + 
end_time = datetime.datetime.strptime( + ds[PROCESSINFO["end_time"]][()].decode("utf-8")[0:26], + "%Y-%m-%dT%H:%M:%S.%f", + ) t_mid = start_time + (end_time - start_time) / 2.0 meta["CENTER_LINE_UTC"] = ( - t_mid - datetime.datetime(t_mid.year, t_mid.month, t_mid.day) + t_mid - datetime.datetime(t_mid.year, t_mid.month, t_mid.day) ).total_seconds() - meta["X_FIRST"] = x_origin - pixel_width//2 - meta["Y_FIRST"] = y_origin - pixel_height//2 - meta["X_STEP"] = pixel_width - meta["Y_STEP"] = pixel_height + # These were previously using //2 (integer) which is wrong for float spacing. + meta["X_FIRST"] = x_origin - float(pixel_width) / 2.0 + meta["Y_FIRST"] = y_origin - float(pixel_height) / 2.0 + meta["X_STEP"] = float(pixel_width) + meta["Y_STEP"] = float(pixel_height) if meta["EPSG"] == 4326: - meta["X_UNIT"] = meta["Y_UNIT"] = 'degree' + meta["X_UNIT"] = meta["Y_UNIT"] = "degree" else: meta["X_UNIT"] = meta["Y_UNIT"] = "meters" - if str(meta["EPSG"]).startswith('326'): - meta["UTM_ZONE"] = str(meta["EPSG"])[3:] + 'N' + if str(meta["EPSG"]).startswith("326"): + meta["UTM_ZONE"] = str(meta["EPSG"])[3:] + "N" else: - meta["UTM_ZONE"] = str(meta["EPSG"])[3:] + 'S' + meta["UTM_ZONE"] = str(meta["EPSG"])[3:] + "S" meta["EARTH_RADIUS"] = EARTH_RADIUS - # NISAR Altitude - meta['HEIGHT'] = 747000 + # NISAR altitude + meta["HEIGHT"] = 747000 - # Range and Azimuth pixel size need revision, values are just to fill in - meta["RANGE_PIXEL_SIZE"] = abs(pixel_width) - meta["AZIMUTH_PIXEL_SIZE"] = abs(pixel_height) + # placeholder pixel sizes + meta["RANGE_PIXEL_SIZE"] = abs(float(pixel_width)) + meta["AZIMUTH_PIXEL_SIZE"] = abs(float(pixel_height)) - # get the common raster bound among input files + # bbox handling if bbox: - # assuming bbox is in lat/lon coordinates epsg_src = 4326 utm_bbox = bbox_to_utm(bbox, meta["EPSG"], epsg_src) else: utm_bbox = None - bounds = common_raster_bound(input_files, utm_bbox) - meta['bbox'] = ",".join([str(b) for b in bounds]) + + bounds = 
common_raster_bound(input_files, utm_bbox, polarization=polarization) + meta["bbox"] = ",".join([str(b) for b in bounds]) + col1, row1, col2, row2 = get_rows_cols(xcoord, ycoord, bounds) - length = row2 - row1 - width = col2 - col1 - meta['LENGTH'] = length - meta['WIDTH'] = width + meta["LENGTH"] = int(row2 - row1) + meta["WIDTH"] = int(col2 - col1) return meta, bounds def get_rows_cols(xcoord, ycoord, bounds): - """Get row and cols of the bounding box to subset""" - xindex = np.where(np.logical_and(xcoord >= bounds[0], xcoord <= bounds[2]))[0] - yindex = np.where(np.logical_and(ycoord >= bounds[1], ycoord <= bounds[3]))[0] - row1, row2 = min(yindex), max(yindex) - col1, col2 = min(xindex), max(xindex) - return col1, row1, col2, row2 + """ + Get (col1, row1, col2, row2) for subsetting given bounds=(xmin,ymin,xmax,ymax). + Robust to: + - bounds slightly outside coordinate extent + - collapsed/invalid overlap bounds + - empty index selections + Returns indices suitable for Python slicing: arr[row1:row2, col1:col2] + """ + xcoord = np.asarray(xcoord) + ycoord = np.asarray(ycoord) + + xmin, ymin, xmax, ymax = map(float, bounds) + + # Ensure bounds ordering + if xmin > xmax: + xmin, xmax = xmax, xmin + if ymin > ymax: + ymin, ymax = ymax, ymin + + # Clip bounds to coordinate extent + x_minc, x_maxc = float(np.nanmin(xcoord)), float(np.nanmax(xcoord)) + y_minc, y_maxc = float(np.nanmin(ycoord)), float(np.nanmax(ycoord)) + xmin = max(xmin, x_minc) + xmax = min(xmax, x_maxc) + ymin = max(ymin, y_minc) + ymax = min(ymax, y_maxc) + + # If bounds collapse after clipping, fall back to full extent + if xmin >= xmax or ymin >= ymax: + xmin, xmax = x_minc, x_maxc + ymin, ymax = y_minc, y_maxc + + # Primary selection + xindex = np.where((xcoord >= xmin) & (xcoord <= xmax))[0] + yindex = np.where((ycoord >= ymin) & (ycoord <= ymax))[0] + + # If empty (can happen due to floating roundoff), fall back to nearest + if xindex.size == 0: + i1 = int(np.nanargmin(np.abs(xcoord - xmin))) + 
i2 = int(np.nanargmin(np.abs(xcoord - xmax))) + col1, col2 = (i1, i2) if i1 <= i2 else (i2, i1) + else: + col1, col2 = int(np.min(xindex)), int(np.max(xindex)) -def get_raster_corners(input_file): - """Get the (west, south, east, north) bounds of the image.""" - with h5py.File(input_file, 'r') as ds: - xcoord = ds[DATASETS['xcoord']][:] - ycoord = ds[DATASETS['ycoord']][:] - west = max(min(ds[PROCESSINFO['rdr_xcoord']][:]), min(xcoord)) - east = min(max(ds[PROCESSINFO['rdr_xcoord']][:]), max(xcoord)) - north = min(max(ds[PROCESSINFO['rdr_ycoord']][:]), max(ycoord)) - south = max(min(ds[PROCESSINFO['rdr_ycoord']][:]), min(ycoord)) - return west, south, east, north + if yindex.size == 0: + j1 = int(np.nanargmin(np.abs(ycoord - ymin))) + j2 = int(np.nanargmin(np.abs(ycoord - ymax))) + row1, row2 = (j1, j2) if j1 <= j2 else (j2, j1) + else: + row1, row2 = int(np.min(yindex)), int(np.max(yindex)) + + # Make slice end exclusive (+1) + col2 = min(col2 + 1, xcoord.size) + row2 = min(row2 + 1, ycoord.size) + + return col1, row1, col2, row2 -def common_raster_bound(input_files, utm_bbox=None): +def get_raster_corners(input_file, polarization="HH"): + """Get the (west, south, east, north) bounds of the image.""" + datasets = _datasets_for_pol(polarization) + with h5py.File(input_file, "r") as ds: + xcoord = ds[datasets["xcoord"]][:] + ycoord = ds[datasets["ycoord"]][:] + west = max(np.min(ds[PROCESSINFO["rdr_xcoord"]][:]), np.min(xcoord)) + east = min(np.max(ds[PROCESSINFO["rdr_xcoord"]][:]), np.max(xcoord)) + north = min(np.max(ds[PROCESSINFO["rdr_ycoord"]][:]), np.max(ycoord)) + south = max(np.min(ds[PROCESSINFO["rdr_ycoord"]][:]), np.min(ycoord)) + return float(west), float(south), float(east), float(north) + + +def common_raster_bound(input_files, utm_bbox=None, polarization="HH"): """Get common bounds among all data""" x_bounds = [] y_bounds = [] for file in input_files: - west, south, east, north = get_raster_corners(file) + west, south, east, north = 
get_raster_corners(file, polarization=polarization) x_bounds.append([west, east]) y_bounds.append([south, north]) - if not utm_bbox is None: - x_bounds.append([utm_bbox[0], utm_bbox[2]]) - y_bounds.append([utm_bbox[1], utm_bbox[3]]) - - bounds = max(x_bounds)[0], max(y_bounds)[0], min(x_bounds)[1], min(y_bounds)[1] - return bounds + common = [max(np.min(x_bounds, axis=0)), min(np.max(x_bounds, axis=0))] + common.append(max(np.min(y_bounds, axis=0))) + common.append(min(np.max(y_bounds, axis=0))) + common = [common[0], common[2], common[1], common[3]] + + if utm_bbox: + common = [ + max(common[0], utm_bbox[0]), + max(common[1], utm_bbox[1]), + min(common[2], utm_bbox[2]), + min(common[3], utm_bbox[3]), + ] + return common + + +def bbox_to_utm(bbox, dst_epsg, src_epsg=4326): + """Convert a lat/lon bounding box to UTM (dst_epsg).""" + transformer = Transformer.from_crs( + f"EPSG:{src_epsg}", f"EPSG:{dst_epsg}", always_xy=True + ) + x1, y1 = transformer.transform(bbox[0], bbox[1]) + x2, y2 = transformer.transform(bbox[2], bbox[3]) + xmin, xmax = (x1, x2) if x1 < x2 else (x2, x1) + ymin, ymax = (y1, y2) if y1 < y2 else (y2, y1) + return (xmin, ymin, xmax, ymax) -def bbox_to_utm(bbox, epsg_dst, epsg_src=4326): - """Convert a list of points to a specified UTM coordinate system. - If epsg_src is 4326 (lat/lon), assumes points_xy are in degrees. +def read_subset(gunw_file, bbox, polarization="HH", geometry=False): + """ + Read subset for unwrapped interferogram products. + If geometry=True, returns bbox indices only (xybbox) without reading data arrays. 
""" - xmin, ymin, xmax, ymax = bbox - t = Transformer.from_crs(epsg_src, epsg_dst, always_xy=True) - xs = [xmin, xmax] - ys = [ymin, ymax] - xt, yt = t.transform(xs, ys) - xys = list(zip(xt, yt)) - return *xys[0], *xys[1] - - -def read_subset(inp_file, bbox, geometry=False): - """Read a subset of data using bounding box in rows and cols""" - dataset = {} - with h5py.File(inp_file, 'r') as ds: - xcoord = ds[DATASETS['xcoord']][:] - ycoord = ds[DATASETS['ycoord']][:] - col1, row1, col2, row2 = get_rows_cols(xcoord, ycoord, bbox) + datasets = _datasets_for_pol(polarization) + with h5py.File(gunw_file, "r") as ds: + xcoord = ds[datasets["xcoord"]][()] + ycoord = ds[datasets["ycoord"]][()] + if bbox: + col1, row1, col2, row2 = get_rows_cols(xcoord, ycoord, bbox) + else: + row1, row2 = 0, len(ycoord) + col1, col2 = 0, len(xcoord) + + xybbox = (col1, row1, col2, row2) if geometry: - # dataset['mask'] = ds[DATASETS['mask']][row1:row2, col1:col2] - dataset['xybbox'] = (col1, row1, col2, row2) - else: - dataset['unw_data'] = ds[DATASETS['unw']][row1:row2, col1:col2] - dataset['cor_data'] = ds[DATASETS['cor']][row1:row2, col1:col2] - dataset['conn_comp'] = (ds[DATASETS['connComp']][row1:row2, col1:col2]).astype(np.float32) - dataset['conn_comp'][dataset['conn_comp'] > 254] = np.nan - dataset['pbase'] = np.nanmean(ds[PROCESSINFO['bperp']][()]) - return dataset - - -def read_and_interpolate_geometry(gunw_file, dem_file, xybbox, mask_file=None): - """Read the DEM and mask, change projection and interpolate to data grid, - interpolate slant range and incidence angle to data grid""" - dem_dataset = gdal.Open(dem_file, gdal.GA_ReadOnly) - proj = gdal.osr.SpatialReference(wkt=dem_dataset.GetProjection()) - dem_src_epsg = int(proj.GetAttrValue('AUTHORITY', 1)) - del dem_dataset + return {"xybbox": xybbox} + + unw_dset = ds[datasets["unw"]] + cor_dset = ds[datasets["cor"]] + cc_dset = ds[datasets["connComp"]] + ion_dset = ds[datasets["ion"]] + + unw_data = unw_dset[row1:row2, 
col1:col2].astype(np.float32) + cor_data = cor_dset[row1:row2, col1:col2].astype(np.float32) + conn_comp = cc_dset[row1:row2, col1:col2].astype(np.float32) + ion_data = ion_dset[row1:row2, col1:col2].astype(np.float32) + + # fill handling + fill_unw = unw_dset.attrs.get("_FillValue", None) + fill_cor = cor_dset.attrs.get("_FillValue", None) + fill_cc = cc_dset.attrs.get("_FillValue", None) + fill_ion = ion_dset.attrs.get("_FillValue", None) + + if fill_unw is not None: + unw_data[unw_data == fill_unw] = np.nan + if fill_cor is not None: + cor_data[cor_data == fill_cor] = np.nan + if fill_cc is not None: + conn_comp[conn_comp == fill_cc] = np.nan + if fill_ion is not None: + ion_data[ion_data == fill_ion] = np.nan + + # perpendicular baseline (kept placeholder: zeros) + pbase = 0.0 + + return { + "unw_data": unw_data, + "cor_data": cor_data, + "conn_comp": conn_comp, + "ion_data": ion_data, + "pbase": pbase, + "xybbox": xybbox, + } + +# --------------------------------------------------------------------- +# Geometry (DEM warp + 3D interpolation at valid pixels) +# --------------------------------------------------------------------- +def read_and_interpolate_geometry( + gunw_file, dem_file, xybbox, polarization="HH", mask_file=None +): + """ + Warp DEM to the interferogram grid (aligned), then interpolate slant range & incidence. + Interpolation is evaluated at valid pixels only (validity from unwrappedPhase finite + _FillValue). 
+ """ + dem_src_epsg = _read_raster_epsg(dem_file) + + datasets = _datasets_for_pol(polarization) rdr_coords = {} - with h5py.File(gunw_file, 'r') as ds: - dst_epsg = ds[DATASETS['epsg']][()] - xcoord = ds[DATASETS['xcoord']][xybbox[0]:xybbox[2]] - ycoord = ds[DATASETS['ycoord']][xybbox[1]:xybbox[3]] - - rdr_coords['xcoord_radar_grid'] = ds[PROCESSINFO['rdr_xcoord']][()] - rdr_coords['ycoord_radar_grid'] = ds[PROCESSINFO['rdr_ycoord']][()] - rdr_coords['height_radar_grid'] = ds[PROCESSINFO['rdr_height']][()] - rdr_coords['slant_range'] = ds[PROCESSINFO['rdr_slant_range']][()] - rdr_coords['perp_baseline'] = ds[PROCESSINFO['bperp']][()] - rdr_coords['incidence_angle'] = ds[PROCESSINFO['rdr_incidence']][()] - - subset_rows = len(ycoord) - subset_cols = len(xcoord) - - Y_2d, X_2d = np.meshgrid(ycoord, xcoord, indexing='ij') - bounds = (min(xcoord), min(ycoord), max(xcoord), max(ycoord)) - output_projection = f"EPSG:{dst_epsg}" - - # Warp DEM to the interferograms grid - input_projection = f"EPSG:{dem_src_epsg}" - output_dem = os.path.join(os.path.dirname(dem_file), 'dem_transformed.tif' ) - gdal.Warp(output_dem, dem_file, outputBounds=bounds, format='GTiff', - srcSRS=input_projection, dstSRS=output_projection, resampleAlg=gdal.GRA_Bilinear, - width=subset_cols, height=subset_rows, - options=['COMPRESS=DEFLATE']) - - dem_subset_array = gdal.Open(output_dem, gdal.GA_ReadOnly).ReadAsArray() - - # Interpolate slant_range and incidence_angle - slant_range, incidence_angle = interpolate_geometry(X_2d, Y_2d, dem_subset_array, rdr_coords) - - # Read and warp mask if necessary - if mask_file in ['auto', 'None', None]: - print('*** No mask was found ***') - mask_subset_array = np.ones(dem_subset_array.shape, dtype='byte') + with h5py.File(gunw_file, "r") as ds: + dst_epsg = int(ds[datasets["epsg"]][()]) + xcoord = ds[datasets["xcoord"]][xybbox[0] : xybbox[2]] + ycoord = ds[datasets["ycoord"]][xybbox[1] : xybbox[3]] + + rdr_coords["xcoord_radar_grid"] = 
ds[PROCESSINFO["rdr_xcoord"]][()] + rdr_coords["ycoord_radar_grid"] = ds[PROCESSINFO["rdr_ycoord"]][()] + rdr_coords["height_radar_grid"] = ds[PROCESSINFO["rdr_height"]][()] + rdr_coords["slant_range"] = ds[PROCESSINFO["rdr_slant_range"]][()] + rdr_coords["incidence_angle"] = ds[PROCESSINFO["rdr_incidence"]][()] + rdr_coords["los_x"] = ds[PROCESSINFO["rdr_los_x"]][()] + rdr_coords["los_y"] = ds[PROCESSINFO["rdr_los_y"]][()] + + # Warp DEM to exact grid + dem_subset_array = _warp_to_grid_mem( + src_path=dem_file, + src_epsg=dem_src_epsg, + dst_epsg=dst_epsg, + xcoord=xcoord, + ycoord=ycoord, + resample_alg="bilinear", + ) + + # Build meshgrid in output CRS + Y_2d, X_2d = np.meshgrid(ycoord, xcoord, indexing="ij") + + # Valid pixels from unwrappedPhase + valid = _read_valid_unw_mask(gunw_file, xybbox, polarization) + + # Interpolate geometry at valid pixels only + slant_range, incidence_angle, azimuth_angle = interpolate_geometry( + X_2d, Y_2d, dem_subset_array, rdr_coords, valid + ) + + # Mask handling (optional external mask warped to grid; otherwise ones) + if mask_file in ["auto", "None", None]: + mask_subset_array = np.ones(dem_subset_array.shape, dtype="byte") else: - try: - mask_dataset = gdal.Open(mask_file, gdal.GA_ReadOnly) - proj = gdal.osr.SpatialReference(wkt=mask_dataset.GetProjection()) - mask_src_epsg = int(proj.GetAttrValue('AUTHORITY', 1)) - del mask_dataset + mask_src_epsg = _read_raster_epsg(mask_file) + mask_subset_array = _warp_to_grid_mem( + src_path=mask_file, + src_epsg=mask_src_epsg, + dst_epsg=dst_epsg, + xcoord=xcoord, + ycoord=ycoord, + resample_alg="near", + ).astype("byte") + + return ( + dem_subset_array, + slant_range, + incidence_angle, + azimuth_angle, + mask_subset_array, + ) + + +def interpolate_geometry(X_2d, Y_2d, dem, rdr_coords, valid_mask): + """Interpolate slant range, incidence angle, and azimuth angle at valid pixels only.""" + length, width = Y_2d.shape + out_slant = np.full((length, width), np.nan, dtype=np.float32) + 
out_incid = np.full((length, width), np.nan, dtype=np.float32) + out_az = np.full((length, width), np.nan, dtype=np.float32) + + ii, jj = np.where(valid_mask) + if ii.size == 0: + return out_slant, out_incid, out_az + + pts = np.column_stack( + [ + dem[ii, jj].astype(np.float64), + Y_2d[ii, jj].astype(np.float64), + X_2d[ii, jj].astype(np.float64), + ] + ) + + grid = ( + rdr_coords["height_radar_grid"], + rdr_coords["ycoord_radar_grid"], + rdr_coords["xcoord_radar_grid"], + ) + + slant_itp = _make_rgi(grid, rdr_coords["slant_range"], method="linear") + inc_itp = _make_rgi(grid, rdr_coords["incidence_angle"], method="linear") + losx_itp = _make_rgi(grid, rdr_coords["los_x"], method="linear") + losy_itp = _make_rgi(grid, rdr_coords["los_y"], method="linear") - # Warp mask to the interferograms grid - input_projection = f"EPSG:{mask_src_epsg}" - output_mask = os.path.join(os.path.dirname(mask_file), 'mask_transformed.tif' ) - gdal.Warp(output_mask, mask_file, outputBounds=bounds, format='GTiff', - srcSRS=input_projection, dstSRS=output_projection, resampleAlg=gdal.GRA_Byte, - width=subset_cols, height=subset_rows, - options=['COMPRESS=DEFLATE']) + sl = slant_itp(pts) + inc = inc_itp(pts) + losx = losx_itp(pts) + losy = losy_itp(pts) - mask_subset_array = gdal.Open(output_mask, gdal.GA_ReadOnly).ReadAsArray() + # Azimuth angle from horizontal LOS unit vector components. 
+ az = np.degrees(np.arctan2(-losy, -losx)) - except: - raise OSError('*** Mask is not gdal readable ***') + out_slant[ii, jj] = sl.astype(np.float32) + out_incid[ii, jj] = inc.astype(np.float32) + out_az[ii, jj] = az.astype(np.float32) + return out_slant, out_incid, out_az - return dem_subset_array, slant_range, incidence_angle, mask_subset_array +def read_and_interpolate_troposphere( + gunw_file, dem_file, xybbox, polarization="HH", mask_file=None +): + """Warp DEM to aligned grid and interpolate combined tropo at valid pixels only.""" + dem_src_epsg = _read_raster_epsg(dem_file) + datasets = _datasets_for_pol(polarization) + rdr_coords = {} + with h5py.File(gunw_file, "r") as ds: + dst_epsg = int(ds[datasets["epsg"]][()]) + xcoord = ds[datasets["xcoord"]][xybbox[0] : xybbox[2]] + ycoord = ds[datasets["ycoord"]][xybbox[1] : xybbox[3]] + + rdr_coords["xcoord_radar_grid"] = ds[PROCESSINFO["rdr_xcoord"]][()] + rdr_coords["ycoord_radar_grid"] = ds[PROCESSINFO["rdr_ycoord"]][()] + rdr_coords["height_radar_grid"] = ds[PROCESSINFO["rdr_height"]][()] + rdr_coords["wet_tropo"] = ds[PROCESSINFO["rdr_wet_tropo"]][()] + rdr_coords["hydrostatic_tropo"] = ds[PROCESSINFO["rdr_hs_tropo"]][()] + + dem_subset_array = _warp_to_grid_mem( + src_path=dem_file, + src_epsg=dem_src_epsg, + dst_epsg=dst_epsg, + xcoord=xcoord, + ycoord=ycoord, + resample_alg="bilinear", + ) -def interpolate_geometry(X_2d, Y_2d, dem, rdr_coords): - """Interpolate slant range and incidence angle""" - pnts = np.stack((dem.flatten(), Y_2d.flatten(), X_2d.flatten()), axis=-1) + Y_2d, X_2d = np.meshgrid(ycoord, xcoord, indexing="ij") + valid = _read_valid_unw_mask(gunw_file, xybbox, polarization) + + total_tropo = interpolate_troposphere( + X_2d, Y_2d, dem_subset_array, rdr_coords, valid + ) + return total_tropo + + +def interpolate_troposphere(X_2d, Y_2d, dem, rdr_coords, valid_mask): + """Interpolate total tropo (hydrostatic + wet) at valid pixels only.""" length, width = Y_2d.shape + out = np.full((length, 
width), np.nan, dtype=np.float32) + + ii, jj = np.where(valid_mask) + if ii.size == 0: + return out + + pts = np.column_stack( + [ + dem[ii, jj].astype(np.float64), + Y_2d[ii, jj].astype(np.float64), + X_2d[ii, jj].astype(np.float64), + ] + ) - # build the interpolator - interpolator = RegularGridInterpolator((rdr_coords['height_radar_grid'], - rdr_coords['ycoord_radar_grid'], - rdr_coords['xcoord_radar_grid']), - rdr_coords['slant_range'], - method='linear') + total = rdr_coords["hydrostatic_tropo"] + rdr_coords["wet_tropo"] + grid = ( + rdr_coords["height_radar_grid"], + rdr_coords["ycoord_radar_grid"], + rdr_coords["xcoord_radar_grid"], + ) + itp = _make_rgi(grid, total, method="linear") + val = itp(pts) + out[ii, jj] = val.astype(np.float32) + return out - interpolated_slant_range = interpolator(pnts) - interpolator = RegularGridInterpolator((rdr_coords['height_radar_grid'], - rdr_coords['ycoord_radar_grid'], - rdr_coords['xcoord_radar_grid']), - rdr_coords['incidence_angle'], - method='linear') - interpolated_incidence_angle = interpolator(pnts) +def read_and_interpolate_SET( + gunw_file, dem_file, xybbox, polarization="HH", mask_file=None +): + """Warp DEM to aligned grid and interpolate SET phase at valid pixels only.""" + dem_src_epsg = _read_raster_epsg(dem_file) + datasets = _datasets_for_pol(polarization) + rdr_coords = {} + + with h5py.File(gunw_file, "r") as ds: + dst_epsg = int(ds[datasets["epsg"]][()]) + xcoord = ds[datasets["xcoord"]][xybbox[0] : xybbox[2]] + ycoord = ds[datasets["ycoord"]][xybbox[1] : xybbox[3]] + + rdr_coords["xcoord_radar_grid"] = ds[PROCESSINFO["rdr_xcoord"]][()] + rdr_coords["ycoord_radar_grid"] = ds[PROCESSINFO["rdr_ycoord"]][()] + rdr_coords["height_radar_grid"] = ds[PROCESSINFO["rdr_height"]][()] + rdr_coords["rdr_SET"] = ds[PROCESSINFO["rdr_SET"]][()] + + dem_subset_array = _warp_to_grid_mem( + src_path=dem_file, + src_epsg=dem_src_epsg, + dst_epsg=dst_epsg, + xcoord=xcoord, + ycoord=ycoord, + resample_alg="bilinear", + ) + 
+ Y_2d, X_2d = np.meshgrid(ycoord, xcoord, indexing="ij") + valid = _read_valid_unw_mask(gunw_file, xybbox, polarization) - return interpolated_slant_range.reshape(length, width), interpolated_incidence_angle.reshape(length, width) + set_phase = interpolate_set(X_2d, Y_2d, dem_subset_array, rdr_coords, valid) + return set_phase + + +def interpolate_set(X_2d, Y_2d, dem, rdr_coords, valid_mask): + """Interpolate SET phase at valid pixels only.""" + length, width = Y_2d.shape + out = np.full((length, width), np.nan, dtype=np.float32) + + ii, jj = np.where(valid_mask) + if ii.size == 0: + return out + + pts = np.column_stack( + [ + dem[ii, jj].astype(np.float64), + Y_2d[ii, jj].astype(np.float64), + X_2d[ii, jj].astype(np.float64), + ] + ) + grid = ( + rdr_coords["height_radar_grid"], + rdr_coords["ycoord_radar_grid"], + rdr_coords["xcoord_radar_grid"], + ) + itp = _make_rgi(grid, rdr_coords["rdr_SET"], method="linear") + val = itp(pts) + out[ii, jj] = val.astype(np.float32) + return out -###################################### +# --------------------------------------------------------------------- +# MintPy file builders +# --------------------------------------------------------------------- def _get_date_pairs(filenames): str_list = [Path(f).stem for f in filenames] - return [str(f.split('_')[13]) + '_' + str(f.split('_')[11]) for f in str_list] + return [ + str(f.split("_")[11].split("T")[0]) + "_" + str(f.split("_")[13].split("T")[0]) + for f in str_list + ] def prepare_geometry( - outfile, - metaFile, - metadata, - bbox, - demFile, - maskFile + outfile, metaFile, metadata, bbox, demFile, maskFile, polarization="HH" ): """Prepare the geometry file.""" print("-" * 50) print(f"preparing geometry file: {outfile}") - # copy metadata to meta meta = {key: value for key, value in metadata.items()} - # Read waterMask, LayoverShadowMask, xybbox: - geo_ds = read_subset(metaFile, bbox, geometry=True) - dem_subset_array, slant_range, incidence_angle, mask = 
read_and_interpolate_geometry(metaFile, demFile, - geo_ds['xybbox'], mask_file=maskFile) + geo_ds = read_subset(metaFile, bbox, polarization=polarization, geometry=True) + dem_subset_array, slant_range, incidence_angle, azimuth_angle, mask = ( + read_and_interpolate_geometry( + metaFile, + demFile, + geo_ds["xybbox"], + polarization=polarization, + mask_file=maskFile, + ) + ) length, width = dem_subset_array.shape - ds_name_dict = { "height": [np.float32, (length, width), dem_subset_array], "incidenceAngle": [np.float32, (length, width), incidence_angle], "slantRangeDistance": [np.float32, (length, width), slant_range], - #"shadowMask": [np.bool_, (length, width), geo_ds['mask']], - #"waterMask": [np.bool_, (length, width), geo_ds['water_mask']], + "azimuthAngle": [np.float32, (length, width), azimuth_angle], } if maskFile: - ds_name_dict['waterMask'] = [np.bool_, (length, width), mask] + valid = _read_valid_unw_mask(metaFile, geo_ds["xybbox"], polarization) + ds_name_dict["waterMask"] = [ + np.bool_, + (length, width), + mask.astype(bool) & valid, + ] - # initiate HDF5 file meta["FILE_TYPE"] = "geometry" - meta['STARTING_RANGE'] = np.nanmin(slant_range) + meta["STARTING_RANGE"] = float(np.nanmin(slant_range)) writefile.layout_hdf5(outfile, ds_name_dict, metadata=meta) - return meta +def prepare_water_mask(outfile, metaFile, metadata, bbox, maskFile, polarization="HH"): + """Prepare a standalone MintPy waterMask.h5 aligned to the NISAR grid.""" + print("-" * 50) + print(f"preparing water mask file: {outfile}") + + if not maskFile or maskFile in ["auto", "None", None]: + raise ValueError("maskFile must be a raster path (e.g., waterMask.msk)") + + meta = {key: value for key, value in metadata.items()} + + # get subset indices + geo_ds = read_subset(metaFile, bbox, polarization=polarization, geometry=True) + xybbox = geo_ds["xybbox"] + + # get target grid axes + EPSG from the NISAR file + datasets = _datasets_for_pol(polarization) + with h5py.File(metaFile, "r") as 
ds: + dst_epsg = int(ds[datasets["epsg"]][()]) + xcoord = ds[datasets["xcoord"]][xybbox[0] : xybbox[2]] + ycoord = ds[datasets["ycoord"]][xybbox[1] : xybbox[3]] + + # warp mask raster onto NISAR grid + mask_src_epsg = _read_raster_epsg(maskFile) + mask_arr = _warp_to_grid_mem( + src_path=maskFile, + src_epsg=mask_src_epsg, + dst_epsg=dst_epsg, + xcoord=xcoord, + ycoord=ycoord, + resample_alg="near", + ).astype("byte") + + # Convention in this script: nonzero => True (valid), 0 => False (masked) + water_mask_bool = mask_arr.astype(bool) + + # constrain to valid NISAR pixels (finite/unfilled unwrappedPhase) + valid = _read_valid_unw_mask(metaFile, xybbox, polarization) + water_mask_bool &= valid + + length, width = water_mask_bool.shape + ds_name_dict = {"waterMask": [np.bool_, (length, width), water_mask_bool]} + + meta["FILE_TYPE"] = "waterMask" + meta["LENGTH"] = int(length) + meta["WIDTH"] = int(width) + + writefile.layout_hdf5(outfile, ds_name_dict, metadata=meta) + return outfile + + def prepare_stack( outfile, inp_files, metadata, + demFile, bbox, - date12_list + date12_list, + polarization="HH", ): - """Prepare the input unw stack.""" + """Prepare the input stacks.""" print("-" * 50) print(f"preparing ifgramStack file: {outfile}") - # copy metadata to meta - meta = {key: value for key, value in metadata.items()} - # get list of *.unw file + meta = {key: value for key, value in metadata.items()} num_pair = len(inp_files) - print(f"number of inputs/unwrapped interferograms: {num_pair}") - # read baseline data pbase = np.zeros(num_pair, dtype=np.float32) + cols, rows = meta["WIDTH"], meta["LENGTH"] - # size info - cols, rows = meta['WIDTH'], meta['LENGTH'] - - # define (and fill out some) dataset structure date12_arr = np.array([x.split("_") for x in date12_list], dtype=np.bytes_) drop_ifgram = np.ones(num_pair, dtype=np.bool_) + ds_name_dict = { "date": [date12_arr.dtype, (num_pair, 2), date12_arr], "bperp": [np.float32, (num_pair,), pbase], @@ -420,32 +918,66 
@@ def prepare_stack( "connectComponent": [np.float32, (num_pair, rows, cols), None], } - # initiate HDF5 file - meta["FILE_TYPE"] = "ifgramStack" + if "inputs/geometryGeo.h5" in outfile: + meta["FILE_TYPE"] = "geometry" + else: + meta["FILE_TYPE"] = "ifgramStack" + writefile.layout_hdf5(outfile, ds_name_dict, metadata=meta) - # writing data to HDF5 file print(f"writing data to HDF5 file {outfile} with a mode ...") - with h5py.File(outfile, "a") as f: - prog_bar = ptime.progressBar(maxValue=num_pair) - for i, file in enumerate(inp_files): - dataset = read_subset(file, bbox) - - # read/write *.unw file - f["unwrapPhase"][i] = dataset['unw_data'] - - # read/write *.cor file - f["coherence"][i] = dataset['cor_data'] - - # read/write *.unw.conncomp file - f["connectComponent"][i] = dataset['conn_comp'] - - # read/write perpendicular baseline file - f['bperp'][i] = dataset['pbase'] - - prog_bar.update(i + 1, suffix=date12_list[i]) - prog_bar.close() + if "inputs/ifgramStack.h5" in outfile: + with h5py.File(outfile, "a") as f: + prog_bar = ptime.progressBar(maxValue=num_pair) + for i, file in enumerate(inp_files): + dataset = read_subset(file, bbox, polarization=polarization) + f["unwrapPhase"][i] = dataset["unw_data"] + f["coherence"][i] = dataset["cor_data"] + f["connectComponent"][i] = dataset["conn_comp"] + f["bperp"][i] = dataset["pbase"] + prog_bar.update(i + 1, suffix=date12_list[i]) + prog_bar.close() + + elif "inputs/ionStack.h5" in outfile: + with h5py.File(outfile, "a") as f: + prog_bar = ptime.progressBar(maxValue=num_pair) + for i, file in enumerate(inp_files): + dataset = read_subset(file, bbox, polarization=polarization) + f["unwrapPhase"][i] = dataset["ion_data"] + f["coherence"][i] = dataset["cor_data"] + f["connectComponent"][i] = dataset["conn_comp"] + f["bperp"][i] = dataset["pbase"] + prog_bar.update(i + 1, suffix=date12_list[i]) + prog_bar.close() + + elif "inputs/tropoStack.h5" in outfile: + with h5py.File(outfile, "a") as f: + prog_bar = 
ptime.progressBar(maxValue=num_pair) + for i, file in enumerate(inp_files): + geo_ds = read_subset( + file, bbox, polarization=polarization, geometry=True + ) + total_tropo = read_and_interpolate_troposphere( + file, demFile, geo_ds["xybbox"], polarization=polarization + ) + f["unwrapPhase"][i] = total_tropo + prog_bar.update(i + 1, suffix=date12_list[i]) + prog_bar.close() + + elif "inputs/setStack.h5" in outfile: + with h5py.File(outfile, "a") as f: + prog_bar = ptime.progressBar(maxValue=num_pair) + for i, file in enumerate(inp_files): + geo_ds = read_subset( + file, bbox, polarization=polarization, geometry=True + ) + set_phase = read_and_interpolate_SET( + file, demFile, geo_ds["xybbox"], polarization=polarization + ) + f["unwrapPhase"][i] = set_phase + prog_bar.update(i + 1, suffix=date12_list[i]) + prog_bar.close() print(f"finished writing to HDF5 file: {outfile}") return outfile diff --git a/src/mintpy/view.py b/src/mintpy/view.py index 054e9d51d..6bf9f2ad3 100644 --- a/src/mintpy/view.py +++ b/src/mintpy/view.py @@ -1316,7 +1316,7 @@ def format_coord(x, y): elif 20 < num_subplot <= 50: subplot_title = title_str.replace('_','\n').replace('-','\n') else: - subplot_title = f'{title_ind}' + subplot_title = f'{title_ind}\n{title_str}' # plot title if subplot_title: