Skip to content

Commit 03fd1a8

Browse files
author
trchudley
committed
minor coreg fixes
1 parent 9bf2390 commit 03fd1a8

File tree

3 files changed

+14
-235
lines changed

3 files changed

+14
-235
lines changed

batch/batch_download_and_coregister_is2.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -204,7 +204,7 @@
204204
# check whether coreg worked, and construct filename appropriately
205205
if metadata["coreg_status"] == "failed":
206206
out_fpath = out_fname + ".tif"
207-
if metadata["coreg_status"] == "coregistered":
207+
elif metadata["coreg_status"] == "coregistered":
208208
out_fpath = out_fname + "_coreg.tif"
209209
elif metadata["coreg_status"] == "dz_only":
210210
out_fpath = out_fname + "_coreg_dz.tif"

src/pdemtools/_accessor.py

Lines changed: 12 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -233,14 +233,19 @@ def coregister_is2(
233233
# ADD ICESAT-2 SPECIFIC TO METADATA
234234
if "request_date_dt" in points_df.columns:
235235

236-
dt_days_max = int(
237-
points_df["request_date_dt"].dt.round("d").dt.days.abs().max()
238-
)
239-
metadata_dict["points_dt_days_max"] = dt_days_max
236+
if metadata_dict["coreg_status"] == "failed":
237+
metadata_dict["points_dt_days_max"] = None
238+
metadata_dict["points_dt_days_count"] = None
240239

241-
dt_days = points_df["request_date_dt"].dt.round("d").dt.days
242-
dt_days_counts_dict = dt_days.value_counts().to_dict()
243-
metadata_dict["points_dt_days_count"] = dt_days_counts_dict
240+
else:
241+
dt_days_max = int(
242+
points_df["request_date_dt"].dt.round("d").dt.days.abs().max()
243+
)
244+
metadata_dict["points_dt_days_max"] = dt_days_max
245+
246+
dt_days = points_df["request_date_dt"].dt.round("d").dt.days
247+
dt_days_counts_dict = dt_days.value_counts().to_dict()
248+
metadata_dict["points_dt_days_count"] = dt_days_counts_dict
244249

245250
metadata_dict["coregistration_type"] = "reference_icesat2"
246251

@@ -384,19 +389,6 @@ def coregister_dems(
384389

385390
resolution = get_resolution(self._obj)
386391

387-
# new_dem_array, metadata_dict = coregisterdems(
388-
# reference.values,
389-
# self._obj.values,
390-
# reference.x.values,
391-
# reference.y.values,
392-
# stable_mask.values,
393-
# resolution,
394-
# max_horiz_offset=max_horiz_offset,
395-
# rmse_step_thresh=rmse_step_thresh,
396-
# max_iterations=max_iterations,
397-
# verbose=verbose,
398-
# )
399-
400392
new_dem_array, metadata_dict = coregister(
401393
self._obj.values,
402394
reference.values,

src/pdemtools/_coreg.py

Lines changed: 1 addition & 214 deletions
Original file line numberDiff line numberDiff line change
@@ -265,6 +265,7 @@ def coregister(
265265
perr = np.full((3, 1), np.nan)
266266
d0 = np.nan
267267
status = "failed"
268+
points_n = None
268269

269270
else:
270271
status = "coregistered"
@@ -296,220 +297,6 @@ def coregister(
296297
return dem2out, metadata_dict
297298

298299

299-
# def coregisterdems(
300-
# dem1, # Reference DEM
301-
# dem2, # DEM to be coregistered
302-
# x,
303-
# y,
304-
# mask,
305-
# res,
306-
# max_horiz_offset=50,
307-
# rmse_step_thresh=-0.001,
308-
# max_iterations=5,
309-
# verbose=True,
310-
# ):
311-
# """
312-
# Simplified version of Erik Husby's coregisterdems() Python function.
313-
314-
# INPUTS:
315-
# dem_1, dem_2: 2D arrays (of same shape) of dems. dem2 is the dem to be coregistered
316-
# mask: mask of regions to be used in coregistration process (1=VALID FOR COREGISTRATION)
317-
318-
# OUTPUTS:
319-
# trans: the [dz,dx,dy] transformation parameter
320-
# trans_err: 1-sigma errors of trans
321-
# rms: root mean square of the transformation in the vertical from the residuals
322-
323-
# If the registration fails due to lack of overlap, NaNs are returned in p and perr.
324-
# If the registration fails to converge or exceeds the maximum shift, the median
325-
# vertical offset is applied.
326-
327-
# """
328-
329-
# # Verbose print lambda function
330-
# print_verbose = lambda msg: print(msg) if verbose else None
331-
332-
# print_verbose("THIS IS THE OLD COREGISTERDEMS FUNCTION - WE SHOULD MOVE TO A SINGLE COREGISTER FUNCTION")
333-
334-
# # initial trans and RMSE settings
335-
# p = np.zeros((3, 1)) # p is prior iteration trans var
336-
# pn = p.copy() # pn is current iteration trans var
337-
# perr = np.zeros((3, 1)) # perr is prior iteration regression errors
338-
# pnerr = perr.copy() # pnerr is current iteration regression errors
339-
# d0 = np.inf # initial RMSE
340-
341-
# # Edge case markers
342-
# meddz = None
343-
# return_meddz = False
344-
# critical_failure = False
345-
346-
# it = 0
347-
# while True:
348-
# it += 1
349-
# print_verbose(f"Planimetric Correction Iteration {it}")
350-
351-
# print_verbose(f"Offset (z,x,y): {pn[0, 0]:.3f}, {pn[1, 0]:.3f}, {pn[2, 0]:.3f}")
352-
# # print(f"pn: {pn}")
353-
354-
# # Break loop if conditions reached
355-
# if np.any(np.abs(pn[1:]) > max_horiz_offset):
356-
# print(
357-
# f"Maximum horizontal offset ({max_horiz_offset}) exceeded."
358-
# "Consider raising the threshold if offsets are large."
359-
# )
360-
# return_meddz = True
361-
# break
362-
363-
# # Apply offsets
364-
# if pn[1] != 0 and pn[2] != 0:
365-
# dem2n = shift_dem(dem2, pn.T[0], x, y, verbose=verbose).astype("float32")
366-
# else:
367-
# dem2n = dem2 - pn[0].astype("float32")
368-
369-
# # # Calculate slopes - original method from PGC
370-
# # sy, sx = np.gradient(dem2n, res)
371-
# # sx = -sx
372-
373-
# print(type(dem2n))
374-
375-
# # Calculate slope - using Florinsky slope method (p = sx, q = sy)
376-
# sy = q_f(dem2n, res)
377-
# sx = p_f(dem2n, res)
378-
# sy = -sy
379-
# sx = -sx
380-
381-
# # Difference grids.
382-
# dz = dem2n - dem1
383-
384-
# # Mask (in full script, both m1 and m2 are applied)
385-
# dz[mask == 0] = np.nan
386-
387-
# # If no overlap between scenes, break the loop
388-
# if np.all(np.isnan(dz)):
389-
# print("No overlapping data between DEMs")
390-
# critical_failure = True
391-
# break
392-
393-
# # Filter NaNs and outliers.
394-
# n = (
395-
# ~np.isnan(sx)
396-
# & ~np.isnan(sy)
397-
# & (np.abs(dz - np.nanmedian(dz)) <= 3 * np.nanstd(dz))
398-
# )
399-
# n_count = np.count_nonzero(n)
400-
401-
# if n_count < 10:
402-
# print(f"Too few ({n_count}) registration points: 10 required")
403-
# critical_failure = True
404-
# break
405-
406-
# # Get RMSE
407-
# d1 = np.sqrt(np.mean(np.power(dz[n], 2)))
408-
# print_verbose(f"RMSE = {d1}")
409-
410-
# # Keep median dz if first iteration.
411-
# if it == 1:
412-
# meddz = np.median(dz[n])
413-
# meddz_err = np.std(dz[n] / np.sqrt(n_count))
414-
# d00 = np.sqrt(np.mean(np.power(dz[n] - meddz, 2)))
415-
416-
# # Get improvement in RMSE
417-
# rmse_step = d1 - d0 # initial d0 == inf
418-
419-
# # break if rmse above threshold
420-
# if rmse_step > rmse_step_thresh or np.isnan(d0):
421-
# print_verbose(
422-
# f"RMSE step in this iteration ({rmse_step:.5f}) is above threshold "
423-
# f"({rmse_step_thresh}), stopping and returning values of prior iteration."
424-
# )
425-
# # If fails after first registration attempt,
426-
# # set dx and dy to zero and subtract the median offset.
427-
# if it == 2:
428-
# print("Second iteration regression failure")
429-
# return_meddz = True
430-
# break
431-
# elif it == max_iterations:
432-
# print_verbose(f"Maximum number of iterations ({max_iterations}) reached")
433-
# break
434-
435-
# # Keep this adjustment.
436-
# dem2out = dem2n.copy()
437-
# p = pn.copy()
438-
# perr = pnerr.copy()
439-
# d0 = d1
440-
441-
# # Build design matrix.
442-
# X = np.column_stack((np.ones(n_count, dtype=np.float32), sx[n], sy[n]))
443-
# sx, sy = None, None  # release for data management
444-
445-
# # Solve for new adjustment.
446-
# p1 = np.reshape(np.linalg.lstsq(X, dz[n], rcond=None)[0], (-1, 1))
447-
448-
# # Calculate p errors.
449-
# _, R = np.linalg.qr(X)
450-
# RI = np.linalg.lstsq(R, np.identity(3, dtype=np.float32), rcond=None)[0]
451-
# nu = X.shape[0] - X.shape[1] # residual degrees of freedom
452-
# yhat = np.matmul(X, p1) # predicted responses at each data point
453-
# r = dz[n] - yhat.T[0] # residuals
454-
# normr = np.linalg.norm(r)
455-
456-
# dz = None  # release for memory management
457-
458-
# rmse = normr / np.sqrt(nu)
459-
# tval = stats.t.ppf((1 - 0.32 / 2), nu)
460-
461-
# se = rmse * np.sqrt(np.sum(np.square(np.abs(RI)), axis=1, keepdims=True))
462-
# p1err = tval * se
463-
464-
# # Update shifts.
465-
# pn = p + p1
466-
# pnerr = np.sqrt(np.square(perr) + np.square(p1err))
467-
468-
# # END OF LOOP
469-
470-
# if return_meddz:
471-
# print(f"Returning median vertical offset: {meddz:.3f}")
472-
# dem2out = dem2 - meddz
473-
# p = np.array([[meddz, 0, 0]]).T
474-
# perr = np.array([[meddz_err, 0, 0]]).T
475-
# d0 = d00
476-
# status = "dz_only"
477-
478-
# elif critical_failure:
479-
# print("Regression critical failure, returning original DEM, NaN trans, and RMSE")
480-
# dem2out = dem2
481-
# p = np.full((3, 1), np.nan)
482-
# perr = np.full((3, 1), np.nan)
483-
# d0 = np.nan
484-
# status = "failed"
485-
486-
# else:
487-
# status = "coregistered"
488-
489-
# print(f"Final offset (z,x,y): {p[0, 0]:.3f}, {p[1, 0]:.3f}, {p[2, 0]:.3f}")
490-
# print(f"Final RMSE = {d0:.3f}")
491-
492-
# # Construct metadata:
493-
# metadata_dict = {
494-
# "coreg_status": status,
495-
# "x_offset": p[1, 0],
496-
# "y_offset": p[2, 0],
497-
# "z_offset": p[0, 0],
498-
# "x_offset_err": perr[1, 0],
499-
# "y_offset_err": perr[2, 0],
500-
# "z_offset_err": perr[0, 0],
501-
# "rmse": d0,
502-
# }
503-
# # Convert all numerical values to regular Python floats
504-
# metadata_dict = {
505-
# key: float(value) if isinstance(value, (np.float64, np.float32)) else value
506-
# for key, value in metadata_dict.items()
507-
# }
508-
509-
# # Return
510-
# return dem2out, metadata_dict # p.T[0], perr.T[0], d0
511-
512-
513300
def shift_dem(dem, trans, x, y, verbose=True):
514301
"""
515302
Shifts DEM according to translation factors ascertained in coregisterdems function

0 commit comments

Comments
 (0)