from matplotlib import pyplot as plt
import zarr
import colorcet
import moraine.cli as mc
from moraine.utils_ import is_cuda_available
import numpy as np
import holoviews as hv
from bokeh.models import WheelZoomTool
from holoviews import opts

Deep Learning Operator
n2f
n2f (rslc:str, intf:str, image_pairs:numpy.ndarray, chunks:tuple=None, out_chunks:tuple=None, depths:tuple=(0, 0), model:str=None, cuda:bool=False, processes=None, n_workers=None, threads_per_worker=None, rmm_pool_size=0.9, **dask_cluster_arg)
Noise2Fringe interferogram filtering.
| Type | Default | Details | |
|---|---|---|---|
| rslc | str | input: rslc stack, shape (nlines, width, nimages) | |
| intf | str | output: filtered interferograms stack, shape (nlines, width, nimage_pairs) | |
| image_pairs | ndarray | input: image pairs | |
| chunks | tuple | None | parallel processing azimuth/range chunk size, optional. Default: rslc.chunks[:2] |
| out_chunks | tuple | None | output chunks |
| depths | tuple | (0, 0) | width of the boundary |
| model | str | None | path to the model in onnx format; uses the model that comes with this package by default |
| cuda | bool | False | if use cuda for processing, false by default |
| processes | NoneType | None | use process for dask worker over thread, the default is True for cpu, only applied if cuda==False |
| n_workers | NoneType | None | number of dask worker, the default is 1 for cpu, number of GPUs for cuda |
| threads_per_worker | NoneType | None | number of threads per dask worker, the default is 1 for cpu, only applied if cuda==False |
| rmm_pool_size | float | 0.9 | set the rmm pool size, only applied when cuda==True |
| dask_cluster_arg | VAR_KEYWORD |
logger = mc.get_logger()
rslc = './raw/rslc.zarr/'
intf = './dl/n2f_intf.zarr'
rslc_zarr = zarr.open(rslc,mode='r')
tnet = mr.TempNet.from_bandwidth(rslc_zarr.shape[-1], 1)
n2f(rslc,intf,tnet.image_pairs,chunks=(2500,1834))
2025-10-23 21:38:46 - log_args - INFO - running function: n2f
2025-10-23 21:38:46 - log_args - INFO - fetching args:
2025-10-23 21:38:46 - log_args - INFO - rslc = './raw/rslc.zarr/'
2025-10-23 21:38:46 - log_args - INFO - intf = './dl/n2f_intf.zarr'
2025-10-23 21:38:46 - log_args - INFO - image_pairs = array([[ 0, 1],
[ 1, 2],
[ 2, 3],
[ 3, 4],
[ 4, 5],
[ 5, 6],
[ 6, 7],
[ 7, 8],
[ 8, 9],
[ 9, 10],
[10, 11],
[11, 12],
[12, 13],
[13, 14],
[14, 15],
[15, 16]], dtype=int32)
2025-10-23 21:38:46 - log_args - INFO - chunks = (2500, 1834)
2025-10-23 21:38:46 - log_args - INFO - out_chunks = None
2025-10-23 21:38:46 - log_args - INFO - depths = (0, 0)
2025-10-23 21:38:46 - log_args - INFO - model = None
2025-10-23 21:38:46 - log_args - INFO - cuda = False
2025-10-23 21:38:46 - log_args - INFO - processes = None
2025-10-23 21:38:46 - log_args - INFO - n_workers = None
2025-10-23 21:38:46 - log_args - INFO - threads_per_worker = None
2025-10-23 21:38:46 - log_args - INFO - rmm_pool_size = 0.9
2025-10-23 21:38:46 - log_args - INFO - dask_cluster_arg = {}
2025-10-23 21:38:46 - log_args - INFO - fetching args done.
2025-10-23 21:38:46 - zarr_info - INFO - ./raw/rslc.zarr/ zarray shape, chunks, dtype: (2500, 1834, 17), (1000, 1000, 1), complex64
2025-10-23 21:38:46 - n2f - INFO - processing azimuth chunk size: 2500
2025-10-23 21:38:46 - n2f - INFO - processing range chunk size: 1834
2025-10-23 21:38:46 - n2f - INFO - starting dask cluster.
2025-10-23 21:38:47 - n2f - INFO - dask cluster started.
2025-10-23 21:38:47 - dask_cluster_info - INFO - dask cluster: LocalCluster(dashboard_link='http://127.0.0.1:8787/status', workers=1, threads=1, memory=256.00 GiB)
2025-10-23 21:38:47 - darr_info - INFO - rslc dask array shape, chunksize, dtype: (2500, 1834, 17), (2500, 1834, 1), complex64
2025-10-23 21:38:47 - n2f - INFO - got filtered interferograms.
2025-10-23 21:38:47 - darr_info - INFO - intf dask array shape, chunksize, dtype: (2500, 1834, 16), (2500, 1834, 1), complex64
2025-10-23 21:38:47 - n2f - INFO - saving filtered interferograms.
2025-10-23 21:38:47 - zarr_info - INFO - ./dl/n2f_intf.zarr zarray shape, chunks, dtype: (2500, 1834, 16), (1000, 1000, 1), complex64
2025-10-23 21:38:47 - n2f - INFO - computing graph setted. doing all the computing.
[########################################] | 100% Completed | 1min 54.7s
2025-10-23 21:40:42 - n2f - INFO - computing finished.
2025-10-23 21:40:42 - n2f - INFO - dask cluster closed.
CPU times: user 8.16 s, sys: 5.44 s, total: 13.6 s
Wall time: 1min 56s
n2ft
n2ft (x:str, y:str, rslc:str, intf:str, image_pairs:numpy.ndarray, chunks:int=None, out_chunks:int=None, k:int=128, model:str=None, cuda:bool=False, processes=None, n_workers=None, threads_per_worker=None, rmm_pool_size=0.9, **dask_cluster_arg)
Noise2Fringe interferogram filtering.
| Type | Default | Details | |
|---|---|---|---|
| x | str | input: x coordinate, e.g., longitude, shape (n,) | |
| y | str | input: y coordinate, e.g., latitude, shape (n,) | |
| rslc | str | input: rslc stack, shape (n, nimages) | |
| intf | str | output: filtered interferograms stack, shape (n, nimage_pairs) | |
| image_pairs | ndarray | input: image pairs | |
| chunks | int | None | parallel processing point chunk size, optional. Default: rslc.chunks[0] |
| out_chunks | int | None | output point chunk size, Default: rslc.chunks[0] |
| k | int | 128 | halo size for chunkwise processing |
| model | str | None | path to the model in onnx format; uses the model that comes with this package by default |
| cuda | bool | False | if use cuda for processing, false by default |
| processes | NoneType | None | use process for dask worker over thread, the default is True for cpu, only applied if cuda==False |
| n_workers | NoneType | None | number of dask worker, the default is 1 for cpu, number of GPUs for cuda |
| threads_per_worker | NoneType | None | number of threads per dask worker, the default is 1 for cpu, only applied if cuda==False |
| rmm_pool_size | float | 0.9 | set the rmm pool size, only applied when cuda==True |
| dask_cluster_arg | VAR_KEYWORD |
Usage:
base = '../Tutorials/CLI/ps_processing/'
ps_can_rslc = base+'hix/ps_can_rslc.zarr/'
ps_can_lon = base+'hix/ps_can_lon.zarr'
ps_can_lat = base+'hix/ps_can_lat.zarr'
ps_can_filtered_intf = './dl/ps_can_n2ft_intf.zarr'
ps_can_rslc_zarr = zarr.open(ps_can_rslc,mode='r')
tnet = mr.TempNet.from_bandwidth(ps_can_rslc_zarr.shape[-1], 1)
n2ft(ps_can_lon, ps_can_lat, ps_can_rslc, ps_can_filtered_intf, tnet.image_pairs, chunks=50000)
2025-10-23 21:40:42 - log_args - INFO - running function: n2ft
2025-10-23 21:40:42 - log_args - INFO - fetching args:
2025-10-23 21:40:42 - log_args - INFO - x = '../Tutorials/CLI/ps_processing/hix/ps_can_lon.zarr'
2025-10-23 21:40:42 - log_args - INFO - y = '../Tutorials/CLI/ps_processing/hix/ps_can_lat.zarr'
2025-10-23 21:40:42 - log_args - INFO - rslc = '../Tutorials/CLI/ps_processing/hix/ps_can_rslc.zarr/'
2025-10-23 21:40:42 - log_args - INFO - intf = './dl/ps_can_n2ft_intf.zarr'
2025-10-23 21:40:42 - log_args - INFO - image_pairs = array([[ 0, 1],
[ 1, 2],
[ 2, 3],
[ 3, 4],
[ 4, 5],
[ 5, 6],
[ 6, 7],
[ 7, 8],
[ 8, 9],
[ 9, 10],
[10, 11],
[11, 12],
[12, 13],
[13, 14],
[14, 15],
[15, 16]], dtype=int32)
2025-10-23 21:40:42 - log_args - INFO - chunks = 50000
2025-10-23 21:40:42 - log_args - INFO - out_chunks = None
2025-10-23 21:40:42 - log_args - INFO - k = 128
2025-10-23 21:40:42 - log_args - INFO - model = None
2025-10-23 21:40:42 - log_args - INFO - cuda = False
2025-10-23 21:40:42 - log_args - INFO - processes = None
2025-10-23 21:40:42 - log_args - INFO - n_workers = None
2025-10-23 21:40:42 - log_args - INFO - threads_per_worker = None
2025-10-23 21:40:42 - log_args - INFO - rmm_pool_size = 0.9
2025-10-23 21:40:42 - log_args - INFO - dask_cluster_arg = {}
2025-10-23 21:40:42 - log_args - INFO - fetching args done.
2025-10-23 21:40:42 - n2ft - INFO - load coordinates
2025-10-23 21:40:42 - n2ft - INFO - Done
2025-10-23 21:40:42 - zarr_info - INFO - ../Tutorials/CLI/ps_processing/hix/ps_can_rslc.zarr/ zarray shape, chunks, dtype: (634220, 17), (200000, 1), complex64
2025-10-23 21:40:42 - n2ft - INFO - processing point chunk size: 50000
2025-10-23 21:40:42 - n2ft - INFO - distributing every processing chunk with halo data
2025-10-23 21:40:52 - n2ft - INFO - processing chunk size with halo data: [53974, 59381, 62060, 59002, 58017, 59242, 59163, 61308, 60924, 58898, 55829, 56220, 40679]
2025-10-23 21:40:52 - n2ft - INFO - starting dask cluster.
2025-10-23 21:40:52 - n2ft - INFO - dask cluster started.
2025-10-23 21:40:52 - dask_cluster_info - INFO - dask cluster: LocalCluster(dashboard_link='http://127.0.0.1:8787/status', workers=1, threads=1, memory=256.00 GiB)
2025-10-23 21:40:52 - darr_info - INFO - rslc dask array shape, chunksize, dtype: (634220, 17), (634220, 1), complex64
2025-10-23 21:40:53 - n2ft - INFO - got filtered interferograms.
2025-10-23 21:40:53 - darr_info - INFO - intf dask array shape, chunksize, dtype: (634220, 16), (634220, 1), complex64
2025-10-23 21:40:53 - n2ft - INFO - saving filtered interferograms.
2025-10-23 21:40:53 - zarr_info - INFO - ./dl/ps_can_n2ft_intf.zarr zarray shape, chunks, dtype: (634220, 16), (200000, 1), complex64
2025-10-23 21:40:53 - n2ft - INFO - computing graph setted. doing all the computing.
[ ] | 0% Completed | 0.2s
/users/kangl/miniforge3/envs/work2/lib/python3.12/site-packages/distributed/client.py:3371: UserWarning: Sending large graph of size 20.23 MiB.
This may cause some slowdown.
Consider loading the data with Dask directly
or using futures or delayed objects to embed the data into the graph without repetition.
See also https://docs.dask.org/en/stable/best-practices.html#load-data-with-dask for more information.
warnings.warn(
[########################################] | 100% Completed | 3min 9.7s
2025-10-23 21:44:02 - n2ft - INFO - computing finished.
2025-10-23 21:44:03 - n2ft - INFO - dask cluster closed.
CPU times: user 7.85 s, sys: 4.36 s, total: 12.2 s
Wall time: 3min 20s
if is_cuda_available():
    n2ft(ps_can_lon, ps_can_lat, ps_can_rslc, ps_can_filtered_intf, tnet.image_pairs, chunks=50000, cuda=True)
2025-10-23 21:44:03 - log_args - INFO - running function: n2ft
2025-10-23 21:44:03 - log_args - INFO - fetching args:
2025-10-23 21:44:03 - log_args - INFO - x = '../Tutorials/CLI/ps_processing/hix/ps_can_lon.zarr'
2025-10-23 21:44:03 - log_args - INFO - y = '../Tutorials/CLI/ps_processing/hix/ps_can_lat.zarr'
2025-10-23 21:44:03 - log_args - INFO - rslc = '../Tutorials/CLI/ps_processing/hix/ps_can_rslc.zarr/'
2025-10-23 21:44:03 - log_args - INFO - intf = './dl/ps_can_n2ft_intf.zarr'
2025-10-23 21:44:03 - log_args - INFO - image_pairs = array([[ 0, 1],
[ 1, 2],
[ 2, 3],
[ 3, 4],
[ 4, 5],
[ 5, 6],
[ 6, 7],
[ 7, 8],
[ 8, 9],
[ 9, 10],
[10, 11],
[11, 12],
[12, 13],
[13, 14],
[14, 15],
[15, 16]], dtype=int32)
2025-10-23 21:44:03 - log_args - INFO - chunks = 50000
2025-10-23 21:44:03 - log_args - INFO - out_chunks = None
2025-10-23 21:44:03 - log_args - INFO - k = 128
2025-10-23 21:44:03 - log_args - INFO - model = None
2025-10-23 21:44:03 - log_args - INFO - cuda = True
2025-10-23 21:44:03 - log_args - INFO - processes = None
2025-10-23 21:44:03 - log_args - INFO - n_workers = None
2025-10-23 21:44:03 - log_args - INFO - threads_per_worker = None
2025-10-23 21:44:03 - log_args - INFO - rmm_pool_size = 0.9
2025-10-23 21:44:03 - log_args - INFO - dask_cluster_arg = {}
2025-10-23 21:44:03 - log_args - INFO - fetching args done.
2025-10-23 21:44:03 - n2ft - INFO - load coordinates
2025-10-23 21:44:03 - n2ft - INFO - Done
2025-10-23 21:44:03 - zarr_info - INFO - ../Tutorials/CLI/ps_processing/hix/ps_can_rslc.zarr/ zarray shape, chunks, dtype: (634220, 17), (200000, 1), complex64
2025-10-23 21:44:03 - n2ft - INFO - processing point chunk size: 50000
2025-10-23 21:44:03 - n2ft - INFO - distributing every processing chunk with halo data
2025-10-23 21:44:09 - n2ft - INFO - processing chunk size with halo data: [53974, 59381, 62060, 59002, 58017, 59242, 59163, 61308, 60924, 58898, 55829, 56220, 40679]
2025-10-23 21:44:09 - n2ft - INFO - starting dask cluster.
2025-10-23 21:44:12 - n2ft - INFO - dask cluster started.
2025-10-23 21:44:12 - dask_cluster_info - INFO - dask cluster: LocalCUDACluster(dashboard_link='http://127.0.0.1:8787/status', workers=2, threads=2, memory=256.00 GiB)
2025-10-23 21:44:12 - darr_info - INFO - rslc dask array shape, chunksize, dtype: (634220, 17), (634220, 1), complex64
2025-10-23 21:44:12,307 - distributed.worker - WARNING - Setting CPU affinity for GPU failed. Please refer to the following link for troubleshooting information: https://docs.rapids.ai/api/dask-cuda/nightly/troubleshooting/#setting-cpu-affinity-failure
2025-10-23 21:44:12,308 - distributed.worker - WARNING - Setting CPU affinity for GPU failed. Please refer to the following link for troubleshooting information: https://docs.rapids.ai/api/dask-cuda/nightly/troubleshooting/#setting-cpu-affinity-failure
2025-10-23 21:44:12 - n2ft - INFO - got filtered interferograms.
2025-10-23 21:44:12 - darr_info - INFO - intf dask array shape, chunksize, dtype: (634220, 16), (634220, 1), complex64
2025-10-23 21:44:12 - n2ft - INFO - saving filtered interferograms.
2025-10-23 21:44:12 - zarr_info - INFO - ./dl/ps_can_n2ft_intf.zarr zarray shape, chunks, dtype: (634220, 16), (200000, 1), complex64
2025-10-23 21:44:12 - n2ft - INFO - computing graph setted. doing all the computing.
[########################################] | 100% Completed | 36.0s
2025-10-23 21:44:48 - n2ft - INFO - computing finished.
2025-10-23 21:44:51 - n2ft - INFO - dask cluster closed.
CPU times: user 1.26 s, sys: 896 ms, total: 2.15 s
Wall time: 47.5 s
ps_can_x = base+'hix/ps_can_e.zarr'
ps_can_y = base+'hix/ps_can_n.zarr'
ps_can_filtered_intf_pyramid_dir = './dl/ps_can_n2ft_intf_pyramid'
dates = ["20210802", "20210816", "20210830", "20210913", "20211011", "20211025", "20220606", "20220620",
"20220704", "20220718", "20220801", "20220815", "20220829", "20220912", "20220926", "20221010",
"20221024",]mc.pc_pyramid(ps_can_filtered_intf, ps_can_filtered_intf_pyramid_dir, x=ps_can_x, y=ps_can_y, ras_resolution=20)2025-10-23 21:44:51 - log_args - INFO - running function: pc_pyramid
2025-10-23 21:44:51 - log_args - INFO - fetching args:
2025-10-23 21:44:51 - log_args - INFO - pc = './dl/ps_can_n2ft_intf.zarr'
2025-10-23 21:44:51 - log_args - INFO - out_dir = './dl/ps_can_n2ft_intf_pyramid'
2025-10-23 21:44:51 - log_args - INFO - x = '../Tutorials/CLI/ps_processing/hix/ps_can_e.zarr'
2025-10-23 21:44:51 - log_args - INFO - y = '../Tutorials/CLI/ps_processing/hix/ps_can_n.zarr'
2025-10-23 21:44:51 - log_args - INFO - yx = None
2025-10-23 21:44:51 - log_args - INFO - ras_resolution = 20
2025-10-23 21:44:51 - log_args - INFO - ras_chunks = (256, 256)
2025-10-23 21:44:51 - log_args - INFO - pc_chunks = 65536
2025-10-23 21:44:51 - log_args - INFO - processes = False
2025-10-23 21:44:51 - log_args - INFO - n_workers = 1
2025-10-23 21:44:51 - log_args - INFO - threads_per_worker = 2
2025-10-23 21:44:51 - log_args - INFO - dask_cluster_arg = {}
2025-10-23 21:44:51 - log_args - INFO - fetching args done.
2025-10-23 21:44:51 - pc_pyramid - INFO - clean out dir
2025-10-23 21:44:51 - zarr_info - INFO - ./dl/ps_can_n2ft_intf.zarr zarray shape, chunks, dtype: (634220, 16), (200000, 1), complex64
2025-10-23 21:44:51 - pc_pyramid - INFO - rendering point cloud data coordinates:
2025-10-23 21:44:51 - pc_pyramid - INFO - rasterizing point cloud data to grid with bounds: [np.float64(-16498472.34405057), np.float64(8649597.04965627), np.float64(-16470132.34405057), np.float64(8674957.04965627)].
2025-10-23 21:44:51 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/x.zarr zarray shape, chunks, dtype: (634220,), (65536,), float64
2025-10-23 21:44:51 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/y.zarr zarray shape, chunks, dtype: (634220,), (65536,), float64
2025-10-23 21:44:52 - pc_pyramid - INFO - pc data coordinates rendering ends.
2025-10-23 21:44:52 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/idx_0.zarr zarray shape, chunks, dtype: (1269, 1418), (256, 256), int64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/idx_1.zarr zarray shape, chunks, dtype: (635, 709), (256, 256), int64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/idx_2.zarr zarray shape, chunks, dtype: (318, 355), (256, 256), int64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/idx_3.zarr zarray shape, chunks, dtype: (159, 178), (256, 256), int64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/idx_4.zarr zarray shape, chunks, dtype: (80, 89), (256, 256), int64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/idx_5.zarr zarray shape, chunks, dtype: (40, 45), (256, 256), int64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/idx_6.zarr zarray shape, chunks, dtype: (20, 23), (256, 256), int64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/idx_7.zarr zarray shape, chunks, dtype: (10, 12), (256, 256), int64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/idx_8.zarr zarray shape, chunks, dtype: (5, 6), (256, 256), int64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/idx_9.zarr zarray shape, chunks, dtype: (3, 3), (256, 256), int64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/idx_10.zarr zarray shape, chunks, dtype: (2, 2), (256, 256), int64
2025-10-23 21:44:53 - pc_pyramid - INFO - rasterized idx rendering ends
2025-10-23 21:44:53 - pc_pyramid - INFO - dask local cluster started to render pc data.
2025-10-23 21:44:53 - dask_cluster_info - INFO - dask cluster: LocalCluster(dashboard_link='http://10.211.48.7:8787/status', workers=1, threads=2, memory=256.00 GiB)
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/pc.zarr zarray shape, chunks, dtype: (634220, 16), (65536, 1), complex64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/0.zarr zarray shape, chunks, dtype: (1269, 1418, 16), (256, 256, 1), complex64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/1.zarr zarray shape, chunks, dtype: (635, 709, 16), (256, 256, 1), complex64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/2.zarr zarray shape, chunks, dtype: (318, 355, 16), (256, 256, 1), complex64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/3.zarr zarray shape, chunks, dtype: (159, 178, 16), (256, 256, 1), complex64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/4.zarr zarray shape, chunks, dtype: (80, 89, 16), (256, 256, 1), complex64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/5.zarr zarray shape, chunks, dtype: (40, 45, 16), (256, 256, 1), complex64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/6.zarr zarray shape, chunks, dtype: (20, 23, 16), (256, 256, 1), complex64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/7.zarr zarray shape, chunks, dtype: (10, 12, 16), (256, 256, 1), complex64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/8.zarr zarray shape, chunks, dtype: (5, 6, 16), (256, 256, 1), complex64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/9.zarr zarray shape, chunks, dtype: (3, 3, 16), (256, 256, 1), complex64
2025-10-23 21:44:53 - zarr_info - INFO - dl/ps_can_n2ft_intf_pyramid/10.zarr zarray shape, chunks, dtype: (2, 2, 16), (256, 256, 1), complex64
2025-10-23 21:44:53 - pc_pyramid - INFO - computing graph setted. doing all the computing.
2025-10-23 21:44:55 - pc_pyramid - INFO - computing finished. 1.8s
2025-10-23 21:44:55 - pc_pyramid - INFO - dask cluster closed.
rslc_pyramid_dir = base+'hix/ps_can_rslc_pyramid'
ps_can_intf_plot = mc.pc_plot(rslc_pyramid_dir,post_proc_ras='intf_seq', post_proc_pc='intf_seq',n_kdim=1,level_increase=0)
ps_can_intf_plot = ps_can_intf_plot[0]*ps_can_intf_plot[1]
ps_can_filtered_intf_plot = mc.pc_plot(ps_can_filtered_intf_pyramid_dir,post_proc_ras='phase', post_proc_pc='phase',level_increase=0)
ps_can_filtered_intf_plot = ps_can_filtered_intf_plot[0]*ps_can_filtered_intf_plot[1]
ps_can_intf_plot = ps_can_intf_plot.redim(
i=hv.Dimension('i', label='Interferogram', range=(0,15), value_format=(lambda i: dates[i]+'_'+dates[i+1])),
x=hv.Dimension('r', label='Range'),
y=hv.Dimension('az',label='Azimuth'),
z=hv.Dimension('Phase',range=(-np.pi,np.pi))
)
ps_can_filtered_intf_plot = ps_can_filtered_intf_plot.redim(
i=hv.Dimension('i', label='Interferogram', range=(0,15), value_format=(lambda i: dates[i]+'_'+dates[i+1])),
x=hv.Dimension('r', label='Range'),
y=hv.Dimension('az',label='Azimuth'),
z=hv.Dimension('Phase',range=(-np.pi,np.pi))
)
Specify plotting options and plot:
hv.output(widget_location='bottom')
ps_can_intf_plot.opts(
opts.Image(
cmap='colorwheel',width=600, height=400, colorbar=True,
default_tools=['pan',WheelZoomTool(zoom_on_axis=False),'save','reset','hover'],
active_tools=['wheel_zoom']
),
opts.Points(
color='Phase', cmap='colorwheel',width=600, height=400, colorbar=True,
default_tools=['pan',WheelZoomTool(zoom_on_axis=False),'save','reset','hover'],
active_tools=['wheel_zoom']
)
)
ps_can_filtered_intf_plot.opts(
opts.Image(
cmap='colorwheel',width=600, height=400, colorbar=True,
default_tools=['pan',WheelZoomTool(zoom_on_axis=False),'save','reset','hover'],
active_tools=['wheel_zoom']
),
opts.Points(
color='Phase', cmap='colorwheel',width=600, height=400, colorbar=True,
default_tools=['pan',WheelZoomTool(zoom_on_axis=False),'save','reset','hover'],
active_tools=['wheel_zoom']
)
)
ps_can_intf_plot+ps_can_filtered_intf_plot