Commit
sentinel1 denoising out of the class
agrouaze committed Dec 5, 2024
1 parent 1ed0703 commit af2b2ad
Showing 2 changed files with 102 additions and 68 deletions.
143 changes: 75 additions & 68 deletions src/xsar/sentinel1_dataset.py
@@ -49,6 +49,74 @@
}


def add_denoised(ds, sar_meta_denoised, dataset_recalibration, clip=False, vars=None, apply_recalibration=False):
"""add denoised vars to dataset
Parameters
----------
ds : xarray.DataSet
dataset with non denoised vars, named `%s_raw`.
sar_meta_denoised : dict
with pol as key, and bool as values (True is DN is predenoised at L1 level)
clip : bool, optional
If True, negative signal will be clipped to 0. (default to False )
vars : list, optional
variables names to add, by default `['sigma0' , 'beta0' , 'gamma0']`
Returns
-------
xarray.Dataset
dataset with denoised variables
"""
if vars is None:
vars = ["sigma0", "beta0", "gamma0"]
for varname in vars:
varname_raw = varname + "_raw"
noise = "ne%sz" % varname[0]
if varname_raw not in ds:
continue
if all(sar_meta_denoised.values()): # previously self.sar_meta.denoised.values()
# already denoised, just add an alias
ds[varname] = ds[varname_raw]
elif len(set(sar_meta_denoised.values())) != 1:
# TODO: to be implemented
raise NotImplementedError(
"semi denoised products not yet implemented")
else:
varname_raw_corrected = varname_raw + "__corrected"
if (apply_recalibration is True) & (
varname_raw_corrected in dataset_recalibration.variables
):
denoised = (
dataset_recalibration[varname_raw_corrected] - ds[noise]
)
denoised.attrs["history"] = merge_yaml(
[ds[varname_raw].attrs["history"],
ds[noise].attrs["history"]],
section=varname,
)
denoised.attrs["comment_recalibration"] = (
"kersten recalibration applied"
)
else:
denoised = ds[varname_raw] - ds[noise]
denoised.attrs["history"] = merge_yaml(
[ds[varname_raw].attrs["history"],
ds[noise].attrs["history"]],
section=varname,
)
denoised.attrs["comment_recalibration"] = (
"kersten recalibration not applied"
)

if clip:
denoised = denoised.clip(min=0)
denoised.attrs["comment"] = "clipped, no values <0"
else:
denoised.attrs["comment"] = "not clipped, some values can be <0"
ds[varname] = denoised
return ds
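
For reference, the per-variable operation this new helper performs reduces to a noise subtraction plus optional clipping. A minimal standalone sketch with toy arrays (not real Sentinel-1 data, and skipping the history/recalibration attribute bookkeeping) would be:

import numpy as np
import xarray as xr

# toy calibrated backscatter and noise-equivalent sigma zero (NESZ)
sigma0_raw = xr.DataArray(np.array([[0.50, 0.02], [0.30, 0.01]]), dims=("line", "sample"))
nesz = xr.DataArray(np.full((2, 2), 0.05), dims=("line", "sample"))

denoised = sigma0_raw - nesz      # same subtraction as add_denoised (no recalibration)
clipped = denoised.clip(min=0)    # effect of clip=True: negative backscatter set to 0
print(clipped.values)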

# noinspection PyTypeChecker
class Sentinel1Dataset(BaseDataset):
"""
@@ -159,7 +227,7 @@ def __init__(
geoloc = self.sar_meta.geoloc
geoloc.attrs["history"] = "annotations"

#  offboresight angle
# offboresight angle
geoloc["offboresightAngle"] = (
geoloc.elevationAngle
- (
@@ -802,7 +870,7 @@ def add_gains(self, new_aux_cal_name, new_aux_pp1_name):
os.path.basename(self.sar_meta.manifest_attrs["aux_pp1"])
)

#  1 - compute offboresight angle
# 1 - compute offboresight angle
roll = self.datatree["antenna_pattern"]["roll"]
azimuthTime = self.datatree["antenna_pattern"]["azimuthTime"]
interp_roll = interp1d(
@@ -910,7 +978,7 @@ def add_gains(self, new_aux_cal_name, new_aux_pp1_name):
self._dataset_recalibration["offboresigthAngle"]
)

# 3- get gains gproc and map them
# 3-get gains gproc and map them
dict_gproc_old = get_gproc_gains(
path_aux_pp1_old,
mode=self.sar_meta.manifest_attrs["swath_type"],
@@ -1039,7 +1107,10 @@ def apply_calibration_and_denoising(self):
self._dataset_recalibration
)

self._dataset = self._add_denoised(self._dataset)
        self._dataset = add_denoised(
            self._dataset,
            sar_meta_denoised=self.sar_meta.denoised,
            clip=False,
            vars=None,
            apply_recalibration=self.apply_recalibration,
            dataset_recalibration=self._dataset_recalibration,
        )

for var_name, lut_name in self._map_var_lut.items():
var_name_raw = var_name + "_raw"
@@ -1715,71 +1786,7 @@ def _get_noise(self, var_name):
)
return dataarr.to_dataset(name=name)

def _add_denoised(self, ds, clip=False, vars=None):
"""add denoised vars to dataset
Parameters
----------
ds : xarray.DataSet
dataset with non denoised vars, named `%s_raw`.
clip : bool, optional
If True, negative signal will be clipped to 0. (default to False )
vars : list, optional
variables names to add, by default `['sigma0' , 'beta0' , 'gamma0']`
Returns
-------
xarray.DataSet
dataset with denoised vars
"""
if vars is None:
vars = ["sigma0", "beta0", "gamma0"]
for varname in vars:
varname_raw = varname + "_raw"
noise = "ne%sz" % varname[0]
if varname_raw not in ds:
continue
if all(self.sar_meta.denoised.values()):
# already denoised, just add an alias
ds[varname] = ds[varname_raw]
elif len(set(self.sar_meta.denoised.values())) != 1:
# TODO: to be implemented
raise NotImplementedError(
"semi denoised products not yet implemented")
else:
varname_raw_corrected = varname_raw + "__corrected"
if (self.apply_recalibration) & (
varname_raw_corrected in self._dataset_recalibration.variables
):
denoised = (
self._dataset_recalibration[varname_raw_corrected] - ds[noise]
)
denoised.attrs["history"] = merge_yaml(
[ds[varname_raw].attrs["history"],
ds[noise].attrs["history"]],
section=varname,
)
denoised.attrs["comment_recalibration"] = (
"kersten recalibration applied"
)
else:
denoised = ds[varname_raw] - ds[noise]
denoised.attrs["history"] = merge_yaml(
[ds[varname_raw].attrs["history"],
ds[noise].attrs["history"]],
section=varname,
)
denoised.attrs["comment_recalibration"] = (
"kersten recalibration not applied"
)

if clip:
denoised = denoised.clip(min=0)
denoised.attrs["comment"] = "clipped, no values <0"
else:
denoised.attrs["comment"] = "not clipped, some values can be <0"
ds[varname] = denoised
return ds

@property
def get_burst_azitime(self):
27 changes: 27 additions & 0 deletions test/test_denoising.py
@@ -0,0 +1,27 @@
from xsar.sentinel1_dataset import add_denoised
import xsar
import logging
logging.basicConfig()
logging.getLogger('xsar').setLevel(logging.DEBUG)
logging.getLogger('xsar.utils').setLevel(logging.DEBUG)
logging.getLogger('xsar.xml_parser').setLevel(logging.DEBUG)
logging.captureWarnings(True)

logger = logging.getLogger('xsar_test')
logger.setLevel(logging.DEBUG)


def test_denoising():
meta = xsar.Sentinel1Meta(
xsar.get_test_file('S1A_IW_GRDH_1SDV_20170907T103020_20170907T103045_018268_01EB76_Z010.SAFE'))
reader = xsar.Sentinel1Dataset(meta)
    ds = reader.datatree['measurement'].dataset
    # keep only the raw (not yet denoised) variables for the test
    ds = ds[['longitude', 'latitude', 'incidence', 'sigma0_raw', 'nesz']]
    print('ds input', ds)
    ds = add_denoised(ds, sar_meta_denoised={'VV': False}, dataset_recalibration=reader._dataset_recalibration)
    print('ds output', ds)
return True

if __name__ == '__main__':
test_denoising()
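
A possible follow-up (not part of this commit) would be to assert on the output instead of only printing it: with clip=False and sar_meta_denoised={'VV': False}, add_denoised is expected to add a sigma0 variable flagged as not clipped, so the test body could end with assertions such as:

    # hypothetical assertions that could replace `return True` in test_denoising
    assert 'sigma0' in ds
    assert ds['sigma0'].attrs['comment'] == 'not clipped, some values can be <0'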
