small changes in attributes #251

Merged · 5 commits · Mar 7, 2025
Changes from all commits
68 changes: 38 additions & 30 deletions src/xsar/sentinel1_dataset.py
@@ -18,13 +18,20 @@
merge_yaml,
to_lon180,
config,

get_path_aux_cal,
get_path_aux_pp1,
get_geap_gains,
get_gproc_gains,
)
from .sentinel1_meta import Sentinel1Meta
from .ipython_backends import repr_mimebundle
from .base_dataset import BaseDataset
import pandas as pd
import geopandas as gpd

import os


logger = logging.getLogger("xsar.sentinel1_dataset")
logger.addHandler(logging.NullHandler())
@@ -623,6 +630,17 @@ def add_high_resolution_variables(

if "GRD" in str(self.datatree.attrs["product"]):
self.add_swath_number()
path_aux_cal_old = get_path_aux_cal(
self.sar_meta.manifest_attrs["aux_cal"]
)

path_aux_pp1_old = get_path_aux_pp1(
self.sar_meta.manifest_attrs["aux_pp1"]
)

if self.apply_recalibration == False:
new_cal = "None"
new_pp1 = "None"

if self.apply_recalibration:
path_dataframe_aux = config["path_dataframe_aux"]
@@ -647,7 +665,7 @@
sel_cal = sel_cal.sort_values(
by=["validation_date", "generation_date"], ascending=False)

path_new_cal = sel_cal.iloc[0].aux_path
new_cal = sel_cal.iloc[0].aux_path

sel_pp1 = dataframe_aux.loc[(dataframe_aux.sat_name == self.sar_meta.manifest_attrs['satellite']) &
(dataframe_aux.aux_type == "PP1") &
@@ -668,9 +686,20 @@
sel_pp1 = sel_pp1.sort_values(
by=["validation_date", "generation_date"], ascending=False
)
path_new_pp1 = sel_pp1.iloc[0].aux_path
new_pp1 = sel_pp1.iloc[0].aux_path

path_aux_cal_new = get_path_aux_cal(
os.path.basename(new_cal))
path_aux_pp1_new = get_path_aux_pp1(
os.path.basename(new_pp1))

self.add_gains(path_aux_cal_new, path_aux_pp1_new,
path_aux_cal_old, path_aux_pp1_old)

self.add_gains(path_new_cal, path_new_pp1)
self.datatree["recalibration"].attrs["aux_cal_new"] = os.path.basename(
new_cal)
self.datatree["recalibration"].attrs["aux_pp1_new"] = os.path.basename(
new_pp1)

rasters = self._load_rasters_vars()
if rasters is not None:
@@ -778,28 +807,12 @@ def add_swath_number(self):
self._dataset_recalibration
)

def add_gains(self, new_aux_cal_name, new_aux_pp1_name):
from .utils import (
get_path_aux_cal,
get_path_aux_pp1,
get_geap_gains,
get_gproc_gains,
)
import os
def add_gains(self, path_aux_cal_new, path_aux_pp1_new, path_aux_cal_old, path_aux_pp1_old):

from scipy.interpolate import interp1d

logger.debug(
f"doing recalibration with AUX_CAL = {new_aux_cal_name} & AUX_PP1 = {new_aux_pp1_name}"
)

path_aux_cal_new = get_path_aux_cal(os.path.basename(new_aux_cal_name))
path_aux_cal_old = get_path_aux_cal(
os.path.basename(self.sar_meta.manifest_attrs["aux_cal"])
)

path_aux_pp1_new = get_path_aux_pp1(os.path.basename(new_aux_pp1_name))
path_aux_pp1_old = get_path_aux_pp1(
os.path.basename(self.sar_meta.manifest_attrs["aux_pp1"])
f"doing recalibration with AUX_CAL = {path_aux_cal_new} & AUX_PP1 = {path_aux_pp1_new}"
)

#  1 - compute offboresight angle
@@ -986,10 +999,6 @@ def add_gains(self, new_aux_cal_name, new_aux_pp1_name):
self._dataset_recalibration
)

self.datatree["recalibration"].attrs["path_aux_cal_new"] = path_aux_cal_new
self.datatree["recalibration"].attrs["path_aux_pp1_new"] = path_aux_pp1_new
self.datatree["recalibration"].attrs["path_aux_cal_old"] = path_aux_cal_old
self.datatree["recalibration"].attrs["path_aux_pp1_old"] = path_aux_pp1_old
# return self._dataset

def apply_calibration_and_denoising(self):
@@ -1040,7 +1049,7 @@ def apply_calibration_and_denoising(self):
)

self._dataset = self._add_denoised(self._dataset)

for var_name, lut_name in self._map_var_lut.items():
var_name_raw = var_name + "_raw"
if var_name_raw in self._dataset:
@@ -1051,8 +1060,7 @@
"Skipping variable '%s' ('%s' lut is missing)"
% (var_name, lut_name)
)



self.datatree["measurement"] = self.datatree["measurement"].assign(
self._dataset
)
@@ -1121,7 +1129,7 @@ def _patch_lut(self, lut):
if self.sar_meta.swath == "WV":
if (
lut.name in ["noise_lut_azi", "noise_lut"]
and self.sar_meta.ipf in [2.9, 2.91]
and self.sar_meta.ipf_version in [2.9, 2.91]
and self.sar_meta.platform in ["SENTINEL-1A", "SENTINEL-1B"]
):
noise_calibration_cst_pp1 = {
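For context, the reworked block in `add_high_resolution_variables` now selects the newest matching auxiliary file from the dataframe at `config["path_dataframe_aux"]` and resolves both the old and new files to local paths before calling `add_gains()`. Below is a minimal sketch of that selection for AUX_CAL only, assuming the helpers live in `xsar.utils` (the diff imports them relatively) and that the auxiliary dataframe loads into pandas with the columns used above; the satellite name and the CSV loading are illustrative, not taken from the PR.

```python
import os

import pandas as pd

from xsar.utils import config, get_path_aux_cal  # assumed module path (the diff uses relative imports)

# Load the auxiliary-file dataframe; its on-disk format is not shown in the
# diff, so CSV is an assumption here.
dataframe_aux = pd.read_csv(config["path_dataframe_aux"])

# Keep only AUX_CAL entries for the product's satellite (placeholder name).
sel_cal = dataframe_aux.loc[
    (dataframe_aux.sat_name == "SENTINEL-1A")
    & (dataframe_aux.aux_type == "CAL")
]

# Newest entry first: latest validation date, then latest generation date.
sel_cal = sel_cal.sort_values(by=["validation_date", "generation_date"], ascending=False)
new_cal = sel_cal.iloc[0].aux_path

# Resolve the basename to a local path, as the new call sites above do before
# passing both old and new paths to add_gains().
path_aux_cal_new = get_path_aux_cal(os.path.basename(new_cal))
```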
36 changes: 28 additions & 8 deletions src/xsar/sentinel1_meta.py
@@ -79,6 +79,13 @@ def __init__(self, name):

self.manifest_attrs = self.reader.manifest_attrs

for attr in ['aux_cal', 'aux_pp1', 'aux_ins']:
if attr not in self.manifest_attrs:
self.manifest_attrs[attr] = None
else:
self.manifest_attrs[attr] = os.path.basename(
self.manifest_attrs[attr])

self.multidataset = False
"""True if multi dataset"""
self.subdatasets = gpd.GeoDataFrame(geometry=[], index=[])
@@ -101,8 +108,10 @@ def __init__(self, name):
)
except ValueError:
# not as many footprints than subdatasets count. (probably TOPS product)
self._submeta = [Sentinel1Meta(subds) for subds in datasets_names]
sub_footprints = [submeta.footprint for submeta in self._submeta]
self._submeta = [Sentinel1Meta(subds)
for subds in datasets_names]
sub_footprints = [
submeta.footprint for submeta in self._submeta]
self.subdatasets = gpd.GeoDataFrame(
geometry=sub_footprints, index=datasets_names
)
@@ -151,7 +160,8 @@ def _get_time_range(self):

def to_dict(self, keys="minimal"):

info_keys = {"minimal": ["ipf", "platform", "swath", "product", "pols"]}
info_keys = {"minimal": [
"ipf_version", "platform", "swath", "product", "pols"]}
info_keys["all"] = info_keys["minimal"] + [
"name",
"start_date",
@@ -160,6 +170,11 @@ def to_dict(self, keys="minimal"):
"coverage",
"orbit_pass",
"platform_heading",
"icid",
"aux_cal",
"aux_pp1",
"aux_ins",

] # 'pixel_line_m', 'pixel_sample_m',

if isinstance(keys, str):
Expand All @@ -172,7 +187,9 @@ def to_dict(self, keys="minimal"):
elif k in self.manifest_attrs.keys():
res_dict[k] = self.manifest_attrs[k]
else:
raise KeyError('Unable to find key/attr "%s" in Sentinel1Meta' % k)
raise KeyError(
'Unable to find key/attr "%s" in Sentinel1Meta' % k)

return res_dict

def annotation_angle(self, line, sample, angle):
@@ -240,7 +257,8 @@ def geoloc(self):
self._geoloc[ll].isel(line=a, sample=x).values
for a, x in [(0, 0), (0, -1), (-1, -1), (-1, 0)]
]
corners = list(zip(footprint_dict["longitude"], footprint_dict["latitude"]))
corners = list(
zip(footprint_dict["longitude"], footprint_dict["latitude"]))
p = Polygon(corners)
self._geoloc.attrs["footprint"] = p

@@ -272,10 +290,12 @@ def _to_rio_gcp(pt_geoloc):
for sample in self._geoloc.sample
]
# approx transform, from all gcps (inaccurate)
self._geoloc.attrs["approx_transform"] = rasterio.transform.from_gcps(gcps)
self._geoloc.attrs["approx_transform"] = rasterio.transform.from_gcps(
gcps)
for vv in self._geoloc:
if vv in self.xsd_definitions:
self._geoloc[vv].attrs["definition"] = str(self.xsd_definitions[vv])
self._geoloc[vv].attrs["definition"] = str(
self.xsd_definitions[vv])

return self._geoloc

@@ -315,7 +335,7 @@ def denoised(self):
return self.reader.denoised

@property
def ipf(self):
def ipf_version(self):
"""ipf version"""
return self.manifest_attrs["ipf_version"]

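For context, the changes to `sentinel1_meta.py` normalize the `aux_cal` / `aux_pp1` / `aux_ins` manifest attributes to basenames (or `None` when absent), rename the `ipf` property to `ipf_version`, and add `icid` plus the `aux_*` keys to `to_dict("all")`. A minimal usage sketch under those assumptions; the product path is a placeholder.

```python
from xsar.sentinel1_meta import Sentinel1Meta

# Placeholder product path: any Sentinel-1 SAFE product readable by xsar.
meta = Sentinel1Meta("path/to/S1A_product.SAFE")

# Property renamed in this PR (was `meta.ipf`).
print(meta.ipf_version)

# The 'all' key set now also reports icid and the aux_* basenames
# (None when an attribute is missing from the manifest).
info = meta.to_dict("all")
print(info["aux_cal"], info["aux_pp1"], info["aux_ins"], info["icid"])
```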