Skip to content

Commit

Permalink
Merge pull request #69 from umr-lops/newWW3hindcast
Browse files Browse the repository at this point in the history
activate the new directories tree YYYY/JJJ/..SAFE/
  • Loading branch information
agrouaze committed Feb 23, 2024
2 parents 700c652 + 30711dc commit 8c2aedc
Showing 1 changed file with 54 additions and 30 deletions.
84 changes: 54 additions & 30 deletions slcl1butils/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,13 @@
import logging
import zipfile
import fsspec
import datetime
import xarray as xr
import aiohttp
from slcl1butils.get_config import get_conf

config = get_conf()
logger = logging.getLogger('xsar.utils')
logger = logging.getLogger("xsar.utils")
logger.addHandler(logging.NullHandler())

mem_monitor = True
def netcdf_compliant(dataset):
    """
    Return a copy of *dataset* that can be safely written with ``to_netcdf``.

    Complex variables are split into two real-valued variables suffixed
    ``_Re`` / ``_Im`` (NetCDF has no complex dtype); ``int64`` variables are
    cast to ``int16`` and ``float64`` variables to ``float32``.

    Parameters
    ----------
    dataset : xarray.Dataset
        dataset possibly holding complex / int64 / float64 variables

    Returns
    -------
    xarray.Dataset
        NetCDF-compliant dataset
    """
    var_to_rm = list()
    var_to_add = list()
    for i in dataset.variables.keys():
        if dataset[i].dtype == complex or dataset[i].dtype == "complex64":
            re = dataset[i].real
            im = dataset[i].imag
            var_to_add.append({str(i) + "_Re": re, str(i) + "_Im": im})
            var_to_rm.append(str(i))
    # 'override' keeps the attrs/coords of the first dataset on conflict
    ds_to_save = xr.merge(
        [dataset.drop_vars(var_to_rm), *var_to_add], compat="override"
    )
    for vv in ds_to_save.variables.keys():
        if (
            ds_to_save[vv].dtype == "int64"
        ):  # to avoid ncview: netcdf_dim_value: unknown data type (10) for corner_line ...
            ds_to_save[vv] = ds_to_save[vv].astype(np.int16)
        elif ds_to_save[vv].dtype == "float64":
            ds_to_save[vv] = ds_to_save[vv].astype(
                np.float32
            )  # to reduce volume of output files
        else:
            logging.debug("%s is dtype %s", vv, ds_to_save[vv].dtype)
    return ds_to_save
def url_get(url, cache_dir=os.path.join(config['data_dir'], 'fsspec_cache')):


def url_get(url, cache_dir=os.path.join(config["data_dir"], "fsspec_cache")):
"""
    Get file from URL, using caching.
Parameters
Expand All @@ -70,11 +80,11 @@ def url_get(url, cache_dir=os.path.join(config['data_dir'], 'fsspec_cache')):
Due to fsspec, the returned filename won't match the remote one.
"""

if '://' in url:
if "://" in url:
with fsspec.open(
'filecache::%s' % url,
https={'client_kwargs': {'timeout': aiohttp.ClientTimeout(total=3600)}},
#filecache={'cache_storage': os.path.join(os.path.join(config['data_dir'], 'fsspec_cache'))}
"filecache::%s" % url,
https={"client_kwargs": {"timeout": aiohttp.ClientTimeout(total=3600)}},
# filecache={'cache_storage': os.path.join(os.path.join(config['data_dir'], 'fsspec_cache'))}
) as f:
fname = f.name
else:
def get_test_file(fname):
    """
    Download (if needed) and unzip a test product into the local assets dir.

    Parameters
    ----------
    fname : str
        basename of the remote test archive, without the ``.zip`` extension

    Returns
    -------
    str
        local path to the extracted file/directory
    """
    # res_path = config['data_dir']
    # NOTE(review): "assests" (sic) appears to match the repository's
    # directory name — confirm before fixing the spelling.
    res_path = os.path.join(
        os.path.dirname(os.path.dirname(slcl1butils.__file__)), "assests"
    )
    # base_url = 'https://cyclobs.ifremer.fr/static/sarwing_datarmor/xsardata'
    base_url = "https://cerweb.ifremer.fr/datarmor/sarwave/documentation/processor/sar/l1butils/example_products/iw/slc/l1b/"
    file_url = "%s/%s.zip" % (base_url, fname)
    final = os.path.join(res_path, fname)
    if not os.path.exists(os.path.join(res_path, fname)):
        warnings.warn("Downloading %s" % file_url)
        local_file = url_get(file_url)  # cached download via fsspec
        warnings.warn("Unzipping %s" % final)

        # shutil.move(local_file,final)
        with zipfile.ZipFile(local_file, "r") as zip_ref:
            zip_ref.extractall(res_path)
    return final


def get_memory_usage():
    """
    Return the current memory usage as a human-readable string.

    Returns
    -------
    str
        e.g. ``"RAM usage: 1.2 Go"`` (peak RSS on Unix, used virtual
        memory on Windows)
    """
    try:
        import resource

        # ru_maxrss is in kilobytes on Linux -> convert to Go
        memory_used_go = (
            resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1000.0 / 1000.0
        )
    except ImportError:  # on windows the resource module is not usable
        import psutil

        memory_used_go = psutil.virtual_memory().used / 1000 / 1000 / 1000.0
    str_mem = "RAM usage: %1.1f Go" % memory_used_go
    return str_mem


def xndindex(sizes):
    """
    Iterate over an N-dimensional index grid, xarray-style.

    Like :func:`numpy.ndindex`, but driven by a mapping of dimension
    name -> size, and yielding ``{dim_name: index}`` dicts directly
    usable with ``Dataset.isel``.

    Parameters
    ----------
    sizes : dict
        ordered mapping of dimension name to dimension size
        (e.g. ``ds.sizes``)

    Yields
    ------
    dict
        one ``{dim_name: int}`` mapping per grid point, last dimension
        varying fastest (np.ndindex order)
    """
    dims = tuple(sizes.keys())
    for idx in np.ndindex(tuple(sizes.values())):
        yield dict(zip(dims, idx))


def get_l1c_filepath(l1b_fullpath, version, format="nc", outputdir=None, makedir=True):
"""
Expand All @@ -164,7 +184,9 @@ def get_l1c_filepath(l1b_fullpath, version, format="nc", outputdir=None, makedir
pathout_root = run_directory.replace("l1b", "l1c")
else:
pathout_root = outputdir
pathout = os.path.join(pathout_root, version, safe_file)
# pathout = os.path.join(pathout_root, version, safe_file)
safe_start_date = datetime.datetime.strptime(safe_file.split('_')[5],'%Y%m%dT%H%M%S')
pathout = os.path.join(pathout_root, safe_start_date.strftime('%Y'),safe_start_date.strftime('%j'), safe_file)

# Output filename
l1c_full_path = os.path.join(
Expand All @@ -173,11 +195,13 @@ def get_l1c_filepath(l1b_fullpath, version, format="nc", outputdir=None, makedir
# add the product ID in the SAFE name
basesafe = os.path.basename(os.path.dirname(l1c_full_path))
basesafe0 = copy.copy(basesafe)
if len(basesafe.split("_")) == 10: # classical ESA SLC naming #:TODO once xsarslc will be updated this case could be removed
if (
len(basesafe.split("_")) == 10
): # classical ESA SLC naming #:TODO once xsarslc will be updated this case could be removed
basesafe = basesafe.replace(".SAFE", "_" + version.upper() + ".SAFE")
else: # there is already a product ID in the L1B SAFE name
lastpart = basesafe.split('_')[-1]
basesafe = basesafe.replace(lastpart,version.upper()+'.SAFE')
lastpart = basesafe.split("_")[-1]
basesafe = basesafe.replace(lastpart, version.upper() + ".SAFE")
l1c_full_path = l1c_full_path.replace(basesafe0, basesafe)

lastpiece = l1c_full_path.split("_")[-1]
Expand All @@ -188,4 +212,4 @@ def get_l1c_filepath(l1b_fullpath, version, format="nc", outputdir=None, makedir
logging.debug("File out: %s ", l1c_full_path)
if not os.path.exists(os.path.dirname(l1c_full_path)) and makedir:
os.makedirs(os.path.dirname(l1c_full_path), 0o0775)
return l1c_full_path
return l1c_full_path

0 comments on commit 8c2aedc

Please sign in to comment.