diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f74a9ae..0713437 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,69 +15,69 @@ jobs: steps: # Checkout the code - - name: Checkout code - uses: actions/checkout@v4 + - name: Checkout code + uses: actions/checkout@v4 - - uses: mamba-org/setup-micromamba@v1 - with: - micromamba-version: "1.5.9-1" # any version from https://github.com/mamba-org/micromamba-releases - channels: tcevaer, conda-forge - init-shell: bash - post-cleanup: "all" + - uses: mamba-org/setup-micromamba@v1 + with: + micromamba-version: "1.5.9-1" # any version from https://github.com/mamba-org/micromamba-releases + channels: tcevaer, conda-forge + init-shell: bash + post-cleanup: "all" - - name: Configure Conda channel priority to disabled - run: | - conda config --set channel_priority disabled + - name: Configure Conda channel priority to disabled + run: | + conda config --set channel_priority disabled - - name: Create environment and install tools - run: micromamba create -n grdwind_env pytest conda-build boa python=3.10 -y + - name: Create environment and install tools + run: micromamba create -n grdwind_env pytest conda-build boa python=3.10 -y - - name: Build package - run: | - cd recipe - eval "$(micromamba shell hook --shell bash)" - micromamba activate grdwind_env - conda mambabuild . + - name: Build package + run: | + cd recipe + eval "$(micromamba shell hook --shell bash)" + micromamba activate grdwind_env + conda mambabuild . 
- # Install the built package into the environment - - name: Install the built package - run: | - eval "$(micromamba shell hook --shell bash)" - micromamba activate grdwind_env - conda install --use-local grdwindinversion -y + # Install the built package into the environment + - name: Install the built package + run: | + eval "$(micromamba shell hook --shell bash)" + micromamba activate grdwind_env + conda install --use-local grdwindinversion -y - # Cache the test data if previously downloaded (up to 10 GB limit for the cache) - # WARNING : modify the key if the data is modified !! - - name: Cache test data - uses: actions/cache@v4 - id: cache - with: - path: ./test_data - key: test-data-v3 - restore-keys: test-data-v3 + # Cache the test data if previously downloaded (up to 10 GB limit for the cache) + # WARNING : modify the key if the data is modified !! + - name: Cache test data + uses: actions/cache@v4 + id: cache + with: + path: ./test_data + key: test-data-v3 + restore-keys: test-data-v3 - # Download test data if not already cached - - name: Download test data - if: steps.cache.outputs.cache-hit != 'true' # Only download if cache miss - run: | + # Download test data if not already cached + - name: Download test data + if: steps.cache.outputs.cache-hit != 'true' # Only download if cache miss + run: | mkdir -p ./test_data/ wget https://cloud.ifremer.fr/index.php/s/ExLQ2TnYAqozPWE/download -O /tmp/ecmwf.zip unzip /tmp/ecmwf.zip -d ./test_data/ wget https://cloud.ifremer.fr/index.php/s/kRgdOOPsjoZieZR/download -O /tmp/l1.zip unzip /tmp/l1.zip -d ./test_data/ - timeout-minutes: 200 # Adjust depending on the size of your data + timeout-minutes: 200 # Adjust depending on the size of your data - # Set up xsar configuration - - name: Setup xsar configuration - run: | + # Set up xsar configuration + - name: Setup xsar configuration + run: | mkdir -p ~/.xsar echo "data_dir: /tmp" > ~/.xsar/config.yaml echo "auxiliary_dir: ./test_data/auxiliary" >> ~/.xsar/config.yaml 
echo "path_dataframe_aux: ./test_data/auxiliary/active_aux.csv" >> ~/.xsar/config.yaml - # Set up grdwindinversion configuration - - name: Setup grdwindinversion configuration - run: | + # Set up grdwindinversion configuration + - name: Setup grdwindinversion configuration + run: | mkdir -p ~/.grdwindinversion echo "'ecmwf_0100_1h': ./test_data/ECMWF/forecast/hourly/0100deg/netcdf_light/%Y/%j/ECMWF_FORECAST_0100_%Y%m%d%H%M_10U_10V.nc" > ~/.grdwindinversion/data_config.yaml echo "'ecmwf_0125_1h': ./test_data/ECMWF/0.125deg/1h/forecasts/%Y/%j/ecmwf_%Y%m%d%H%M.nc" >> ~/.grdwindinversion/data_config.yaml @@ -85,9 +85,9 @@ jobs: #echo "'lut_cmod7_path': './test_data/GMFS/v1.6/GMF_cmod7_official/cmod7_and_python_script'" >> ~/.grdwindinversion/data_config.yaml #echo "'lut_ms1ahw_path': './test_data/GMFS/v1.6/GMF_cmodms1ahw'" >> ~/.grdwindinversion/data_config.yaml - # Run the tests - - name: Run tests - run: | + # Run the tests + - name: Run tests + run: | eval "$(micromamba shell hook --shell bash)" micromamba activate grdwind_env pytest diff --git a/README.md b/README.md index 4811e5d..0017a89 100644 --- a/README.md +++ b/README.md @@ -1,50 +1,38 @@ - # grdwindinversion - - [![Python Version](https://img.shields.io/pypi/pyversions/grdwindinversion.svg)](https://pypi.org/project/grdwindinversion/) [![Dependencies Status](https://img.shields.io/badge/dependencies-up%20to%20date-brightgreen.svg)](https://github.com/umr-lops/grdwindinversion/pulls?utf8=%E2%9C%93&q=is%3Apr%20author%3Aapp%2Fdependabot) - - - - Package to perform Wind inversion from GRD Level-1 SAR images - -* Free software: MIT license -* Documentation: https://grdwindinversion.readthedocs.io. - +- Free software: MIT license +- Documentation: https://grdwindinversion.readthedocs.io. 
## Usage +```python - ```python + SAR_L1-to-L2_wind_processor -h + usage: SAR_L1-to-L2_wind_processor [-h] --input_file INPUT_FILE [--config_file CONFIG_FILE] --outputdir OUTPUTDIR [--verbose] [--overwrite] - SAR_L1-to-L2_wind_processor -h - usage: SAR_L1-to-L2_wind_processor [-h] --input_file INPUT_FILE [--config_file CONFIG_FILE] --outputdir OUTPUTDIR [--verbose] [--overwrite] + Perform inversion from S1(L1-GRD) SAFE, L1-RCM, L1-RS2 ; using xsar/xsarsea tools - Perform inversion from S1(L1-GRD) SAFE, L1-RCM, L1-RS2 ; using xsar/xsarsea tools - - options: - -h, --help show this help message and exit - --input_file INPUT_FILE - input file path - --config_file CONFIG_FILE - config file path [if not provided will take config file based on input file] - --outputdir OUTPUTDIR - --verbose - --overwrite overwrite existing .nc files [default is False] + options: + -h, --help show this help message and exit + --input_file INPUT_FILE + input file path + --config_file CONFIG_FILE + config file path [if not provided will take config file based on input file] + --outputdir OUTPUTDIR + --verbose + --overwrite overwrite existing .nc files [default is False] ``` - ## Features This Python library (based on `xarray`) allows to perform wind inversion from level-1 GRD (projected magnitude image). Mission supported: - * Sentinel-1 - * RCM - * RadarSat-2 - +- Sentinel-1 +- RCM +- RadarSat-2 diff --git a/docs/conf.py b/docs/conf.py index f74f6a9..0e430cc 100755 --- a/docs/conf.py +++ b/docs/conf.py @@ -17,11 +17,11 @@ # relative to the documentation root, use os.path.abspath to make it # absolute, like shown here. 
# +import grdwindinversion import os import sys sys.path.insert(0, os.path.abspath('..')) -import grdwindinversion # -- General configuration --------------------------------------------- @@ -43,6 +43,12 @@ 'sphinxcontrib.mermaid', 'nbsphinx', 'jupyter_sphinx', + 'myst_parser' +] +myst_enable_extensions = [ + "deflist", # Pour les listes de définitions + "linkify", # Pour rendre les URLs cliquables automatiquement + "colon_fence", # Pour activer des blocs ::: comme Markdown GFM ] # order by source autodoc_member_order = 'bysource' @@ -69,7 +75,12 @@ # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +# source_suffix = '.rst' + +source_suffix = { + '.rst': 'restructuredtext', + '.md': 'markdown', +} # The master toctree document. master_doc = 'index' @@ -112,8 +123,11 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'alabaster' -#html_theme = 'sphinx_rtd_theme' +# html_theme = 'alabaster' +# html_theme = 'sphinx_rtd_theme' +# html_theme = 'furo' +html_theme = 'sphinx_rtd_theme' + # Theme options are theme-specific and customize the look and feel of a # theme further. For a list of options available for each theme, see the @@ -125,8 +139,12 @@ # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] -#html_style = 'css/grdwindinversion.css' +html_style = 'css/grdwindinversion.css' +# html_style = 'css/xsarsea.css' +html_theme_options = { + 'logo_only': False, +} # -- Options for HTMLHelp output --------------------------------------- # Output file base name for HTML help builder. 
@@ -187,6 +205,3 @@ 'One line description of project.', 'Miscellaneous'), ] - - - diff --git a/docs/examples/streaks-display.ipynb b/docs/examples/streaks-display.ipynb new file mode 100644 index 0000000..e197bba --- /dev/null +++ b/docs/examples/streaks-display.ipynb @@ -0,0 +1,322 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Compute and display streaks" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# test on an dummy GRD IW product\n", + "import xsar\n", + "filename = xsar.get_test_file('S1A_IW_GRDH_1SDV_20170907T103020_20170907T103045_018268_01EB76_Z010.SAFE')\n", + "outdir = \"/tmp/\"\n", + "config_path = \"/home/vincelhx/Documents/autoentreprise/IFREMER/libs/fork_grdwi/grdwindinversion/grdwindinversion/config_prod_streaks.yaml\"\n", + "overwrite = True\n", + "resolution = '1000m'" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import logging\n", + "logger = logging.getLogger('grdwindinversion.gradientFeatures')\n", + "logger.setLevel(logging.DEBUG)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## preprocess & load config" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from grdwindinversion.inversion import preprocess " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "xr_dataset, out_file, config = preprocess(\n", + " filename, outdir, config_path, overwrite, False, resolution)\n", + "\n", + "model_co = config[\"l2_params\"][\"model_co\"]\n", + "model_cross = config[\"l2_params\"][\"model_cross\"]\n", + "copol = config[\"l2_params\"][\"copol\"]\n", + "crosspol = config[\"l2_params\"][\"crosspol\"]\n", + "copol_gmf = config[\"l2_params\"][\"copol_gmf\"]\n", + "crosspol_gmf = config[\"l2_params\"][\"crosspol_gmf\"]\n", + 
"dual_pol = config[\"l2_params\"][\"dual_pol\"]\n", + "ancillary_name = config[\"ancillary\"]\n", + "sensor_longname = config[\"sensor_longname\"]\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## process gradients\n", + "- in xr_dataset, it will add variables related to heterogeneity mask an\n", + "- it will create xr_dataset_streaks" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from grdwindinversion.inversion import process_gradients\n", + "\n", + "if config[\"add_gradientsfeatures\"]:\n", + " xr_dataset, xr_dataset_streaks = process_gradients(\n", + " xr_dataset, config)\n", + "else:\n", + " xr_dataset_streaks = None" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "xr_dataset_streaks" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Plotting the results" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "def get_uv_from_dir(wdir):\n", + " \"\"\"\n", + " Get u, v from wind direction\n", + " \n", + " Parameters\n", + " ----------\n", + " wdir : float\n", + " Wind direction in degrees, meteo convention\n", + " \n", + " Returns\n", + " -------\n", + " u : float\n", + " u component of the wind\n", + " v : float\n", + " v component of the wind\n", + " u_norm : float\n", + " Normalized u component of the wind\n", + " v_norm : float\n", + " Normalized v component of the wind \n", + " \"\"\"\n", + " u = np.cos(np.radians(270-wdir))\n", + " v = np.sin(np.radians(270-wdir))\n", + " sq_mean = np.sqrt(u**2+v**2)\n", + " return u, v, u/sq_mean, v/sq_mean" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import cartopy.crs as ccrs\n", + "from cartopy.mpl.gridliner 
import LONGITUDE_FORMATTER, LATITUDE_FORMATTER\n", + "import matplotlib\n", + "cmin=0;cmax=30\n", + "norm = matplotlib.colors.Normalize(vmin=cmin,vmax=cmax,clip=False)\n", + "import os \n", + "\n", + "#ancillary \n", + "ds = xr_dataset\n", + "p1 = int(ds.line.size/16)\n", + "p2 = int(ds.sample.size/16)\n", + "\n", + "lons = ds.longitude[::p1,::p2].values.flatten()\n", + "lats = ds.latitude[::p1,::p2].values.flatten()\n", + "winddir_ancillary = ds.ancillary_wind_direction[::p1,::p2]\n", + "u_ancillary, v_ancllary, u_norm_ancillary, v_norm_ancillary = get_uv_from_dir(winddir_ancillary)\n", + "\n", + "longitude = xr_dataset_streaks['longitude'].values\n", + "latitude = xr_dataset_streaks['latitude'].values" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def plot_wind_from_streaks(streaks_dir, **kwargs):\n", + " \"\"\"\n", + " Plot wind from streaks\n", + " \n", + " Parameters\n", + " ----------\n", + " streaks_dir : xr.Dataset\n", + " Dataset containing streaks direction in degrees in meteo convention\n", + " \n", + " **kwargs : dict\n", + " window_size : int\n", + " Size of the window for smoothing the streaks\n", + " downscale_factor : int\n", + " Downscale factor for smoothing the streaks\n", + " pol : str\n", + " Polarization to plot\n", + " varname : str\n", + " Variable name to plot\n", + " \"\"\"\n", + " \n", + " \n", + " # handle kwargs\n", + " window_size = kwargs.get('window_size', None)\n", + " downscale_factor = kwargs.get('downscale_factor', None)\n", + " pol = kwargs.get('pol', None)\n", + " pol_display = kwargs.get('pol', 'VV')\n", + " varname = kwargs.get('varname', None)\n", + " savefig = kwargs.get('savefig', False)\n", + " savedir = kwargs.get('savedir', './tmp')\n", + " \n", + " # Plot\n", + " fig = plt.figure(figsize=(10, 9))\n", + " ax = plt.axes(projection=ccrs.PlateCarree())\n", + "\n", + " # Calculer les composantes u et v à partir des angles streaks_dir\n", + " u, v, u_norm, 
v_norm = get_uv_from_dir(streaks_dir)\n", + "\n", + " # Display streaks direction\n", + " #ax.quiver(longitude, latitude, u_norm, v_norm, edgecolors='k', norm=norm, pivot= 'mid', scale_units='xy', scale=4., zorder=10, width=0.1/25,transform = ccrs.PlateCarree(), color = 'red', label = 'owiWindStreaks')\n", + " ax.quiver(longitude, latitude, u_norm, v_norm, edgecolors='k', norm=norm, pivot= 'mid', scale_units='xy', scale=8., zorder=10, width=0.1/25,transform = ccrs.PlateCarree(), color = 'red', label = 'owiWindStreaks')\n", + "\n", + " # Display ancillary wind direction\n", + " #ax.quiver(lons, lats, u_norm_ancillary, v_norm_ancillary, edgecolors='k', norm=norm, pivot= 'mid', scale_units='xy', scale=4., zorder=10, width=0.1/25,transform = ccrs.PlateCarree(), color='blue', label = \"owiAncillaryWindDirection\")\n", + " ax.quiver(lons, lats, u_norm_ancillary, v_norm_ancillary, edgecolors='k', norm=norm, pivot= 'mid', scale_units='xy', scale=8., zorder=10, width=0.1/25,transform = ccrs.PlateCarree(), color='blue', label = \"owiAncillaryWindDirection\")\n", + "\n", + " # Display sigma0 copol\n", + " im = ax.pcolormesh(xr_dataset.longitude,xr_dataset.latitude, xr_dataset.sigma0.sel(pol=pol_display),cmap='gray')\n", + " cbar = plt.colorbar(im, ax =ax)\n", + " cbar.set_label(f'Sigma0, pol = {pol_display}')\n", + " \n", + " # Expand the map by 1 degree in height\n", + " lon_min, lon_max = np.nanmin(lons) - .2, np.nanmax(lons) + .2 \n", + " lat_min, lat_max = np.nanmin(lats) - .2, np.nanmax(lats) + .7\n", + " ax.set_extent([lon_min, lon_max, lat_min, lat_max], crs=ccrs.PlateCarree())\n", + "\n", + "\n", + " # Configurer les axes\n", + " ax.set_xlabel('Longitude')\n", + " ax.set_ylabel('Latitude')\n", + " ax.set_aspect('equal', adjustable='box')\n", + "\n", + " gl = ax.gridlines(draw_labels=True)\n", + " gl.top_labels = False\n", + " gl.right_labels = False\n", + " gl.xformatter = LONGITUDE_FORMATTER\n", + " gl.yformatter = LATITUDE_FORMATTER\n", + " 
ax.set_title(f\"{os.path.basename(filename)}\\nvarname = {varname}\\nwindow size = {window_size} ; downscale_factor = {downscale_factor} ; pol= {pol}\")\n", + " legend = ax.legend(facecolor='white', loc = 'upper right')\n", + " legend.set_zorder(100)\n", + " if savefig:\n", + " os.makedirs(savedir, exist_ok=True)\n", + " plt.savefig(os.path.join(savedir, f\"wind_streaks_{varname}_{window_size}_{downscale_factor}_{pol}.png\"))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "savefig = False\n", + "\n", + "varname = 'dir_mean_smooth'\n", + "kwargs = {'varname' : varname, 'savefig': savefig, 'savedir': \"./tmp\"}\n", + "plot_wind_from_streaks(xr_dataset_streaks[varname], **kwargs)\n", + "\n", + "varname = 'dir_smooth_mean'\n", + "kwargs = {'varname' : varname, 'savefig': savefig, 'savedir': \"./tmp\"}\n", + "plot_wind_from_streaks(xr_dataset_streaks[varname], **kwargs)\n", + "\n", + "\n", + "# plot individual solutions \n", + "varname = 'dir_smooth'\n", + "for downscale_factor in xr_dataset_streaks.downscale_factor.values:\n", + " for pol in xr_dataset_streaks.pol.values:\n", + " for window_size in xr_dataset_streaks.window_size.values:\n", + " streaks_dir = xr_dataset_streaks[varname].sel(\n", + " downscale_factor=downscale_factor, pol=pol, window_size=window_size).values\n", + " kwargs = {'varname' : varname , 'window_size': window_size, 'downscale_factor': downscale_factor, 'pol': pol, 'savefig': savefig, 'savedir': \"./tmp\"}\n", + " plot_wind_from_streaks(streaks_dir, **kwargs)\n", + "\n", + "# plot one individual solution\n", + "# downscale_factor = 1\n", + "# pol = 'VV'\n", + "# window_size = 3200\n", + "\n", + "# streaks_dir = xr_dataset_streaks[varname].sel(\n", + "# downscale_factor=downscale_factor, pol=pol, window_size=window_size).values\n", + "# kwargs = {'varname' : varname , 'window_size': window_size, 'downscale_factor': downscale_factor, 'pol': pol, 'savefig': True, 'savedir': 
\"./tmp\"}\n", + "# plot_wind_from_streaks(streaks_dir, **kwargs)\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "env_xsar", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.15" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/docs/examples/wind-inversion-from-grd.ipynb b/docs/examples/wind-inversion-from-grd.ipynb index fac82de..5de4fc8 100644 --- a/docs/examples/wind-inversion-from-grd.ipynb +++ b/docs/examples/wind-inversion-from-grd.ipynb @@ -18,7 +18,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "id": "ef483f15-4690-460b-b688-fa966bab8cd8", "metadata": {}, "outputs": [], @@ -28,46 +28,10 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "id": "e04fa1b0-eb49-4e45-af0f-0be798e26f95", "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/home/vincelhx/Documents/autoentreprise/IFREMER/libs/fork_xsar_vinc/xsar/src/xsar/xsar.py:212: UserWarning: Downloading https://cyclobs.ifremer.fr/static/sarwing_datarmor/xsardata/S1A_IW_GRDH_1SDV_20170907T103020_20170907T103045_018268_01EB76_Z010.SAFE.zip\n", - " warnings.warn(\"Downloading %s\" % file_url)\n" - ] - }, - { - "ename": "ClientResponseError", - "evalue": "503, message='Service Unavailable', url=URL('https://cyclobs.ifremer.fr/static/sarwing_datarmor/xsardata/S1A_IW_GRDH_1SDV_20170907T103020_20170907T103045_018268_01EB76_Z010.SAFE.zip')", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - 
"\u001b[0;31mClientResponseError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[6], line 2\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;66;03m# test on an dummy GRD IW product\u001b[39;00m\n\u001b[0;32m----> 2\u001b[0m input_file \u001b[38;5;241m=\u001b[39m \u001b[43mxsar\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_test_file\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mS1A_IW_GRDH_1SDV_20170907T103020_20170907T103045_018268_01EB76_Z010.SAFE\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/Documents/autoentreprise/IFREMER/libs/fork_xsar_vinc/xsar/src/xsar/xsar.py:213\u001b[0m, in \u001b[0;36mget_test_file\u001b[0;34m(fname)\u001b[0m\n\u001b[1;32m 211\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m os\u001b[38;5;241m.\u001b[39mpath\u001b[38;5;241m.\u001b[39mexists(os\u001b[38;5;241m.\u001b[39mpath\u001b[38;5;241m.\u001b[39mjoin(res_path, fname)):\n\u001b[1;32m 212\u001b[0m warnings\u001b[38;5;241m.\u001b[39mwarn(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mDownloading \u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;241m%\u001b[39m file_url)\n\u001b[0;32m--> 213\u001b[0m local_file \u001b[38;5;241m=\u001b[39m \u001b[43murl_get\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfile_url\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 214\u001b[0m warnings\u001b[38;5;241m.\u001b[39mwarn(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mUnzipping \u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;241m%\u001b[39m os\u001b[38;5;241m.\u001b[39mpath\u001b[38;5;241m.\u001b[39mjoin(res_path, fname))\n\u001b[1;32m 215\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m zipfile\u001b[38;5;241m.\u001b[39mZipFile(local_file, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124m'\u001b[39m) \u001b[38;5;28;01mas\u001b[39;00m zip_ref:\n", - "File 
\u001b[0;32m~/Documents/autoentreprise/IFREMER/libs/fork_xsar_vinc/xsar/src/xsar/utils.py:664\u001b[0m, in \u001b[0;36murl_get\u001b[0;34m(url, cache_dir)\u001b[0m\n\u001b[1;32m 640\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 641\u001b[0m \u001b[38;5;124;03mGet fil from url, using caching.\u001b[39;00m\n\u001b[1;32m 642\u001b[0m \n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 660\u001b[0m \u001b[38;5;124;03mDue to fsspec, the returned filename won't match the remote one.\u001b[39;00m\n\u001b[1;32m 661\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 663\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m://\u001b[39m\u001b[38;5;124m'\u001b[39m \u001b[38;5;129;01min\u001b[39;00m url:\n\u001b[0;32m--> 664\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m fsspec\u001b[38;5;241m.\u001b[39mopen(\n\u001b[1;32m 665\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mfilecache::\u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m'\u001b[39m \u001b[38;5;241m%\u001b[39m url,\n\u001b[1;32m 666\u001b[0m https\u001b[38;5;241m=\u001b[39m{\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mclient_kwargs\u001b[39m\u001b[38;5;124m'\u001b[39m: {\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtimeout\u001b[39m\u001b[38;5;124m'\u001b[39m: aiohttp\u001b[38;5;241m.\u001b[39mClientTimeout(total\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m3600\u001b[39m)}},\n\u001b[1;32m 667\u001b[0m filecache\u001b[38;5;241m=\u001b[39m{\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mcache_storage\u001b[39m\u001b[38;5;124m'\u001b[39m: os\u001b[38;5;241m.\u001b[39mpath\u001b[38;5;241m.\u001b[39mjoin(os\u001b[38;5;241m.\u001b[39mpath\u001b[38;5;241m.\u001b[39mjoin(config[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mdata_dir\u001b[39m\u001b[38;5;124m'\u001b[39m], \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mfsspec_cache\u001b[39m\u001b[38;5;124m'\u001b[39m))}\n\u001b[1;32m 668\u001b[0m ) \u001b[38;5;28;01mas\u001b[39;00m f:\n\u001b[1;32m 
669\u001b[0m fname \u001b[38;5;241m=\u001b[39m f\u001b[38;5;241m.\u001b[39mname\n\u001b[1;32m 670\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/fsspec/core.py:102\u001b[0m, in \u001b[0;36mOpenFile.__enter__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 99\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__enter__\u001b[39m(\u001b[38;5;28mself\u001b[39m):\n\u001b[1;32m 100\u001b[0m mode \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mmode\u001b[38;5;241m.\u001b[39mreplace(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mt\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m\"\u001b[39m)\u001b[38;5;241m.\u001b[39mreplace(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mb\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m\"\u001b[39m) \u001b[38;5;241m+\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mb\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m--> 102\u001b[0m f \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfs\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mopen\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpath\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmode\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmode\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 104\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfobjects \u001b[38;5;241m=\u001b[39m [f]\n\u001b[1;32m 106\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcompression \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/fsspec/implementations/cached.py:444\u001b[0m, in 
\u001b[0;36mCachingFileSystem.__getattribute__..\u001b[0;34m(*args, **kw)\u001b[0m\n\u001b[1;32m 408\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__getattribute__\u001b[39m(\u001b[38;5;28mself\u001b[39m, item):\n\u001b[1;32m 409\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m item \u001b[38;5;129;01min\u001b[39;00m [\n\u001b[1;32m 410\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mload_cache\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 411\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m_open\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 442\u001b[0m \u001b[38;5;66;03m# all the methods defined in this class. Note `open` here, since\u001b[39;00m\n\u001b[1;32m 443\u001b[0m \u001b[38;5;66;03m# it calls `_open`, but is actually in superclass\u001b[39;00m\n\u001b[0;32m--> 444\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;01mlambda\u001b[39;00m \u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkw: \u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mtype\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mitem\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[38;5;21;43m__get__\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 445\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkw\u001b[49m\n\u001b[1;32m 446\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 447\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m item \u001b[38;5;129;01min\u001b[39;00m [\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m__reduce_ex__\u001b[39m\u001b[38;5;124m\"\u001b[39m]:\n\u001b[1;32m 448\u001b[0m 
\u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mAttributeError\u001b[39;00m\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/fsspec/spec.py:1151\u001b[0m, in \u001b[0;36mAbstractFileSystem.open\u001b[0;34m(self, path, mode, block_size, cache_options, compression, **kwargs)\u001b[0m\n\u001b[1;32m 1149\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 1150\u001b[0m ac \u001b[38;5;241m=\u001b[39m kwargs\u001b[38;5;241m.\u001b[39mpop(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mautocommit\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_intrans)\n\u001b[0;32m-> 1151\u001b[0m f \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_open\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1152\u001b[0m \u001b[43m \u001b[49m\u001b[43mpath\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1153\u001b[0m \u001b[43m \u001b[49m\u001b[43mmode\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmode\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1154\u001b[0m \u001b[43m \u001b[49m\u001b[43mblock_size\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mblock_size\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1155\u001b[0m \u001b[43m \u001b[49m\u001b[43mautocommit\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mac\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1156\u001b[0m \u001b[43m \u001b[49m\u001b[43mcache_options\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcache_options\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1157\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1158\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1159\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m compression \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m 
\u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 1160\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mfsspec\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mcompression\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m compr\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/fsspec/implementations/cached.py:444\u001b[0m, in \u001b[0;36mCachingFileSystem.__getattribute__..\u001b[0;34m(*args, **kw)\u001b[0m\n\u001b[1;32m 408\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__getattribute__\u001b[39m(\u001b[38;5;28mself\u001b[39m, item):\n\u001b[1;32m 409\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m item \u001b[38;5;129;01min\u001b[39;00m [\n\u001b[1;32m 410\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mload_cache\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 411\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m_open\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 442\u001b[0m \u001b[38;5;66;03m# all the methods defined in this class. 
Note `open` here, since\u001b[39;00m\n\u001b[1;32m 443\u001b[0m \u001b[38;5;66;03m# it calls `_open`, but is actually in superclass\u001b[39;00m\n\u001b[0;32m--> 444\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;01mlambda\u001b[39;00m \u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkw: \u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mtype\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mitem\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[38;5;21;43m__get__\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 445\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkw\u001b[49m\n\u001b[1;32m 446\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 447\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m item \u001b[38;5;129;01min\u001b[39;00m [\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m__reduce_ex__\u001b[39m\u001b[38;5;124m\"\u001b[39m]:\n\u001b[1;32m 448\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mAttributeError\u001b[39;00m\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/fsspec/implementations/cached.py:695\u001b[0m, in \u001b[0;36mWholeFileCacheFileSystem._open\u001b[0;34m(self, path, mode, **kwargs)\u001b[0m\n\u001b[1;32m 693\u001b[0m f2\u001b[38;5;241m.\u001b[39mwrite(data)\n\u001b[1;32m 694\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m--> 695\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfs\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpath\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43mfn\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 696\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39msave_cache()\n\u001b[1;32m 697\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_open(path, mode)\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/fsspec/asyn.py:115\u001b[0m, in \u001b[0;36msync_wrapper..wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 112\u001b[0m \u001b[38;5;129m@functools\u001b[39m\u001b[38;5;241m.\u001b[39mwraps(func)\n\u001b[1;32m 113\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mwrapper\u001b[39m(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[1;32m 114\u001b[0m \u001b[38;5;28mself\u001b[39m \u001b[38;5;241m=\u001b[39m obj \u001b[38;5;129;01mor\u001b[39;00m args[\u001b[38;5;241m0\u001b[39m]\n\u001b[0;32m--> 115\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43msync\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mloop\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mfunc\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/fsspec/asyn.py:100\u001b[0m, in \u001b[0;36msync\u001b[0;34m(loop, func, timeout, *args, **kwargs)\u001b[0m\n\u001b[1;32m 98\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m FSTimeoutError \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mreturn_result\u001b[39;00m\n\u001b[1;32m 99\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(return_result, \u001b[38;5;167;01mBaseException\u001b[39;00m):\n\u001b[0;32m--> 100\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m 
return_result\n\u001b[1;32m 101\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 102\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m return_result\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/fsspec/asyn.py:55\u001b[0m, in \u001b[0;36m_runner\u001b[0;34m(event, coro, result, timeout)\u001b[0m\n\u001b[1;32m 53\u001b[0m coro \u001b[38;5;241m=\u001b[39m asyncio\u001b[38;5;241m.\u001b[39mwait_for(coro, timeout\u001b[38;5;241m=\u001b[39mtimeout)\n\u001b[1;32m 54\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m---> 55\u001b[0m result[\u001b[38;5;241m0\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mawait\u001b[39;00m coro\n\u001b[1;32m 56\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m ex:\n\u001b[1;32m 57\u001b[0m result[\u001b[38;5;241m0\u001b[39m] \u001b[38;5;241m=\u001b[39m ex\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/fsspec/asyn.py:562\u001b[0m, in \u001b[0;36mAsyncFileSystem._get\u001b[0;34m(self, rpath, lpath, recursive, callback, **kwargs)\u001b[0m\n\u001b[1;32m 560\u001b[0m callback\u001b[38;5;241m.\u001b[39mbranch(rpath, lpath, kwargs)\n\u001b[1;32m 561\u001b[0m coros\u001b[38;5;241m.\u001b[39mappend(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_get_file(rpath, lpath, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs))\n\u001b[0;32m--> 562\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;01mawait\u001b[39;00m _run_coros_in_chunks(\n\u001b[1;32m 563\u001b[0m coros, batch_size\u001b[38;5;241m=\u001b[39mbatch_size, callback\u001b[38;5;241m=\u001b[39mcallback\n\u001b[1;32m 564\u001b[0m )\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/fsspec/asyn.py:246\u001b[0m, in \u001b[0;36m_run_coros_in_chunks\u001b[0;34m(coros, batch_size, callback, timeout, return_exceptions, nofiles)\u001b[0m\n\u001b[1;32m 240\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m callback 
\u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m _DEFAULT_CALLBACK:\n\u001b[1;32m 241\u001b[0m [\n\u001b[1;32m 242\u001b[0m t\u001b[38;5;241m.\u001b[39madd_done_callback(\u001b[38;5;28;01mlambda\u001b[39;00m \u001b[38;5;241m*\u001b[39m_, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39m__: callback\u001b[38;5;241m.\u001b[39mrelative_update(\u001b[38;5;241m1\u001b[39m))\n\u001b[1;32m 243\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m t \u001b[38;5;129;01min\u001b[39;00m chunk\n\u001b[1;32m 244\u001b[0m ]\n\u001b[1;32m 245\u001b[0m results\u001b[38;5;241m.\u001b[39mextend(\n\u001b[0;32m--> 246\u001b[0m \u001b[38;5;28;01mawait\u001b[39;00m asyncio\u001b[38;5;241m.\u001b[39mgather(\u001b[38;5;241m*\u001b[39mchunk, return_exceptions\u001b[38;5;241m=\u001b[39mreturn_exceptions),\n\u001b[1;32m 247\u001b[0m )\n\u001b[1;32m 248\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m results\n", - "File \u001b[0;32m~/miniconda3/envs/xsar_N3_local/lib/python3.10/asyncio/tasks.py:408\u001b[0m, in \u001b[0;36mwait_for\u001b[0;34m(fut, timeout)\u001b[0m\n\u001b[1;32m 405\u001b[0m loop \u001b[38;5;241m=\u001b[39m events\u001b[38;5;241m.\u001b[39mget_running_loop()\n\u001b[1;32m 407\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m timeout \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m--> 408\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;01mawait\u001b[39;00m fut\n\u001b[1;32m 410\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m timeout \u001b[38;5;241m<\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0\u001b[39m:\n\u001b[1;32m 411\u001b[0m fut \u001b[38;5;241m=\u001b[39m ensure_future(fut, loop\u001b[38;5;241m=\u001b[39mloop)\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/fsspec/implementations/http.py:248\u001b[0m, in \u001b[0;36mHTTPFileSystem._get_file\u001b[0;34m(self, rpath, lpath, chunk_size, callback, **kwargs)\u001b[0m\n\u001b[1;32m 245\u001b[0m size \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 247\u001b[0m callback\u001b[38;5;241m.\u001b[39mset_size(size)\n\u001b[0;32m--> 248\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_raise_not_found_for_status\u001b[49m\u001b[43m(\u001b[49m\u001b[43mr\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrpath\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 249\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m isfilelike(lpath):\n\u001b[1;32m 250\u001b[0m outfile \u001b[38;5;241m=\u001b[39m lpath\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/fsspec/implementations/http.py:214\u001b[0m, in \u001b[0;36mHTTPFileSystem._raise_not_found_for_status\u001b[0;34m(self, response, url)\u001b[0m\n\u001b[1;32m 212\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m response\u001b[38;5;241m.\u001b[39mstatus \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m404\u001b[39m:\n\u001b[1;32m 213\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mFileNotFoundError\u001b[39;00m(url)\n\u001b[0;32m--> 214\u001b[0m \u001b[43mresponse\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mraise_for_status\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/aiohttp/client_reqrep.py:1005\u001b[0m, in \u001b[0;36mClientResponse.raise_for_status\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 1003\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mreason \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1004\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mrelease()\n\u001b[0;32m-> 1005\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m ClientResponseError(\n\u001b[1;32m 1006\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mrequest_info,\n\u001b[1;32m 1007\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mhistory,\n\u001b[1;32m 
1008\u001b[0m status\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstatus,\n\u001b[1;32m 1009\u001b[0m message\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mreason,\n\u001b[1;32m 1010\u001b[0m headers\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mheaders,\n\u001b[1;32m 1011\u001b[0m )\n", - "\u001b[0;31mClientResponseError\u001b[0m: 503, message='Service Unavailable', url=URL('https://cyclobs.ifremer.fr/static/sarwing_datarmor/xsardata/S1A_IW_GRDH_1SDV_20170907T103020_20170907T103045_018268_01EB76_Z010.SAFE.zip')" - ] - } - ], + "outputs": [], "source": [ "# test on an dummy GRD IW product\n", "input_file = xsar.get_test_file('S1A_IW_GRDH_1SDV_20170907T103020_20170907T103045_018268_01EB76_Z010.SAFE')\n" @@ -88,8 +52,7 @@ "metadata": {}, "outputs": [], "source": [ - "from grdwindinversion.inversion import makeL2 \n", - "\n" + "from grdwindinversion.inversion import makeL2 " ] }, { @@ -105,15 +68,7 @@ "execution_count": null, "id": "f95ee426-0b46-4d39-ae90-241e8bbaa332", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "True\n" - ] - } - ], + "outputs": [], "source": [ "import grdwindinversion\n", "import os\n", @@ -137,21 +92,9 @@ "execution_count": null, "id": "b90c78bb-8643-4d64-a463-28fcb68d9b38", "metadata": {}, - "outputs": [ - { - "ename": "NameError", - "evalue": "name 'input_file' is not defined", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[4], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m out_file,outds \u001b[38;5;241m=\u001b[39m makeL2(\u001b[43minput_file\u001b[49m,out_folder, 
config_path\u001b[38;5;241m=\u001b[39mconfig_file_s1,overwrite\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[1;32m 2\u001b[0m outds\n", - "\u001b[0;31mNameError\u001b[0m: name 'input_file' is not defined" - ] - } - ], + "outputs": [], "source": [ - "out_file,outds = makeL2(input_file,out_folder, config_path=config_file_s1,overwrite=True)\n", + "outfile,outds = makeL2(input_file,out_folder, config_path=config_file_s1,overwrite=True)\n", "outds" ] }, @@ -161,15 +104,9 @@ "id": "91137cfe", "metadata": {}, "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9a83becc", - "metadata": {}, - "outputs": [], - "source": [] + "source": [ + "outds.owiWindSpeed.plot() " + ] } ], "metadata": { @@ -188,7 +125,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.12" + "version": "3.10.15" } }, "nbformat": 4, diff --git a/docs/index.rst b/docs/index.rst index 00275b7..0347d05 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -23,10 +23,12 @@ Functions description :maxdepth: 2 :caption: Contents: - readme + ../README installation usage examples/wind-inversion-from-grd + examples/streaks-display + algorithm modules contributing diff --git a/docs/readme.rst b/docs/readme.rst deleted file mode 100644 index 72a3355..0000000 --- a/docs/readme.rst +++ /dev/null @@ -1 +0,0 @@ -.. 
include:: ../README.rst diff --git a/grdwindinversion/config_prod.yaml b/grdwindinversion/config_prod.yaml index 4e34246..8d09e83 100644 --- a/grdwindinversion/config_prod.yaml +++ b/grdwindinversion/config_prod.yaml @@ -1,6 +1,9 @@ no_subdir: True winddir_convention: "meteorological" +add_gradientsfeatures: False +add_nrcs_model: False S1A: + GMF_HH_NAME: "nc_lut_gmf_cmod5n_Rhigh_hh_mouche1" GMF_VV_NAME: "gmf_cmod5n" GMF_VH_NAME: "gmf_s1_v2" dsig_VH_NAME: "gmf_s1_v2" @@ -12,6 +15,7 @@ S1A: phi_step: 1.0 resolution: "high" S1B: + GMF_HH_NAME: "nc_lut_gmf_cmod5n_Rhigh_hh_mouche1" GMF_VV_NAME: "gmf_cmod5n" GMF_VH_NAME: "gmf_s1_v2" dsig_VH_NAME: "gmf_s1_v2" @@ -23,6 +27,7 @@ S1B: phi_step: 1.0 resolution: "high" RS2: + GMF_HH_NAME: "nc_lut_gmf_cmod5n_Rhigh_hh_mouche1" GMF_VV_NAME: "gmf_cmod5n" GMF_VH_NAME: "gmf_rs2_v2" dsig_VH_NAME: "gmf_rs2_v2" @@ -34,6 +39,7 @@ RS2: phi_step: 1.0 resolution: "high" RCM: + GMF_HH_NAME: "nc_lut_gmf_cmod5n_Rhigh_hh_mouche1" GMF_VV_NAME: "gmf_cmod5n" GMF_VH_NAME: "gmf_rcm_noaa" dsig_VH_NAME: "gmf_s1_v2" diff --git a/grdwindinversion/config_prod_recal.yaml b/grdwindinversion/config_prod_recal.yaml index 98fedfb..471cf55 100644 --- a/grdwindinversion/config_prod_recal.yaml +++ b/grdwindinversion/config_prod_recal.yaml @@ -1,16 +1,18 @@ no_subdir: True S1A: + GMF_HH_NAME: "nc_lut_gmf_cmod5n_Rhigh_hh_mouche1" GMF_VV_NAME: "gmf_cmod5n" GMF_VH_NAME: "gmf_s1_v2" dsig_VH_NAME: "gmf_s1_v2" apply_flattening: True recalibration: True ancillary: "ecmwf" - inc_step: 0.1 - wspd_step: 0.1 - phi_step: 1.0 + inc_step: 0.3 + wspd_step: 0.3 + phi_step: 2.0 resolution: "high" S1B: + GMF_HH_NAME: "nc_lut_gmf_cmod5n_Rhigh_hh_mouche1" GMF_VV_NAME: "gmf_cmod5n" GMF_VH_NAME: "gmf_s1_v2" dsig_VH_NAME: "gmf_s1_v2" @@ -22,6 +24,7 @@ S1B: phi_step: 1.0 resolution: "high" RS2: + GMF_HH_NAME: "nc_lut_gmf_cmod5n_Rhigh_hh_mouche1" GMF_VV_NAME: "gmf_cmod5n" GMF_VH_NAME: "gmf_rs2_v2" dsig_VH_NAME: "gmf_rs2_v2" @@ -33,6 +36,7 @@ RS2: phi_step: 1.0 resolution: "high" RCM: 
+ GMF_HH_NAME: "nc_lut_gmf_cmod5n_Rhigh_hh_mouche1" GMF_VV_NAME: "gmf_cmod5n" GMF_VH_NAME: "gmf_rcm_noaa" dsig_VH_NAME: "gmf_s1_v2" diff --git a/grdwindinversion/config_prod_recal_streaks_nrcsmod.yaml b/grdwindinversion/config_prod_recal_streaks_nrcsmod.yaml new file mode 100644 index 0000000..9f5cdb8 --- /dev/null +++ b/grdwindinversion/config_prod_recal_streaks_nrcsmod.yaml @@ -0,0 +1,48 @@ +no_subdir: True +winddir_convention: "meteorological" +add_gradientsfeatures: True +add_nrcs_model: True +S1A: + GMF_VV_NAME: "gmf_cmod5n" + GMF_VH_NAME: "gmf_s1_v2" + dsig_VH_NAME: "gmf_s1_v2" + apply_flattening: True + recalibration: True + ancillary: "ecmwf" + inc_step: 0.1 + wspd_step: 0.1 + phi_step: 1.0 + resolution: "high" +S1B: + GMF_VV_NAME: "gmf_cmod5n" + GMF_VH_NAME: "gmf_s1_v2" + dsig_VH_NAME: "gmf_s1_v2" + apply_flattening: True + recalibration: True + ancillary: "ecmwf" + inc_step: 0.1 + wspd_step: 0.1 + phi_step: 1.0 + resolution: "high" +RS2: + GMF_VV_NAME: "gmf_cmod5n" + GMF_VH_NAME: "gmf_rs2_v2" + dsig_VH_NAME: "gmf_rs2_v2" + apply_flattening: False + recalibration: True + ancillary: "ecmwf" + inc_step: 0.1 + wspd_step: 0.1 + phi_step: 1.0 + resolution: "high" +RCM: + GMF_VV_NAME: "gmf_cmod5n" + GMF_VH_NAME: "gmf_rcm_noaa" + dsig_VH_NAME: "gmf_s1_v2" + apply_flattening: True + recalibration: True + ancillary: "ecmwf" + inc_step: 0.1 + wspd_step: 0.1 + phi_step: 1.0 + resolution: "high" diff --git a/grdwindinversion/config_prod_streaks.yaml b/grdwindinversion/config_prod_streaks.yaml new file mode 100644 index 0000000..b286030 --- /dev/null +++ b/grdwindinversion/config_prod_streaks.yaml @@ -0,0 +1,52 @@ +no_subdir: True +winddir_convention: "meteorological" +add_gradientsfeatures: True +add_nrcs_model: False +S1A: + GMF_HH_NAME: "nc_lut_gmf_cmod5n_Rhigh_hh_mouche1" + GMF_VV_NAME: "gmf_cmod5n" + GMF_VH_NAME: "gmf_s1_v2" + dsig_VH_NAME: "gmf_s1_v2" + apply_flattening: True + recalibration: False + ancillary: "ecmwf" + inc_step: 0.1 + wspd_step: 0.1 + phi_step: 
1.0 + resolution: "high" +S1B: + GMF_HH_NAME: "nc_lut_gmf_cmod5n_Rhigh_hh_mouche1" + GMF_VV_NAME: "gmf_cmod5n" + GMF_VH_NAME: "gmf_s1_v2" + dsig_VH_NAME: "gmf_s1_v2" + apply_flattening: True + recalibration: False + ancillary: "ecmwf" + inc_step: 0.1 + wspd_step: 0.1 + phi_step: 1.0 + resolution: "high" +RS2: + GMF_HH_NAME: "nc_lut_gmf_cmod5n_Rhigh_hh_mouche1" + GMF_VV_NAME: "gmf_cmod5n" + GMF_VH_NAME: "gmf_rs2_v2" + dsig_VH_NAME: "gmf_rs2_v2" + apply_flattening: False + recalibration: False + ancillary: "ecmwf" + inc_step: 0.1 + wspd_step: 0.1 + phi_step: 1.0 + resolution: "high" +RCM: + GMF_HH_NAME: "nc_lut_gmf_cmod5n_Rhigh_hh_mouche1" + GMF_VV_NAME: "gmf_cmod5n" + GMF_VH_NAME: "gmf_rcm_noaa" + dsig_VH_NAME: "gmf_s1_v2" + apply_flattening: True + recalibration: False + ancillary: "ecmwf" + inc_step: 0.1 + wspd_step: 0.1 + phi_step: 1.0 + resolution: "high" diff --git a/grdwindinversion/config_prod_streaks_nrcsmod.yaml b/grdwindinversion/config_prod_streaks_nrcsmod.yaml new file mode 100644 index 0000000..a92b39b --- /dev/null +++ b/grdwindinversion/config_prod_streaks_nrcsmod.yaml @@ -0,0 +1,52 @@ +no_subdir: True +winddir_convention: "meteorological" +add_gradientsfeatures: True +add_nrcs_model: True +S1A: + GMF_HH_NAME: "nc_lut_gmf_cmod5n_Rhigh_hh_mouche1" + GMF_VV_NAME: "gmf_cmod5n" + GMF_VH_NAME: "gmf_s1_v2" + dsig_VH_NAME: "gmf_s1_v2" + apply_flattening: True + recalibration: False + ancillary: "ecmwf" + inc_step: 0.1 + wspd_step: 0.1 + phi_step: 1.0 + resolution: "high" +S1B: + GMF_HH_NAME: "nc_lut_gmf_cmod5n_Rhigh_hh_mouche1" + GMF_VV_NAME: "gmf_cmod5n" + GMF_VH_NAME: "gmf_s1_v2" + dsig_VH_NAME: "gmf_s1_v2" + apply_flattening: True + recalibration: False + ancillary: "ecmwf" + inc_step: 0.1 + wspd_step: 0.1 + phi_step: 1.0 + resolution: "high" +RS2: + GMF_HH_NAME: "nc_lut_gmf_cmod5n_Rhigh_hh_mouche1" + GMF_VV_NAME: "gmf_cmod5n" + GMF_VH_NAME: "gmf_rs2_v2" + dsig_VH_NAME: "gmf_rs2_v2" + apply_flattening: False + recalibration: False + ancillary: "ecmwf" + 
inc_step: 0.1 + wspd_step: 0.1 + phi_step: 1.0 + resolution: "high" +RCM: + GMF_HH_NAME: "nc_lut_gmf_cmod5n_Rhigh_hh_mouche1" + GMF_VV_NAME: "gmf_cmod5n" + GMF_VH_NAME: "gmf_rcm_noaa" + dsig_VH_NAME: "gmf_s1_v2" + apply_flattening: True + recalibration: False + ancillary: "ecmwf" + inc_step: 0.1 + wspd_step: 0.1 + phi_step: 1.0 + resolution: "high" diff --git a/grdwindinversion/gradientFeatures.py b/grdwindinversion/gradientFeatures.py new file mode 100644 index 0000000..43806e9 --- /dev/null +++ b/grdwindinversion/gradientFeatures.py @@ -0,0 +1,448 @@ +import xsarsea.gradients +import cv2 +import xarray as xr +import xarray as xr +from scipy.ndimage import binary_dilation +import numpy as np +import logging + +import logging +logger = logging.getLogger('grdwindinversion.gradientFeatures') +logger.addHandler(logging.NullHandler()) + + +class GradientFeatures: + def __init__(self, xr_dataset, xr_dataset_100, windows_sizes, downscales_factors, window_step=1): + """ + Initialize variables and xsarsea.gradients.Gradients. + + Parameters + ---------- + xr_dataset : xarray.Dataset + xarray.Dataset containing the SAR data. + xr_dataset_100 : xarray.Dataset + xarray.Dataset containing the 100m resolution SAR data. + windows_sizes : list + List of window sizes for gradient computation. + downscales_factors : list + List of downscale factors for gradient computation. + window_step : int + Step size for the window (default is 1). + + Returns + ------- + None + """ + self.xr_dataset = xr_dataset + self.xr_dataset_100 = xr_dataset_100 + self.windows_sizes = windows_sizes + self.downscales_factors = downscales_factors + self.window_step = window_step + self.gradients = None + self.hist = None + self._compute_gradients() + + def _compute_gradients(self): + """ + Instantiate the gradients object and compute the histogram. 
+ + Parameters + ---------- + None + + Returns + ------- + None + + """ + self.gradients = xsarsea.gradients.Gradients( + self.xr_dataset_100['sigma0_detrend'], + windows_sizes=self.windows_sizes, + downscales_factors=self.downscales_factors, + window_step=self.window_step + ) + self.hist = self.gradients.histogram + # Get orthogonal gradients + self.hist['angles'] = self.hist['angles'] + np.pi / 2 + + def get_heterogeneity_mask(self, config): + """ + Compute the heterogeneity mask. + + Parameters + ---------- + config : dict + Configuration parameters. + + Returns + ------- + dict + Dictionary containing the dataArrays related toheterogeneity mask. + + """ + dual_pol = config["l2_params"]["dual_pol"] + + new_dataArrays = {} + + try: + + sigma0_400_co = [da.sigma0 for da in self.gradients.gradients_list if ( + da.sigma0["pol"] == config["l2_params"]["copol"] and da.sigma0.downscale_factor == 4)][0] + sigs = [sigma0_400_co] + + if dual_pol: + sigma0_800_cross = [da.sigma0 for da in self.gradients.gradients_list if ( + da.sigma0["pol"] == config["l2_params"]["crosspol"] and da.sigma0.downscale_factor == 8)][0] + sigs.append(sigma0_800_cross) + + filters = {} + for sig in sigs: + + pol = sig["pol"].values + res = 100 * sig.downscale_factor.values + + # delete useless coords : could be problematic to have it later + if 'downscale_factor' in sig.coords: + sig = sig.reset_coords("downscale_factor", drop=True) + + if 'window_size' in sig.coords: + sig = sig.reset_coords("window_size", drop=True) + # mask + sig = xr.where(sig <= 0, 1e-15, sig) + + # map incidence for detrend + incidence = xr.DataArray(data=cv2.resize( + self.xr_dataset_100.incidence.values, sig.shape[::-1], cv2.INTER_NEAREST), dims=sig.dims, coords=sig.coords) + + sigma0_detrend = xsarsea.sigma0_detrend(sig, incidence) + + filter_name = str(res)+"_"+str(pol) + I = sigma0_detrend + f1, f2, f3, f4, f = xsarsea.gradients.filtering_parameters(I) + filters[filter_name] = f + + thresholds = [0.78] # < is 
unusable + if dual_pol: + # Seuil pour crosspol si dual_pol est activé + thresholds.append(0.71) + + for idx_filter, filter in enumerate(filters): + # interp to user resolution and map on dataset grid + new_dataArrays[filter] = filters[filter].interp( + line=self.xr_dataset.line, sample=self.xr_dataset.sample, method="nearest") + new_dataArrays[filter+"_mask"] = xr.where( + new_dataArrays[filter] > thresholds[idx_filter], True, False) + + varname_400_copol_mask = f'400_{config["l2_params"]["copol"]}_mask' + varname_800_crosspol_mask = f'800_{config["l2_params"]["crosspol"]}_mask' + + # Cas 0 : no heterogeneity + new_dataArrays["heterogeneity_mask"] = xr.full_like( + new_dataArrays[varname_400_copol_mask], 0) + + if dual_pol: + # Cas 3 : Dual-polarization + new_dataArrays["heterogeneity_mask"] = xr.where( + new_dataArrays[varname_400_copol_mask] & new_dataArrays[varname_800_crosspol_mask], 3, new_dataArrays["heterogeneity_mask"]) + + # Cas 1 : Co-polarization only + new_dataArrays["heterogeneity_mask"] = xr.where( + new_dataArrays[varname_400_copol_mask] & ~new_dataArrays[varname_800_crosspol_mask], 1, new_dataArrays["heterogeneity_mask"]) + + # Cas 2 : Cross-polarization only + new_dataArrays["heterogeneity_mask"] = xr.where( + ~new_dataArrays[varname_400_copol_mask] & new_dataArrays[varname_800_crosspol_mask], 2, new_dataArrays["heterogeneity_mask"]) + + # Attributes + new_dataArrays["heterogeneity_mask"].attrs["valid_range"] = np.array([ + 0, 3]) + new_dataArrays["heterogeneity_mask"].attrs["flag_values"] = np.array([ + 0, 1, 2, 3]) + new_dataArrays["heterogeneity_mask"].attrs["flag_meanings"] = ( + "homogeneous_NRCS, heterogeneous_from_co-polarization_NRCS, " + "heterogeneous_from_cross-polarization_NRCS, heterogeneous_from_dual-polarization_NRCS" + ) + else: + # no crosspol + new_dataArrays["heterogeneity_mask"] = xr.where( + new_dataArrays[varname_400_copol_mask], 1, new_dataArrays["heterogeneity_mask"]) + + # Attributs pour le cas single-pol + 
new_dataArrays["heterogeneity_mask"].attrs["valid_range"] = np.array([ + 0, 1]) + new_dataArrays["heterogeneity_mask"].attrs["flag_values"] = np.array([ + 0, 1]) + new_dataArrays["heterogeneity_mask"].attrs["flag_meanings"] = ( + "homogeneous_NRCS, heterogeneous_from_co-polarization_NRCS" + ) + + # Attributs généraux + new_dataArrays["heterogeneity_mask"].attrs["long_name"] = "Quality flag taking into account the local heterogeneity" + return new_dataArrays + + except Exception as e: + logging.error("Error in get_heterogeneity_mask: %s", e) + + new_dataArrays["heterogeneity_mask"] = xr.DataArray(data=np.nan * np.ones([len(self.xr_dataset.coords[dim]) for dim in ['line', 'sample']]), + dims=[ + 'line', 'sample'], + coords=[self.xr_dataset.coords[dim] + for dim in ['line', 'sample']], + attrs={"comment": "no heterogeneity mask found"}) + + return new_dataArrays + + def _remove_ambiguity(self, streaks): + """ + Remove direction ambiguity using ancillary wind data. + + Parameters + ---------- + streaks : xarray.Dataset + Dataset containing the streaks. + + Returns + ------- + xarray.Dataset + Dataset containing the streaks with ambiguity removed. + """ + + # Load ancillary wind in antenna convention + ancillary_wind = self.xr_dataset['ancillary_wind'].interp( + line=streaks.line, + sample=streaks.sample, + method='nearest' + ).compute() + + # Convert angles to complex numbers + streaks_c = streaks['weight'] * np.exp(1j * streaks['angle']) + # Calculate the difference in angle + diff_angle = xr.apply_ufunc(np.angle, ancillary_wind / streaks_c) + + # Remove ambiguity + streaks_c = xr.where(np.abs(diff_angle) > np.pi / + 2, -streaks_c, streaks_c) + + # Update streaks with corrected values + streaks['weight'] = np.abs(streaks_c) + streaks['angle'] = xr.apply_ufunc(np.angle, streaks_c) + return streaks + + def convert_to_meteo_convention(self, streaks): + """ + Convert wind direction to meteorological convention by creating a new 'angle' DataArray. 
+ + Parameters + ---------- + streaks : xarray.Dataset + Dataset containing the streaks. + + Returns + ------- + xarray.Dataset + Dataset containing the streaks with wind direction in meteorological convention. + + """ + streaks_meteo = self.xr_dataset[['longitude', 'latitude', 'ground_heading', 'ancillary_wind']].interp( + line=streaks.line, + sample=streaks.sample, + method='nearest') + + streaks_meteo['angle'] = xsarsea.dir_sample_to_meteo( + np.rad2deg(streaks['angle']), streaks_meteo['ground_heading']) + streaks_meteo['angle'].attrs[ + 'winddir_convention'] = "Wind direction in meteorological convention (clockwise, from), ex: 0°=from north, 90°=from east" + + return streaks_meteo + + def streaks_smooth_mean(self): + """ + Compute streaks by smoothing the histograms first and then computing the mean. + + Parameters + ---------- + None + + Returns + ------- + xarray.DataArray + DataArray containing the streaks. + """ + + try: + hist_smooth = self.hist.copy() + hist_smooth['weight'] = xsarsea.gradients.circ_smooth( + hist_smooth['weight']) + + # Compute the mean across 'downscale_factor', 'window_size', and 'pol' + hist_smooth_mean = hist_smooth.mean( + ['downscale_factor', 'window_size', 'pol']) + + # Select histogram peak + iangle_smooth_mean = hist_smooth_mean['weight'].fillna( + 0).argmax(dim='angles') + streaks_dir_smooth_mean = hist_smooth_mean['angles'].isel( + angles=iangle_smooth_mean) + streaks_weight_smooth_mean = hist_smooth_mean['weight'].isel( + angles=iangle_smooth_mean) + + # Combine angles and weights into a dataset + streaks_smooth_mean = xr.Dataset({ + 'angle': streaks_dir_smooth_mean, + 'weight': streaks_weight_smooth_mean + }) + + # Remove 'angles' coordinate + streaks_smooth_mean = streaks_smooth_mean.reset_coords( + 'angles', drop=True) + + # Remove ambiguity with ancillary wind + streaks_smooth_mean = self._remove_ambiguity( + streaks_smooth_mean) + + # Convert to meteo convention + streaks_smooth_mean = self.convert_to_meteo_convention( + 
streaks_smooth_mean) + + # Set attributes + streaks_smooth_mean['angle'].attrs['description'] = 'Wind direction estimated from local gradient; histograms smoothed first, then mean computed' + + return streaks_smooth_mean + + except Exception as e: + logging.error("Error in streaks_smooth_mean: %s", e) + + streaks_dir_smooth_mean_interp = xr.DataArray(data=np.nan * np.ones([len(self.xr_dataset.coords[dim]) for dim in ['line', 'sample']]), + dims=[ + 'line', 'sample'], + coords=[self.xr_dataset.coords[dim] + for dim in ['line', 'sample']], + attrs={"comment": "no streaks_smooth_mean found"}) + + return streaks_dir_smooth_mean_interp + + def streaks_mean_smooth(self): + """ + Compute streaks by meaning the histograms first and then smoothing. + + Parameters + ---------- + None + + Returns + ------- + xarray.DataArray + DataArray containing the streaks. + """ + try: + # Compute the mean of the histograms + hist_mean = self.hist.copy().mean( + ['downscale_factor', 'window_size', 'pol']) + + # Smooth the mean histogram + hist_mean_smooth = hist_mean.copy() + hist_mean_smooth['weight'] = xsarsea.gradients.circ_smooth( + hist_mean['weight']) + + # Select histogram peak + iangle_mean_smooth = hist_mean_smooth['weight'].fillna( + 0).argmax(dim='angles') + streaks_dir_mean_smooth = hist_mean_smooth['angles'].isel( + angles=iangle_mean_smooth) + streaks_weight_mean_smooth = hist_mean_smooth['weight'].isel( + angles=iangle_mean_smooth) + + # Combine angles and weights into a dataset + streaks_mean_smooth = xr.Dataset({ + 'angle': streaks_dir_mean_smooth, + 'weight': streaks_weight_mean_smooth + }) + + # Remove 'angles' coordinate + streaks_mean_smooth = streaks_mean_smooth.reset_coords( + 'angles', drop=True) + + # Remove ambiguity with ancillary wind + streaks_mean_smooth = self._remove_ambiguity( + streaks_mean_smooth) + + # Convert to meteo convention + streaks_mean_smooth = self.convert_to_meteo_convention( + streaks_mean_smooth) + + # Set attributes + 
streaks_mean_smooth['angle'].attrs['description'] = 'Wind direction estimated from local gradient; histograms mean first, then smooth computed' + + return streaks_mean_smooth + + except Exception as e: + logging.error("Error in streaks_mean_smooth: %s", e) + + streaks_mean_smooth = xr.DataArray(data=np.nan * np.ones([len(self.xr_dataset.coords[dim]) for dim in ['line', 'sample']]), + dims=[ + 'line', 'sample'], + coords=[self.xr_dataset.coords[dim] + for dim in ['line', 'sample']], + attrs={"comment": "no streaks_mean_smooth found"}) + + return streaks_mean_smooth + + def streaks_individual(self): + """ + Compute streaks by smoothing the histogram. + + Parameters + ---------- + None + + Returns + ------- + xarray.DataArray + DataArray containing the individual streaks for each window_size, downscale_factor, polarisation (no combination). + """ + try: + # Compute the mean of the histograms + hist_smooth = self.hist.copy() + hist_smooth['weight'] = xsarsea.gradients.circ_smooth( + hist_smooth['weight']) + + # Select histogram peak for each individual solution + iangle_individual = hist_smooth['weight'].fillna( + 0).argmax(dim='angles') + streaks_dir_individual = hist_smooth['angles'].isel( + angles=iangle_individual) + streaks_weight_individual = hist_smooth['weight'].isel( + angles=iangle_individual) + # Combine angles and weights into a dataset + streaks_individual = xr.Dataset({ + 'angle': streaks_dir_individual, + 'weight': streaks_weight_individual + }) + # Remove 'angles' coordinate + streaks_individual = streaks_individual.reset_coords( + 'angles', drop=True) + + # Remove ambiguity with ancillary wind for each individual solution + streaks_individual = self._remove_ambiguity( + streaks_individual) + + # Convert to meteo convention + streaks_individual = self.convert_to_meteo_convention( + streaks_individual) + + # Set attributes + streaks_individual['angle'].attrs['description'] = 'Wind direction estimated from local gradient for each individual solution; 
histograms smoothed individually' + + return streaks_individual + + except Exception as e: + logging.error("Error in streaks_individual: %s", e) + + streaks_individual = xr.DataArray(data=np.nan * np.ones([len(self.xr_dataset.coords[dim]) for dim in ['line', 'sample']]), + dims=[ + 'line', 'sample'], + coords=[self.xr_dataset.coords[dim] + for dim in ['line', 'sample']], + attrs={"comment": "no streaks_individual found"}) + + return streaks_individual diff --git a/grdwindinversion/inversion.py b/grdwindinversion/inversion.py index 874a337..5c7f220 100644 --- a/grdwindinversion/inversion.py +++ b/grdwindinversion/inversion.py @@ -1,3 +1,4 @@ +import tempfile import traceback import xsar @@ -15,14 +16,12 @@ import re import string import os -from grdwindinversion.streaks import get_streaks -from grdwindinversion.utils import check_incidence_range, get_pol_ratio_name +from grdwindinversion.utils import check_incidence_range, get_pol_ratio_name, timing from grdwindinversion.load_config import getConf # optional debug messages import logging -logging.basicConfig() -logging.getLogger('xsarsea.windspeed').setLevel( - logging.INFO) # or .setLevel(logging.INFO) +logger = logging.getLogger('grdwindinversion.inversion') +logger.addHandler(logging.NullHandler()) def getSensorMetaDataset(filename): @@ -45,10 +44,16 @@ def getSensorMetaDataset(filename): return "S1B", "SENTINEL-1 B", xsar.Sentinel1Meta, xsar.Sentinel1Dataset elif ("RS2" in filename): return "RS2", "RADARSAT-2", xsar.RadarSat2Meta, xsar.RadarSat2Dataset - elif ("RCM" in filename): - return "RCM", "RADARSAT Constellation", xsar.RcmMeta, xsar.RcmDataset + elif ("RCM1" in filename): + return "RCM", "RADARSAT Constellation 1", xsar.RcmMeta, xsar.RcmDataset + elif ("RCM2" in filename): + return "RCM", "RADARSAT Constellation 2", xsar.RcmMeta, xsar.RcmDataset + elif ("RCM3" in filename): + return "RCM", "RADARSAT Constellation 3", xsar.RcmMeta, xsar.RcmDataset + else: - raise ValueError("must be S1A|S1B|RS2|RCM, got 
filename %s" % filename) + raise ValueError( + "must be S1A|S1B|RS2|RCM1|RCM2|RCM3, got filename %s" % filename) def getOutputName2(input_file, outdir, sensor, meta, subdir=True): @@ -96,12 +101,10 @@ def getOutputName2(input_file, outdir, sensor, meta, subdir=True): new_format = f"{MISSIONID.lower()}--owi-xx-{meta_start_date.lower()}-{meta_stop_date.lower()}-_____-_____.nc" elif sensor == 'RCM': regex = re.compile( - "([A-Z0-9]+)_OK([0-9]+)_PK([0-9]+)_(.*?)_(.*?)_(.*?)_(.*?)_(.*?)_(.*?)_(.*?)") - template = string.Template( - "${MISSIONID}_OK${DATA1}_PK${DATA2}_${DATA3}_${BEAM}_${DATE}_${TIME}_${POLARIZATION1}_${POLARIZATION2}_${PRODUCT}") + r"(RCM[0-9])_OK([0-9]+)_PK([0-9]+)_([0-9]+)_([A-Z]+)_(\d{8})_(\d{6})_([A-Z]{2}(?:_[A-Z]{2})?)_([A-Z]+)$") match = regex.match(basename_match) - MISSIONID, DATA1, DATA2, DATA3, BEAM_MODE, DATE, TIME, POLARIZATION1, POLARIZATION2, LAST = match.groups() - new_format = f"{MISSIONID.lower()}-{BEAM_MODE.lower()}-owi-xx-{meta_start_date.lower()}-{meta_stop_date.lower()}-_____-_____.nc" + MISSIONID, DATA1, DATA2, DATA3, BEAM, DATE, TIME, POLARIZATION, PRODUCT = match.groups() + new_format = f"{MISSIONID.lower()}-{BEAM.lower()}-owi-xx-{meta_start_date.lower()}-{meta_stop_date.lower()}-_____-_____.nc" else: raise ValueError( "sensor must be S1A|S1B|RS2|RCM, got sensor %s" % sensor) @@ -209,6 +212,7 @@ def getAncillary(meta, ancillary_name='ecmwf'): ancillary_name) +@timing(logger=logger.debug) def inverse(dual_pol, inc, sigma0, sigma0_dual, ancillary_wind, dsig_cr, model_co, model_cross, **kwargs): """ Invert sigma0 to retrieve wind using model (lut or gmf). @@ -282,7 +286,8 @@ def inverse(dual_pol, inc, sigma0, sigma0_dual, ancillary_wind, dsig_cr, model_c return wind_co, None, None -def makeL2asOwi(xr_dataset, dual_pol, copol, crosspol, add_streaks, apply_flattening): +@timing(logger=logger.debug) +def makeL2asOwi(xr_dataset, config): """ Rename xr_dataset variables and attributes to match naming convention. 
@@ -290,12 +295,8 @@ def makeL2asOwi(xr_dataset, dual_pol, copol, crosspol, add_streaks, apply_flatte ---------- xr_dataset: xarray.Dataset dataset to rename - dual_pol: bool - True if dualpol, False if singlepol - copol: str - copolarization name - crosspol: str - crosspolarization name + config: dict + configuration dict Returns ------- @@ -320,13 +321,27 @@ def makeL2asOwi(xr_dataset, dual_pol, copol, crosspol, add_streaks, apply_flatte 'winddir_co': 'owiWindDirection_co', 'ancillary_wind_speed': 'owiAncillaryWindSpeed', 'ancillary_wind_direction': 'owiAncillaryWindDirection', - 'sigma0_detrend': 'owiNrcs_detrend' + 'sigma0_detrend': 'owiNrcs_detrend', }) if "offboresight" in xr_dataset: xr_dataset = xr_dataset.rename( {"offboresight": "owiOffboresightAngle"}) + if config["add_nrcs_model"]: + xr_dataset = xr_dataset.rename( + {"ancillary_nrcs": "owiAncillaryNrcs"}) + xr_dataset.owiAncillaryNrcs.attrs["units"] = "m^2 / m^2" + xr_dataset.owiAncillaryNrcs.attrs[ + "long_name"] = f"Ancillary Normalized Radar Cross Section - simulated from {config['l2_params']['copol_gmf']} & ancillary wind" + + if config["l2_params"]["dual_pol"]: + xr_dataset = xr_dataset.rename( + {"ancillary_nrcs_cross": "owiAncillaryNrcs_cross"}) + xr_dataset.owiAncillaryNrcs_cross.attrs["units"] = "m^2 / m^2" + xr_dataset.owiAncillaryNrcs_cross.attrs[ + "long_name"] = f"Ancillary Normalized Radar Cross Section - simulated from {config['l2_params']['crosspol_gmf']} & ancillary wind" + xr_dataset.owiLon.attrs["units"] = "degrees_east" xr_dataset.owiLon.attrs["long_name"] = "Longitude at wind cell center" xr_dataset.owiLon.attrs["standard_name"] = "longitude" @@ -343,23 +358,25 @@ def makeL2asOwi(xr_dataset, dual_pol, copol, crosspol, add_streaks, apply_flatte xr_dataset.owiElevationAngle.attrs["long_name"] = "Elevation angle at wind cell center" xr_dataset.owiElevationAngle.attrs["standard_name"] = "elevation" - xr_dataset['owiNrcs'] = xr_dataset['sigma0_ocean'].sel(pol=copol) + 
xr_dataset['owiNrcs'] = xr_dataset['sigma0_ocean'].sel( + pol=config["l2_params"]["copol"]) xr_dataset.owiNrcs.attrs = xr_dataset.sigma0_ocean.attrs xr_dataset.owiNrcs.attrs['units'] = 'm^2 / m^2' xr_dataset.owiNrcs.attrs['long_name'] = 'Normalized Radar Cross Section' xr_dataset.owiNrcs.attrs['definition'] = 'owiNrcs_no_noise_correction - owiNesz' - xr_dataset['owiMask_Nrcs'] = xr_dataset['sigma0_mask'].sel(pol=copol) + xr_dataset['owiMask_Nrcs'] = xr_dataset['sigma0_mask'].sel( + pol=config["l2_params"]["copol"]) xr_dataset.owiMask_Nrcs.attrs = xr_dataset.sigma0_mask.attrs # NESZ & DSIG xr_dataset = xr_dataset.assign( - owiNesz=(['line', 'sample'], xr_dataset.nesz.sel(pol=copol).values)) + owiNesz=(['line', 'sample'], xr_dataset.nesz.sel(pol=config["l2_params"]["copol"]).values)) xr_dataset.owiNesz.attrs['units'] = 'm^2 / m^2' xr_dataset.owiNesz.attrs['long_name'] = 'Noise Equivalent SigmaNaught' xr_dataset['owiNrcs_no_noise_correction'] = xr_dataset['sigma0_ocean_raw'].sel( - pol=copol) + pol=config["l2_params"]["copol"]) xr_dataset.owiNrcs_no_noise_correction.attrs = xr_dataset.sigma0_ocean_raw.attrs xr_dataset.owiNrcs_no_noise_correction.attrs['units'] = 'm^2 / m^2' xr_dataset.owiNrcs_no_noise_correction.attrs[ @@ -378,7 +395,7 @@ def makeL2asOwi(xr_dataset, dual_pol, copol, crosspol, add_streaks, apply_flatte # sigma0_raw__corrected cross if "sigma0_raw__corrected" in xr_dataset: xr_dataset['owiNrcs_no_noise_correction_recalibrated'] = xr_dataset['sigma0_raw__corrected'].sel( - pol=copol) + pol=config["l2_params"]["copol"]) xr_dataset.owiNrcs_no_noise_correction_recalibrated.attrs = xr_dataset.sigma0_raw__corrected.attrs xr_dataset.owiNrcs_no_noise_correction_recalibrated.attrs['units'] = 'm^2 / m^2' xr_dataset.owiNrcs_no_noise_correction_recalibrated.attrs[ @@ -388,7 +405,7 @@ def makeL2asOwi(xr_dataset, dual_pol, copol, crosspol, add_streaks, apply_flatte xr_dataset.owiNrcs.attrs['definition'] = 'owiNrcs_no_noise_correction_recalibrated - owiNesz' - if 
dual_pol: + if config["l2_params"]["dual_pol"]: xr_dataset = xr_dataset.rename({ 'dsig_cross': 'owiDsig_cross', @@ -399,31 +416,31 @@ def makeL2asOwi(xr_dataset, dual_pol, copol, crosspol, add_streaks, apply_flatte 'sigma0_detrend_cross': 'owiNrcs_detrend_cross' }) - if apply_flattening: + if config["apply_flattening"]: xr_dataset = xr_dataset.rename({ 'nesz_cross_flattened': 'owiNesz_cross_flattened', }) # nrcs cross xr_dataset['owiNrcs_cross'] = xr_dataset['sigma0_ocean'].sel( - pol=crosspol) + pol=config["l2_params"]["crosspol"]) xr_dataset.owiNrcs_cross.attrs['units'] = 'm^2 / m^2' xr_dataset.owiNrcs_cross.attrs['long_name'] = 'Normalized Radar Cross Section' xr_dataset.owiNrcs_cross.attrs['definition'] = 'owiNrcs_cross_no_noise_correction - owiNesz_cross' xr_dataset['owiMask_Nrcs_cross'] = xr_dataset['sigma0_mask'].sel( - pol=crosspol) + pol=config["l2_params"]["crosspol"]) xr_dataset.owiMask_Nrcs_cross.attrs = xr_dataset.sigma0_mask.attrs # nesz cross xr_dataset = xr_dataset.assign(owiNesz_cross=( - ['line', 'sample'], xr_dataset.nesz.sel(pol=crosspol).values)) # no flattening + ['line', 'sample'], xr_dataset.nesz.sel(pol=config["l2_params"]["crosspol"]).values)) # no flattening xr_dataset.owiNesz_cross.attrs['units'] = 'm^2 / m^2' xr_dataset.owiNesz_cross.attrs['long_name'] = 'Noise Equivalent SigmaNaught' xr_dataset['owiNrcs_cross_no_noise_correction'] = xr_dataset['sigma0_ocean_raw'].sel( - pol=crosspol) + pol=config["l2_params"]["crosspol"]) xr_dataset.owiNrcs_cross_no_noise_correction.attrs['units'] = 'm^2 / m^2' xr_dataset.owiNrcs_cross_no_noise_correction.attrs[ @@ -432,7 +449,7 @@ def makeL2asOwi(xr_dataset, dual_pol, copol, crosspol, add_streaks, apply_flatte #  sigma0_raw__corrected cross if "sigma0_raw__corrected" in xr_dataset: xr_dataset['owiNrcs_cross_no_noise_correction_recalibrated'] = xr_dataset['sigma0_raw__corrected'].sel( - pol=crosspol) + pol=config["l2_params"]["crosspol"]) xr_dataset.owiNrcs_cross_no_noise_correction_recalibrated.attrs 
= xr_dataset.sigma0_raw__corrected.attrs xr_dataset.owiNrcs_cross_no_noise_correction_recalibrated.attrs['units'] = 'm^2 / m^2' xr_dataset.owiNrcs_cross_no_noise_correction_recalibrated.attrs[ @@ -442,10 +459,18 @@ def makeL2asOwi(xr_dataset, dual_pol, copol, crosspol, add_streaks, apply_flatte xr_dataset.owiNrcs_cross.attrs['definition'] = 'owiNrcs_cross_no_noise_correction_recalibrated - owiNesz_cross' - if add_streaks: + if config["add_gradientsfeatures"]: xr_dataset = xr_dataset.rename({ - 'streaks_direction': 'owiStreaksDirection', + 'heterogeneity_mask': 'owiWindFilter' }) + else: + xr_dataset['owiWindFilter'] = xr.full_like(xr_dataset.owiNrcs, 0) + xr_dataset['owiWindFilter'].attrs['long_name'] = "Quality flag taking into account the local heterogeneity" + xr_dataset['owiWindFilter'].attrs['valid_range'] = np.array([0, 3]) + xr_dataset['owiWindFilter'].attrs['flag_values'] = np.array([ + 0, 1, 2, 3]) + xr_dataset['owiWindFilter'].attrs[ + 'flag_meanings'] = "homogeneous_NRCS, heterogeneous_from_co-polarization_NRCS, heterogeneous_from_cross-polarization_NRCS, heterogeneous_from_dual-polarization_NRCS" #  other variables @@ -458,15 +483,6 @@ def makeL2asOwi(xr_dataset, dual_pol, copol, crosspol, add_streaks, apply_flatte xr_dataset['owiWindQuality'].attrs['flag_meanings'] = "good medium low poor" xr_dataset['owiWindQuality'].attrs['comment'] = 'NOT COMPUTED YET' - xr_dataset['owiWindFilter'] = xr.full_like(xr_dataset.owiNrcs, 0) - xr_dataset['owiWindFilter'].attrs['long_name'] = "Quality flag taking into account the local heterogeneity" - xr_dataset['owiWindFilter'].attrs['valid_range'] = np.array([0, 3]) - xr_dataset['owiWindFilter'].attrs['flag_values'] = np.array([ - 0, 1, 2, 3]) - xr_dataset['owiWindFilter'].attrs[ - 'flag_meanings'] = "homogeneous_NRCS, heterogeneous_from_co-polarization_NRCS, heterogeneous_from_cross-polarization_NRCS, heterogeneous_from_dual-polarization_NRCS" - xr_dataset['owiWindFilter'].attrs['comment'] = 'NOT COMPUTED YET' - 
xr_dataset = xr_dataset.rename( {"line": "owiAzSize", "sample": "owiRaSize"}) @@ -476,8 +492,6 @@ def makeL2asOwi(xr_dataset, dual_pol, copol, crosspol, add_streaks, apply_flatte xr_dataset = xr_dataset.drop_vars(["sigma0_raw__corrected"]) xr_dataset = xr_dataset.drop_dims(['pol']) - xr_dataset.compute() - table_fillValue = { "owiWindQuality": -1, "owiHeading": 9999.99, @@ -505,7 +519,7 @@ def makeL2asOwi(xr_dataset, dual_pol, copol, crosspol, add_streaks, apply_flatte return xr_dataset, encoding -def preprocess(filename, outdir, config_path, overwrite=False, add_streaks=False, resolution='1000m'): +def preprocess(filename, outdir, config_path, overwrite=False, add_gradientsfeatures=False, resolution='1000m'): """ Main function to generate L2 product. @@ -549,6 +563,10 @@ def preprocess(filename, outdir, config_path, overwrite=False, add_streaks=False recalibration = config["recalibration"] meta = fct_meta(filename) + # si une des deux n'est pas VV VH HH HV on ne fait rien + if not all([pol in ["VV", "VH", "HH", "HV"] for pol in meta.pols.split(' ')]): + raise ValueError(f"Polarisation non gérée : meta.pols = {meta.pols}") + no_subdir_cfg = config_base.get("no_subdir", False) config["no_subdir"] = no_subdir_cfg @@ -560,6 +578,26 @@ def preprocess(filename, outdir, config_path, overwrite=False, add_streaks=False f'Using meteorological convention because "winddir_convention" was not found in config.') config["winddir_convention"] = winddir_convention + if "add_gradientsfeatures" in config_base: + add_gradientsfeatures = config_base["add_gradientsfeatures"] + else: + add_gradientsfeatures = False + logging.warning( + f'Not computing gradients by default') + config["add_gradientsfeatures"] = add_gradientsfeatures + + if "add_nrcs_model" in config_base: + add_nrcs_model = config_base["add_nrcs_model"] + add_nrcs_model = False + logging.warning( + f'Force this variable to be false, before fixing the issue' + ) + else: + add_nrcs_model = False + logging.warning( + f'Not 
computing nrcs from model by default') + config["add_nrcs_model"] = add_nrcs_model + # creating a dictionnary of parameters config["l2_params"] = {} @@ -607,6 +645,11 @@ def preprocess(filename, outdir, config_path, overwrite=False, add_streaks=False logging.error(e) sys.exit(-1) + #  add parameters in config + config["meta"] = meta + config["fct_dataset"] = fct_dataset + config["map_model"] = map_model + # load xr_dataset = xr_dataset.load() @@ -642,6 +685,7 @@ def preprocess(filename, outdir, config_path, overwrite=False, add_streaks=False config["l2_params"]["model_co"] = model_co config["l2_params"]["model_cross"] = model_cross config["sensor_longname"] = sensor_longname + config["sensor"] = sensor # need to load LUTs before inversion nc_luts = [x for x in [model_co, model_cross] if x.startswith("nc_lut")] @@ -796,41 +840,155 @@ def preprocess(filename, outdir, config_path, overwrite=False, add_streaks=False xr_dataset.attrs["path_aux_cal_old"] = os.path.basename(os.path.dirname( os.path.dirname(xsar_dataset.datatree['recalibration'].attrs['path_aux_cal_old']))) - if add_streaks: - xsar_dataset_100 = fct_dataset( - meta, resolution='100m') - xr_dataset_100 = xsar_dataset_100.datatree['measurement'].to_dataset() - xr_dataset_100 = xr_dataset_100.rename(map_model) - - # adding sigma0 detrend - xr_dataset_100['sigma0_detrend'] = xsarsea.sigma0_detrend( - xr_dataset_100.sigma0.sel(pol=copol), xr_dataset_100.incidence, model=model_co) + if add_nrcs_model: + # add timing + phi = np.abs( + np.rad2deg(xsarsea.dir_meteo_to_sample( + xr_dataset["ancillary_wind_direction"], xr_dataset["ground_heading"])) + ) + varnames = ["ancillary_nrcs"] + gmf_names = [model_co] if dual_pol: - xr_dataset_100['sigma0_detrend_cross'] = xsarsea.sigma0_detrend( - xr_dataset_100.sigma0.sel(pol=crosspol), xr_dataset_100.incidence, model=model_cross) + varnames.append("ancillary_nrcs_cross") + gmf_names.append(model_cross) - sigma0_detrend_combined = xr.concat( - 
[xr_dataset_100['sigma0_detrend'], - xr_dataset_100['sigma0_detrend_cross']], - dim='pol' - ) - sigma0_detrend_combined['pol'] = [copol, crosspol] - - xr_dataset_100['sigma0_detrend'] = sigma0_detrend_combined + for idx, gmf_name in enumerate(gmf_names): - xr_dataset_100.land_mask.values = binary_dilation(xr_dataset_100['land_mask'].values.astype('uint8'), - structure=np.ones((3, 3), np.uint8), iterations=3) - xr_dataset_100['sigma0_detrend'] = xr.where( - xr_dataset_100['land_mask'], np.nan, xr_dataset_100['sigma0']).transpose(*xr_dataset_100['sigma0'].dims) + @timing(logger=logger.info) + def apply_lut_to_dataset(): + lut = xsarsea.windspeed.get_model( + gmf_name).to_lut(unit="linear") - xr_dataset['streaks_direction'] = get_streaks( - xr_dataset, xr_dataset_100) + def lut_selection(incidence, wspd, phi): + if "phi" in lut.coords: + return lut.sel( + incidence=incidence, wspd=wspd, phi=phi, method="nearest" + ) + else: + return lut.sel( + incidence=incidence, wspd=wspd, method="nearest" + ) + + xr_dataset[varnames[idx]] = xr.apply_ufunc( + lut_selection, + xr_dataset.incidence, + xr_dataset.ancillary_wind_speed, + phi, + vectorize=True, + dask="parallelized", + output_dtypes=[float], + ) + + apply_lut_to_dataset() return xr_dataset, out_file, config -def makeL2(filename, outdir, config_path, overwrite=False, generateCSV=True, add_streaks=False, resolution='1000m'): +def process_gradients(xr_dataset, config): + """ + Function to process gradients features. + + Parameters + ---------- + xr_dataset : xarray.Dataset + Main dataset to process. + meta : object + Metadata from the original dataset. + fct_dataset : callable + Function to load the dataset. + map_model : dict + Mapping model for renaming variables. + config : dict + Configuration dictionary. + + Returns + ------- + tuple + Updated xr_dataset and xr_dataset_streaks dataset. 
+ """ + from grdwindinversion.gradientFeatures import GradientFeatures + + meta = config["meta"] + fct_dataset = config["fct_dataset"] + map_model = config["map_model"] + + model_co = config["l2_params"]["model_co"] + model_cross = config["l2_params"]["model_cross"] + copol = config["l2_params"]["copol"] + crosspol = config["l2_params"]["crosspol"] + dual_pol = config["l2_params"]["dual_pol"] + + # Load the 100m dataset + xsar_dataset_100 = fct_dataset( + meta, resolution='100m') + + xr_dataset_100 = xsar_dataset_100.datatree['measurement'].to_dataset() + xr_dataset_100 = xr_dataset_100.rename(map_model) + # load dataset + xr_dataset_100 = xr_dataset_100.load() + + # adding sigma0 detrend + xr_dataset_100['sigma0_detrend'] = xsarsea.sigma0_detrend( + xr_dataset_100.sigma0.sel(pol=copol), xr_dataset_100.incidence, model=model_co) + + if dual_pol: + xr_dataset_100['sigma0_detrend_cross'] = xsarsea.sigma0_detrend( + xr_dataset_100.sigma0.sel(pol=crosspol), xr_dataset_100.incidence, model=model_cross) + + sigma0_detrend_combined = xr.concat( + [xr_dataset_100['sigma0_detrend'], + xr_dataset_100['sigma0_detrend_cross']], + dim='pol' + ) + sigma0_detrend_combined['pol'] = [copol, crosspol] + + xr_dataset_100['sigma0_detrend'] = sigma0_detrend_combined + + xr_dataset_100.land_mask.values = binary_dilation(xr_dataset_100['land_mask'].values.astype('uint8'), + structure=np.ones((3, 3), np.uint8), iterations=3) + xr_dataset_100['sigma0_detrend'] = xr.where( + xr_dataset_100['land_mask'], np.nan, xr_dataset_100['sigma0']).transpose(*xr_dataset_100['sigma0'].dims) + + xr_dataset_100['ancillary_wind'] = ( + xr_dataset_100.model_U10 + 1j * xr_dataset_100.model_V10) * np.exp(1j * np.deg2rad(xr_dataset_100.ground_heading)) + + downscales_factors = [1, 2, 4, 8] + # 4 and 8 must be in downscales_factors + assert all([x in downscales_factors for x in [4, 8]]) + + gradientFeatures = GradientFeatures( + xr_dataset=xr_dataset, + xr_dataset_100=xr_dataset_100, + windows_sizes=[1600, 
3200],
+        downscales_factors=downscales_factors,
+        window_step=1
+    )
+
+    # Compute heterogeneity mask and variables
+    dataArraysHeterogeneity = gradientFeatures.get_heterogeneity_mask(config)
+    xr_dataset = xr_dataset.merge(dataArraysHeterogeneity)
+
+    # Add streaks dataset
+    streaks_indiv = gradientFeatures.streaks_individual()
+    if 'longitude' in streaks_indiv:
+        xr_dataset_streaks = xr.Dataset({
+            'longitude': streaks_indiv.longitude,
+            'latitude': streaks_indiv.latitude,
+            'dir_smooth': streaks_indiv.angle,
+            'dir_mean_smooth': gradientFeatures.streaks_mean_smooth().angle,
+            'dir_smooth_mean': gradientFeatures.streaks_smooth_mean().angle,
+        })
+    else:
+        logger.warning(
+            "'longitude' not found in streaks_indiv : there is probably an error")
+        xr_dataset_streaks = None
+
+    return xr_dataset, xr_dataset_streaks
+
+
+@timing(logger=logger.info)
+def makeL2(filename, outdir, config_path, overwrite=False, generateCSV=True, resolution='1000m'):
     """
     Main function to generate L2 product.
 
@@ -858,7 +1016,13 @@ def makeL2(filename, outdir, config_path, overwrite=False, generateCSV=True, add
     """
     xr_dataset, out_file, config = preprocess(
-        filename, outdir, config_path, overwrite, add_streaks, resolution)
+        filename, outdir, config_path, overwrite, resolution=resolution)
+
+    if config["add_gradientsfeatures"]:
+        xr_dataset, xr_dataset_streaks = process_gradients(
+            xr_dataset, config)
+    else:
+        xr_dataset_streaks = None
 
     model_co = config["l2_params"]["model_co"]
     model_cross = config["l2_params"]["model_cross"]
@@ -951,8 +1115,9 @@ def makeL2(filename, outdir, config_path, overwrite=False, generateCSV=True, add
             "long_name"] = f"{ancillary_name} wind direction in oceanographic convention (clockwise, to), ex: 0°=to north, 90°=to east"
 
     xr_dataset, encoding = makeL2asOwi(
-        xr_dataset, dual_pol, copol, crosspol, add_streaks=add_streaks, apply_flattening=config["apply_flattening"])
+        xr_dataset, config)
 
+    xr_dataset = xr_dataset.compute()
     #  add attributes
     firstMeasurementTime = None
lastMeasurementTime = None
@@ -1035,7 +1200,26 @@ def makeL2(filename, outdir, config_path, overwrite=False, generateCSV=True, add
 
     os.makedirs(os.path.dirname(out_file), exist_ok=True)
 
+    # Save xr_dataset into the final output file
     xr_dataset.to_netcdf(out_file, mode="w", encoding=encoding)
+
+    # Check whether the streaks dataset is present
+    if xr_dataset_streaks is not None:
+        # Create a temporary file for the streaks dataset
+        with tempfile.NamedTemporaryFile(delete=False, suffix=".nc") as tmp_file:
+            temp_out_file = tmp_file.name
+
+        # Write xr_dataset_streaks to the temporary file
+        xr_dataset_streaks.to_netcdf(
+            temp_out_file, mode="w", group="owiWindStreaks")
+
+        # Load the temporary file and append it to the final file as a group
+        with xr.open_dataset(temp_out_file, group="owiWindStreaks") as ds_streaks:
+            ds_streaks.to_netcdf(out_file, mode="a", group="owiWindStreaks")
+
+        # Remove the temporary file once the operation is done
+        os.remove(temp_out_file)
+
     if generateCSV:
         df = xr_dataset.to_dataframe()
         df = df[df.owiMask == False]
diff --git a/grdwindinversion/streaks.py b/grdwindinversion/streaks.py
deleted file mode 100644
index 9588aa4..0000000
--- a/grdwindinversion/streaks.py
+++ /dev/null
@@ -1,79 +0,0 @@
-import xarray as xr
-import xsarsea.gradients
-import xarray as xr
-from scipy.ndimage import binary_dilation
-import numpy as np
-
-
-def get_streaks(xr_dataset, xr_dataset_100):
-    """
-    Get the streaks from the wind field.
-
-    Parameters
-    ----------
-    xr_dataset : xarray.Dataset
-        dataset at user resolution.
-    xr_dataset_100 : xarray.Dataset
-        dataset at 100m resolution.
-
-    Returns
-    -------
-    xarray.Dataset
-        Extract wind direction from Koch Method using xsarsea tools.
- """ - - # return empy dataArray, waiting for solution - return xr.DataArray(data=np.nan * np.ones([len(xr_dataset.coords[dim]) for dim in ['line','sample']]), - dims=['line','sample'], - coords=[xr_dataset.coords[dim] for dim in ['line','sample']]) - # - - """ - gradients = xsarsea.gradients.Gradients(xr_dataset_100['sigma0_detrend'], windows_sizes=[ - 1600, 3200], downscales_factors=[1, 2], window_step=1) - - # get gradients histograms as an xarray dataset - hist = gradients.histogram - - # get orthogonals gradients - hist['angles'] = hist['angles'] + np.pi/2 - - # mean - hist_mean = hist.mean(['downscale_factor', 'window_size', 'pol']) - - # smooth - hist_mean_smooth = hist_mean.copy() - hist_mean_smooth['weight'] = xsarsea.gradients.circ_smooth( - hist_mean['weight']) - - # smooth only - # hist_smooth = hist.copy() - # hist_smooth['weight'] = xsarsea.gradients.circ_smooth(hist_smooth['weight']) - - # select histogram peak - iangle = hist_mean_smooth['weight'].fillna(0).argmax(dim='angles') - streaks_dir = hist_mean_smooth.angles.isel(angles=iangle) - streaks_weight = hist_mean_smooth['weight'].isel(angles=iangle) - streaks = xr.merge( - [dict(angle=streaks_dir, weight=streaks_weight)]).drop('angles') - - # streaks are [0, pi]. 
Remove ambiguity with anciallary wind - ancillary_wind = xr_dataset_100['ancillary_wind'].sel(line=streaks.line, - sample=streaks.sample, - method='nearest').compute() - streaks_c = streaks['weight'] * np.exp(1j * streaks['angle']) - diff_angle = xr.apply_ufunc(np.angle, ancillary_wind / streaks_c) - streaks_c = xr.where(np.abs(diff_angle) > np.pi/2, -streaks_c, streaks_c) - streaks['weight'] = np.abs(streaks_c) - streaks['angle'] = xr.apply_ufunc(np.angle, streaks_c) - - streaks_dir = xr.apply_ufunc( - np.angle, streaks_c.interp(line=xr_dataset.line, sample=xr_dataset.sample)) - streaks_dir = xr.where( - xr_dataset['land_mask'], np.nan, streaks_dir) - streaks_dir.attrs['comment'] = 'angle in radians, anticlockwise, 0=line' - streaks_dir.attrs['description'] = 'wind direction estimated from local gradient, and direction ambiguity removed with ancillary wind' - - return streaks_dir - - """ diff --git a/grdwindinversion/utils.py b/grdwindinversion/utils.py index a638e8d..06f5c3b 100644 --- a/grdwindinversion/utils.py +++ b/grdwindinversion/utils.py @@ -1,7 +1,22 @@ +import os +import time import logging import xsarsea +logging.basicConfig(level=logging.INFO, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') +logger = logging.getLogger('grdwindinversion') + + +mem_monitor = True +try: + from psutil import Process +except ImportError: + logger.warning("psutil module not found. Disabling memory monitor") + mem_monitor = False + + def check_incidence_range(incidence, models, **kwargs): """ Check if the incidence range of the dataset is within the range of the LUT of the model. 
@@ -81,3 +96,28 @@ def check_format(s): return "not_written_in_lut" else: return '/' + + +def timing(logger=logger.debug): + """provide a @timing decorator() for functions, that log time spent in it""" + + def decorator(f): + # @wraps(f) + def wrapper(*args, **kwargs): + mem_str = '' + process = None + if mem_monitor: + process = Process(os.getpid()) + startrss = process.memory_info().rss + starttime = time.time() + result = f(*args, **kwargs) + endtime = time.time() + if mem_monitor: + endrss = process.memory_info().rss + mem_str = 'mem: %+.1fMb' % ((endrss - startrss) / (1024 ** 2)) + logger( + 'timing %s : %.2fs. %s' % (f.__name__, endtime - starttime, mem_str)) + return result + wrapper.__doc__ = f.__doc__ + return wrapper + return decorator diff --git a/pyproject.toml b/pyproject.toml index 32fcf69..65cebb3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,7 +2,7 @@ name = "grdwindinversion" requires-python = ">= 3.9" description = "Package to perform Wind inversion from GRD Level-1 SAR images" -readme = "README.md" +readme = "README.rst" license = {text = "MIT"} dependencies = [ "xsar", @@ -25,10 +25,6 @@ classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', diff --git a/tests/config_test.yaml b/tests/config_test.yaml index 92337e8..1515736 100644 --- a/tests/config_test.yaml +++ b/tests/config_test.yaml @@ -1,5 +1,7 @@ no_subdir: True convention: "meteorological" +add_gradientsfeatures: True +add_nrcs_model: False S1A: GMF_VV_NAME: "gmf_cmod5n" GMF_VH_NAME: "gmf_s1_v2" diff --git a/tests/test_grdwindinversion_ci.py b/tests/test_grdwindinversion_ci.py index 4797a18..595de9f 100644 --- 
a/tests/test_grdwindinversion_ci.py +++ b/tests/test_grdwindinversion_ci.py @@ -40,7 +40,6 @@ def test_makeL2_generation(): config_path=config_path, overwrite=True, # Set to True to ensure a clean run generateCSV=False, # Disable CSV generation for now - add_streaks=False, resolution="1000m", )