From ab85d9c5707facd2d5f517054cd4be0ab9907424 Mon Sep 17 00:00:00 2001 From: Even Rouault Date: Fri, 5 Apr 2024 02:27:18 +0200 Subject: [PATCH 001/230] Warper: add a EXCLUDED_VALUES warping option to specify pixel values to be ignored as contributing source pixels during resampling Limited to average resampling for now --- alg/gdalwarper.cpp | 18 ++++ alg/gdalwarper.h | 6 ++ alg/gdalwarpkernel.cpp | 199 ++++++++++++++++++++++++++++++++++++++--- autotest/alg/warp.py | 75 ++++++++++++++++ 4 files changed, 284 insertions(+), 14 deletions(-) diff --git a/alg/gdalwarper.cpp b/alg/gdalwarper.cpp index 4a9d075b4331..e278979462f8 100644 --- a/alg/gdalwarper.cpp +++ b/alg/gdalwarper.cpp @@ -1249,6 +1249,24 @@ CPLErr GDALWarpDstAlphaMasker(void *pMaskFuncArg, int nBandCount, * an explicit source and target SRS. *
  • MULT_FACTOR_VERTICAL_SHIFT: Multiplication factor for the vertical shift. Default 1.0
  • + * + *
  • EXCLUDED_VALUES: (GDAL >= 3.9) Comma-separated tuple of values + * (thus typically "R,G,B") that are ignored as contributing source + * pixels during resampling. The number of values in the tuple must be the same + * as the number of bands, excluding the alpha band. + * Several tuples of excluded values may be specified using the + * "(R1,G1,B1),(R2,G2,B2)" syntax. + * Only taken into account by Average currently. + * This concept is a bit similar to nodata/alpha, but the main difference is + * that pixels matching one of the excluded value tuples are still considered + * as valid when determining the target pixel validity/density. + *
  • + * + *
  • EXCLUDED_VALUES_PCT_THRESHOLD=[0-100]: (GDAL >= 3.9) Minimum percentage + * of source pixels that must be set to one of the EXCLUDED_VALUES to cause + * the excluded value that is in the majority among source pixels to be used as the + * target pixel value. Default value is 50 (%)
  • + * * */ diff --git a/alg/gdalwarper.h b/alg/gdalwarper.h index d23e486371fb..90ca632a091b 100644 --- a/alg/gdalwarper.h +++ b/alg/gdalwarper.h @@ -461,6 +461,12 @@ class CPL_DLL GDALWarpKernel bool bApplyVerticalShift = false; double dfMultFactorVerticalShift = 1.0; + + // Tuples of values (e.g. ",," or "(,,),(,,)") that must + // be ignored as contributing source pixels during resampling. Only taken into account by + // Average currently + std::vector> m_aadfExcludedValues{}; + /*! @endcond */ GDALWarpKernel(); diff --git a/alg/gdalwarpkernel.cpp b/alg/gdalwarpkernel.cpp index b8f87b6cda1a..1bfb1fa78191 100644 --- a/alg/gdalwarpkernel.cpp +++ b/alg/gdalwarpkernel.cpp @@ -1407,6 +1407,37 @@ CPLErr GDALWarpKernel::Validate() return CE_Failure; } + // Tuples of values (e.g. ",," or "(,,),(,,)") that must + // be ignored as contributing source pixels during resampling. Only taken into account by + // Average currently + const char *pszExcludedValues = + CSLFetchNameValue(papszWarpOptions, "EXCLUDED_VALUES"); + if (pszExcludedValues) + { + const CPLStringList aosTokens( + CSLTokenizeString2(pszExcludedValues, "(,)", 0)); + if ((aosTokens.size() % nBands) != 0) + { + CPLError(CE_Failure, CPLE_AppDefined, + "EXCLUDED_VALUES should contain one or several tuples of " + "%d values formatted like ,, or " + "(,,),(,,) if there are multiple " + "tuples", + nBands); + return CE_Failure; + } + std::vector adfTuple; + for (int i = 0; i < aosTokens.size(); ++i) + { + adfTuple.push_back(CPLAtof(aosTokens[i])); + if (((i + 1) % nBands) == 0) + { + m_aadfExcludedValues.push_back(adfTuple); + adfTuple.clear(); + } + } + } + return CE_None; } @@ -6549,6 +6580,11 @@ static void GWKAverageOrModeThread(void *pData) const double dfErrorThreshold = CPLAtof( CSLFetchNameValueDef(poWK->papszWarpOptions, "ERROR_THRESHOLD", "0")); + const double dfExcludedValuesThreshold = + CPLAtof(CSLFetchNameValueDef(poWK->papszWarpOptions, + "EXCLUDED_VALUES_PCT_THRESHOLD", "50")) / + 100.0; + const int nXMargin = 2 * std::max(1, static_cast(std::ceil(1. / poWK->dfXScale))); const int nYMargin = @@ -6689,13 +6725,161 @@ static void GWKAverageOrModeThread(void *pData) if (iSrcYMin == iSrcYMax && iSrcYMax < nSrcYSize) iSrcYMax++; +#define COMPUTE_WEIGHT_Y(iSrcY) \ + ((iSrcY == iSrcYMin) \ + ? ((iSrcYMin + 1 == iSrcYMax) ? 1.0 : 1 - (dfYMin - iSrcYMin)) \ + : (iSrcY + 1 == iSrcYMax) ? 1 - (iSrcYMax - dfYMax) \ + : 1.0) + +#define COMPUTE_WEIGHT(iSrcX, dfWeightY) \ + ((iSrcX == iSrcXMin) ? ((iSrcXMin + 1 == iSrcXMax) \ + ? dfWeightY \ + : dfWeightY * (1 - (dfXMin - iSrcXMin))) \ + : (iSrcX + 1 == iSrcXMax) ? 
dfWeightY * (1 - (iSrcXMax - dfXMax)) \ + : dfWeightY) + + bool bDone = false; + + // Special Average mode where we process all bands together, + // to avoid averaging tuples that match an entry of m_aadfExcludedValues + if (nAlgo == GWKAOM_Average && + !poWK->m_aadfExcludedValues.empty() && + !poWK->bApplyVerticalShift && !bIsComplex) + { + double dfTotalWeightInvalid = 0.0; + double dfTotalWeightExcluded = 0.0; + double dfTotalWeightRegular = 0.0; + std::vector adfValueReal(poWK->nBands, 0); + std::vector adfValueAveraged(poWK->nBands, 0); + std::vector anCountExcludedValues( + poWK->m_aadfExcludedValues.size(), 0); + + for (int iSrcY = iSrcYMin; iSrcY < iSrcYMax; iSrcY++) + { + const double dfWeightY = COMPUTE_WEIGHT_Y(iSrcY); + iSrcOffset = + iSrcXMin + static_cast(iSrcY) * nSrcXSize; + for (int iSrcX = iSrcXMin; iSrcX < iSrcXMax; + iSrcX++, iSrcOffset++) + { + if (bWrapOverX) + iSrcOffset = + (iSrcX % nSrcXSize) + + static_cast(iSrcY) * nSrcXSize; + + if (poWK->panUnifiedSrcValid != nullptr && + !CPLMaskGet(poWK->panUnifiedSrcValid, iSrcOffset)) + { + continue; + } + + const double dfWeight = + COMPUTE_WEIGHT(iSrcX, dfWeightY); + if (dfWeight <= 0) + continue; + + bool bAllValid = true; + for (int iBand = 0; iBand < poWK->nBands; iBand++) + { + double dfBandDensity = 0; + double dfValueImagTmp = 0; + if (!(GWKGetPixelValue( + poWK, iBand, iSrcOffset, &dfBandDensity, + &adfValueReal[iBand], &dfValueImagTmp) && + dfBandDensity > BAND_DENSITY_THRESHOLD)) + { + bAllValid = false; + break; + } + } + + if (!bAllValid) + { + dfTotalWeightInvalid += dfWeight; + continue; + } + + bool bExcludedValueFound = false; + for (size_t i = 0; + i < poWK->m_aadfExcludedValues.size(); ++i) + { + if (poWK->m_aadfExcludedValues[i] == adfValueReal) + { + bExcludedValueFound = true; + ++anCountExcludedValues[i]; + dfTotalWeightExcluded += dfWeight; + break; + } + } + if (!bExcludedValueFound) + { + // Weighted incremental algorithm mean + // Cf https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Weighted_incremental_algorithm + dfTotalWeightRegular += dfWeight; + for (int iBand = 0; iBand < poWK->nBands; iBand++) + { + adfValueAveraged[iBand] += + (dfWeight / dfTotalWeightRegular) * + (adfValueReal[iBand] - + adfValueAveraged[iBand]); + } + } + } + } + + const double dfTotalWeight = dfTotalWeightInvalid + + dfTotalWeightExcluded + + dfTotalWeightRegular; + if (dfTotalWeightExcluded > 0 && + dfTotalWeightExcluded >= + dfExcludedValuesThreshold * dfTotalWeight) + { + // Find the most represented excluded value tuple + size_t iExcludedValue = 0; + int nExcludedValueCount = 0; + for (size_t i = 0; i < poWK->m_aadfExcludedValues.size(); + ++i) + { + if (anCountExcludedValues[i] > nExcludedValueCount) + { + iExcludedValue = i; + nExcludedValueCount = anCountExcludedValues[i]; + } + } + + bHasFoundDensity = true; + + for (int iBand = 0; iBand < poWK->nBands; iBand++) + { + GWKSetPixelValue( + poWK, iBand, iDstOffset, /* dfBandDensity = */ 1.0, + poWK->m_aadfExcludedValues[iExcludedValue][iBand], + 0); + } + } + else if (dfTotalWeightRegular > 0) + { + bHasFoundDensity = true; + + for (int iBand = 0; iBand < poWK->nBands; iBand++) + { + GWKSetPixelValue(poWK, iBand, iDstOffset, + /* dfBandDensity = */ 1.0, + adfValueAveraged[iBand], 0); + } + } + + // Skip below loop on bands + bDone = true; + } + /* ==================================================================== */ /* Loop processing each band. 
*/ /* ==================================================================== */ - for (int iBand = 0; iBand < poWK->nBands; iBand++) + for (int iBand = 0; !bDone && iBand < poWK->nBands; iBand++) { double dfBandDensity = 0.0; double dfValueReal = 0.0; @@ -6711,19 +6895,6 @@ static void GWKAverageOrModeThread(void *pData) // Loop over source lines and pixels - 3 possible algorithms. -#define COMPUTE_WEIGHT_Y(iSrcY) \ - ((iSrcY == iSrcYMin) \ - ? ((iSrcYMin + 1 == iSrcYMax) ? 1.0 : 1 - (dfYMin - iSrcYMin)) \ - : (iSrcY + 1 == iSrcYMax) ? 1 - (iSrcYMax - dfYMax) \ - : 1.0) - -#define COMPUTE_WEIGHT(iSrcX, dfWeightY) \ - ((iSrcX == iSrcXMin) ? ((iSrcXMin + 1 == iSrcXMax) \ - ? dfWeightY \ - : dfWeightY * (1 - (dfXMin - iSrcXMin))) \ - : (iSrcX + 1 == iSrcXMax) ? dfWeightY * (1 - (iSrcXMax - dfXMax)) \ - : dfWeightY) - // poWK->eResample == GRA_Average. if (nAlgo == GWKAOM_Average) { diff --git a/autotest/alg/warp.py b/autotest/alg/warp.py index 2729fd4f3264..17ba627b8f25 100755 --- a/autotest/alg/warp.py +++ b/autotest/alg/warp.py @@ -1740,3 +1740,78 @@ def test_non_square(): assert res.ymax == pytest.approx(30.25) assert res.xmin == pytest.approx(9.9) assert res.xmax == pytest.approx(10.5) + + +############################################################################### +# Test EXCLUDED_VALUES warping option with average resampling + + +def test_warp_average_excluded_values(): + + src_ds = gdal.GetDriverByName("MEM").Create("", 2, 2, 3, gdal.GDT_Byte) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 2, 2, struct.pack("B" * 4, 10, 20, 30, 40) + ) + src_ds.GetRasterBand(2).WriteRaster( + 0, 0, 2, 2, struct.pack("B" * 4, 11, 21, 31, 41) + ) + src_ds.GetRasterBand(3).WriteRaster( + 0, 0, 2, 2, struct.pack("B" * 4, 12, 22, 32, 42) + ) + src_ds.SetGeoTransform([1, 1, 0, 1, 0, 1]) + + with pytest.raises( + Exception, + match="EXCLUDED_VALUES should contain one or several tuples of 3 values", + ): + out_ds = gdal.Warp( + "", src_ds, options="-of MEM -ts 1 1 -r average -wo EXCLUDED_VALUES=30,31" + ) + + # The excluded value is just ignored in contributing source pixels that are average, as it represents only 25% of contributing pixels + out_ds = gdal.Warp( + "", src_ds, options="-of MEM -ts 1 1 -r average -wo EXCLUDED_VALUES=(30,31,32)" + ) + assert struct.unpack("B" * 3, out_ds.ReadRaster()) == ( + (10 + 20 + 40) // 3, + (11 + 21 + 41) // 3, + (12 + 22 + 42) // 3, + ) + + # The excluded value is selected because its contributing 25% is >= 0% + out_ds = gdal.Warp( + "", + src_ds, + options="-of MEM -ts 1 1 -r average -wo EXCLUDED_VALUES=(30,31,32) -wo EXCLUDED_VALUES_PCT_THRESHOLD=0", + ) + assert struct.unpack("B" * 3, out_ds.ReadRaster()) == (30, 31, 32) + + # The excluded value is selected because its contributing 25% is >= 24% + out_ds = gdal.Warp( + "", + src_ds, + options="-of MEM -ts 1 1 -r average -wo EXCLUDED_VALUES=(30,31,32) -wo EXCLUDED_VALUES_PCT_THRESHOLD=24", + ) + assert struct.unpack("B" * 3, out_ds.ReadRaster()) == (30, 31, 32) + + # The excluded value is selected because its contributing 25% is < 26% + out_ds = gdal.Warp( + "", + src_ds, + options="-of MEM -ts 1 1 -r average -wo EXCLUDED_VALUES=(30,31,32) -wo EXCLUDED_VALUES_PCT_THRESHOLD=26", + ) + assert struct.unpack("B" * 3, out_ds.ReadRaster()) == ( + (10 + 20 + 40) // 3, + (11 + 21 + 41) // 3, + (12 + 22 + 42) // 3, + ) + + # No match of excluded value + out_ds = gdal.Warp( + "", src_ds, options="-of MEM -ts 1 1 -r average -wo EXCLUDED_VALUES=(30,31,0)" + ) + assert struct.unpack("B" * 3, out_ds.ReadRaster()) == ( + (10 + 20 + 
30 + 40) // 4, + (11 + 21 + 31 + 41) // 4, + (12 + 22 + 32 + 42) // 4, + ) From fe5fe9c5bcf3f085c21f11a9c86194865e2e38be Mon Sep 17 00:00:00 2001 From: Even Rouault Date: Fri, 5 Apr 2024 02:27:53 +0200 Subject: [PATCH 002/230] gdal2tiles: add --excluded-values and --excluded-values-pct-threshold switches --- autotest/pyscripts/test_gdal2tiles.py | 44 +++++++++++++- doc/source/programs/gdal2tiles.rst | 18 ++++++ .../gdal-utils/osgeo_utils/gdal2tiles.py | 57 ++++++++++++++----- 3 files changed, 105 insertions(+), 14 deletions(-) diff --git a/autotest/pyscripts/test_gdal2tiles.py b/autotest/pyscripts/test_gdal2tiles.py index 00ca7f684070..0b24b8306679 100755 --- a/autotest/pyscripts/test_gdal2tiles.py +++ b/autotest/pyscripts/test_gdal2tiles.py @@ -33,12 +33,13 @@ import os import os.path import shutil +import struct import sys import pytest import test_py_scripts # noqa # pylint: disable=E0401 -from osgeo import gdal # noqa +from osgeo import gdal, osr # noqa from osgeo_utils.gdalcompare import compare_db pytestmark = pytest.mark.skipif( @@ -581,3 +582,44 @@ def test_gdal2tiles_py_webp(script_path, tmp_path, resampling): ) diff_found = compare_db(gdal.Open(webp_filename), gdal.Open(filename)) assert not diff_found, (resampling, filename) + + +@pytest.mark.require_driver("PNG") +def test_gdal2tiles_excluded_values(script_path, tmp_path): + + input_tif = str(tmp_path / "test_gdal2tiles_excluded_values.tif") + output_folder = str(tmp_path / "test_gdal2tiles_excluded_values") + + src_ds = gdal.GetDriverByName("GTiff").Create(input_tif, 256, 256, 3, gdal.GDT_Byte) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 2, 2, struct.pack("B" * 4, 10, 20, 30, 40) + ) + src_ds.GetRasterBand(2).WriteRaster( + 0, 0, 2, 2, struct.pack("B" * 4, 11, 21, 31, 41) + ) + src_ds.GetRasterBand(3).WriteRaster( + 0, 0, 2, 2, struct.pack("B" * 4, 12, 22, 32, 42) + ) + srs = osr.SpatialReference() + srs.ImportFromEPSG(3857) + src_ds.SetSpatialRef(srs) + MAX_GM = 20037508.342789244 + RES_Z0 = 2 * MAX_GM / 256 + RES_Z1 = RES_Z0 / 2 + # Spatial extent of tile (0,0) at zoom level 1 + src_ds.SetGeoTransform([-MAX_GM, RES_Z1, 0, MAX_GM, 0, -RES_Z1]) + src_ds = None + + test_py_scripts.run_py_script_as_external_script( + script_path, + "gdal2tiles", + f"-q -z 0-1 --excluded-values=30,31,32 --excluded-values-pct-threshold=50 {input_tif} {output_folder}", + ) + + ds = gdal.Open(f"{output_folder}/0/0/0.png") + assert struct.unpack("B" * 4, ds.ReadRaster(0, 0, 1, 1)) == ( + (10 + 20 + 40) // 3, + (11 + 21 + 41) // 3, + (12 + 22 + 42) // 3, + 255, + ) diff --git a/doc/source/programs/gdal2tiles.rst b/doc/source/programs/gdal2tiles.rst index abc29fc71dd6..79ca7e41e8f6 100644 --- a/doc/source/programs/gdal2tiles.rst +++ b/doc/source/programs/gdal2tiles.rst @@ -22,6 +22,7 @@ Synopsis [-w ] [-t ] [-c <copyright>] [--processes=<NB_PROCESSES>] [--mpi] [--xyz] [--tilesize=<PIXELS>] [--tmscompatible] + [--excluded-values=<EXCLUDED_VALUES>] [--excluded-values-pct-threshold=<EXCLUDED_VALUES_PCT_THRESHOLD>] [-g <googlekey] [-b <bingkey>] <input_file> [<output_dir>] [<COMMON_OPTIONS>] Description @@ -143,6 +144,23 @@ can publish a picture without proper georeferencing too. .. versionadded:: 3.6 +.. option:: --excluded-values=<EXCLUDED_VALUES> + + Comma-separated tuple of values (thus typically "R,G,B"), that are ignored + as contributing source * pixels during resampling. The number of values in + the tuple must be the same as the number of bands, excluding the alpha band. 
+ Several tuples of excluded values may be specified using the "(R1,G1,B2),(R2,G2,B2)" syntax. + Only taken into account by Average currently. + This concept is a bit similar to nodata/alpha, but the main difference is + that pixels matching one of the excluded value tuples are still considered + as valid, when determining the target pixel validity/density. + +.. option:: --excluded-values-pct-threshold=EXCLUDED_VALUES_PCT_THRESHOLD + + Minimum percentage of source pixels that must be set at one of the --excluded-values to cause the excluded + value, that is in majority among source pixels, to be used as the target pixel value. Default value is 50(%) + + .. versionadded:: 3.9 .. option:: -h, --help diff --git a/swig/python/gdal-utils/osgeo_utils/gdal2tiles.py b/swig/python/gdal-utils/osgeo_utils/gdal2tiles.py index c33f08f6e537..6d255b1a61e3 100644 --- a/swig/python/gdal-utils/osgeo_utils/gdal2tiles.py +++ b/swig/python/gdal-utils/osgeo_utils/gdal2tiles.py @@ -873,7 +873,27 @@ def scale_query_to_tile(dsquery, dstile, options, tilefilename=""): tile_size = dstile.RasterXSize tilebands = dstile.RasterCount - if options.resampling == "average": + dsquery.SetGeoTransform( + ( + 0.0, + tile_size / float(querysize), + 0.0, + 0.0, + 0.0, + tile_size / float(querysize), + ) + ) + dstile.SetGeoTransform((0.0, 1.0, 0.0, 0.0, 0.0, 1.0)) + + if options.resampling == "average" and options.excluded_values: + + gdal.Warp( + dstile, + dsquery, + options=f"-r average -wo EXCLUDED_VALUES={options.excluded_values} -wo EXCLUDED_VALUES_PCT_THRESHOLD={options.excluded_values_pct_threshold}", + ) + + elif options.resampling == "average": # Function: gdal.RegenerateOverview() for i in range(1, tilebands + 1): @@ -949,18 +969,6 @@ def scale_query_to_tile(dsquery, dstile, options, tilefilename=""): gdal_resampling = gdal.GRA_Q3 # Other algorithms are implemented by gdal.ReprojectImage(). - dsquery.SetGeoTransform( - ( - 0.0, - tile_size / float(querysize), - 0.0, - 0.0, - 0.0, - tile_size / float(querysize), - ) - ) - dstile.SetGeoTransform((0.0, 1.0, 0.0, 0.0, 0.0, 1.0)) - res = gdal.ReprojectImage(dsquery, dstile, None, None, gdal_resampling) if res != 0: exit_with_error( @@ -1346,6 +1354,7 @@ def create_base_tile(tile_job_info: "TileJobInfo", tile_detail: "TileDetail") -> # Tile dataset in memory tilefilename = os.path.join(output, str(tz), str(tx), "%s.%s" % (ty, tileext)) dstile = mem_drv.Create("", tile_size, tile_size, tilebands) + dstile.GetRasterBand(tilebands).SetColorInterpretation(gdal.GCI_AlphaBand) data = alpha = None @@ -1399,6 +1408,8 @@ def create_base_tile(tile_job_info: "TileJobInfo", tile_detail: "TileDetail") -> # Big ReadRaster query in memory scaled to the tile_size - all but 'near' # algo dsquery = mem_drv.Create("", querysize, querysize, tilebands) + dsquery.GetRasterBand(tilebands).SetColorInterpretation(gdal.GCI_AlphaBand) + # TODO: fill the null value in case a tile without alpha is produced (now # only png tiles are supported) dsquery.WriteRaster( @@ -1422,6 +1433,11 @@ def create_base_tile(tile_job_info: "TileJobInfo", tile_detail: "TileDetail") -> tilefilename, dstile, strict=0, options=_get_creation_options(options) ) + # Remove useless side car file + aux_xml = tilefilename + ".aux.xml" + if gdal.VSIStatL(aux_xml) is not None: + gdal.Unlink(aux_xml) + del dstile # Create a KML file for this tile. 
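For illustration (this sketch is not part of the patch itself): a minimal Python example of how the EXCLUDED_VALUES / EXCLUDED_VALUES_PCT_THRESHOLD warp options wired into gdal2tiles above can be exercised directly through gdal.Warp. It mirrors the autotest/alg/warp.py test added in the first commit; the 2x2 three-band input and the (30,31,32) tuple are illustrative values only, and it assumes a GDAL build (>= 3.9) containing this patch series.

    import struct

    from osgeo import gdal

    # Build a tiny 2x2, 3-band in-memory dataset whose top-right pixel is (30,31,32).
    src_ds = gdal.GetDriverByName("MEM").Create("", 2, 2, 3, gdal.GDT_Byte)
    src_ds.SetGeoTransform([1, 1, 0, 1, 0, 1])
    src_ds.GetRasterBand(1).WriteRaster(0, 0, 2, 2, struct.pack("B" * 4, 10, 20, 30, 40))
    src_ds.GetRasterBand(2).WriteRaster(0, 0, 2, 2, struct.pack("B" * 4, 11, 21, 31, 41))
    src_ds.GetRasterBand(3).WriteRaster(0, 0, 2, 2, struct.pack("B" * 4, 12, 22, 32, 42))

    # The (30,31,32) tuple contributes 25% of the source pixels: with a 24%
    # threshold it becomes the output value; with the default 50% threshold it
    # is simply ignored and the remaining pixels are averaged instead.
    out_ds = gdal.Warp(
        "",
        src_ds,
        options="-of MEM -ts 1 1 -r average "
        "-wo EXCLUDED_VALUES=(30,31,32) -wo EXCLUDED_VALUES_PCT_THRESHOLD=24",
    )
    print(struct.unpack("B" * 3, out_ds.ReadRaster()))  # (30, 31, 32)
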
@@ -1485,10 +1501,12 @@ def create_overview_tile( dsquery = mem_driver.Create( "", 2 * tile_job_info.tile_size, 2 * tile_job_info.tile_size, tilebands ) + dsquery.GetRasterBand(tilebands).SetColorInterpretation(gdal.GCI_AlphaBand) # TODO: fill the null value dstile = mem_driver.Create( "", tile_job_info.tile_size, tile_job_info.tile_size, tilebands ) + dstile.GetRasterBand(tilebands).SetColorInterpretation(gdal.GCI_AlphaBand) usable_base_tiles = [] @@ -1759,6 +1777,19 @@ def optparse_init() -> optparse.OptionParser: type="choice", help="which tile driver to use for the tiles", ) + p.add_option( + "--excluded-values", + dest="excluded_values", + type=str, + help="Tuples of values (e.g. <R>,<G>,<B> or (<R1>,<G1>,<B1>),(<R2>,<G2>,<B2>)) that must be ignored as contributing source pixels during resampling. Only taken into account for average resampling", + ) + p.add_option( + "--excluded-values-pct-threshold", + dest="excluded_values_pct_threshold", + type=float, + default=50, + help="Minimum percentage of source pixels that must be set at one of the --excluded-values to cause the excluded value, that is in majority among source pixels, to be used as the target pixel value. Default value is 50 (%)", + ) # KML options g = optparse.OptionGroup( From 1ad145579699156aa3d22ac943140bd9f4b9cb95 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 15 Apr 2024 11:47:22 +0200 Subject: [PATCH 003/230] Doc: SetIgnoredFields(): clarify interaction with SetAttributeFilter() Fixes #9655 --- ogr/ogrsf_frmts/ogrsf_frmts.dox | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/ogr/ogrsf_frmts/ogrsf_frmts.dox b/ogr/ogrsf_frmts/ogrsf_frmts.dox index d15c60b20d28..0d32a30a69b5 100644 --- a/ogr/ogrsf_frmts/ogrsf_frmts.dox +++ b/ogr/ogrsf_frmts/ogrsf_frmts.dox @@ -3147,6 +3147,10 @@ form depending on the limitations of the format driver. By default, no fields are ignored. + Note that fields that are used in an attribute filter should generally not be set as + ignored fields, as most drivers (such as those relying on the OGR SQL engine) + will be unable to correctly evaluate the attribute filter. + This method is the same as the C function OGR_L_SetIgnoredFields() @param papszFields an array of field names terminated by NULL item. If NULL is passed, the ignored list is cleared. @@ -3167,6 +3171,10 @@ form depending on the limitations of the format driver. By default, no fields are ignored. + Note that fields that are used in an attribute filter should generally not be set as + ignored fields, as most drivers (such as those relying on the OGR SQL engine) + will be unable to correctly evaluate the attribute filter. 
+ This method is the same as the C++ method OGRLayer::SetIgnoredFields() @param hLayer handle to the layer From 63060b867e1ea963f7cc1036139e7edeb67d63a7 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 15 Apr 2024 13:46:35 +0200 Subject: [PATCH 004/230] AAIGRID: fix reading file whose first value is nan (fixes #9666) --- autotest/gdrivers/aaigrid.py | 11 +++++++++++ autotest/gdrivers/data/aaigrid/starting_with_nan.asc | 10 ++++++++++ frmts/aaigrid/aaigriddataset.cpp | 6 +++++- 3 files changed, 26 insertions(+), 1 deletion(-) create mode 100644 autotest/gdrivers/data/aaigrid/starting_with_nan.asc diff --git a/autotest/gdrivers/aaigrid.py b/autotest/gdrivers/aaigrid.py index 26632ff18756..7921467ff543 100755 --- a/autotest/gdrivers/aaigrid.py +++ b/autotest/gdrivers/aaigrid.py @@ -478,3 +478,14 @@ def test_aaigrid_write_south_up_raster(): gdal.GetDriverByName("AAIGRID").Delete( "/vsimem/test_aaigrid_write_south_up_raster.asc" ) + + +############################################################################### +# Test reading a file starting with nan (https://github.com/OSGeo/gdal/issues/9666) + + +def test_aaigrid_starting_with_nan(): + + ds = gdal.Open("data/aaigrid/starting_with_nan.asc") + assert ds.GetRasterBand(1).DataType == gdal.GDT_Float32 + assert ds.GetRasterBand(1).Checksum() == 65300 diff --git a/autotest/gdrivers/data/aaigrid/starting_with_nan.asc b/autotest/gdrivers/data/aaigrid/starting_with_nan.asc new file mode 100644 index 000000000000..dd8d09ac6fda --- /dev/null +++ b/autotest/gdrivers/data/aaigrid/starting_with_nan.asc @@ -0,0 +1,10 @@ +ncols 5 +nrows 5 +xllcorner 0 +yllcorner 0 +cellsize 1 +nan nan nan nan nan +nan nan nan nan nan +nan nan 1.0 nan nan +nan nan nan nan nan +nan nan nan nan nan diff --git a/frmts/aaigrid/aaigriddataset.cpp b/frmts/aaigrid/aaigriddataset.cpp index 1194162df4af..098a8a47f474 100644 --- a/frmts/aaigrid/aaigriddataset.cpp +++ b/frmts/aaigrid/aaigriddataset.cpp @@ -1139,7 +1139,11 @@ GDALDataset *AAIGDataset::CommonOpen(GDALOpenInfo *poOpenInfo, // null seems to be specific of D12 software // See https://github.com/OSGeo/gdal/issues/5095 (i + 5 < poOpenInfo->nHeaderBytes && - memcmp(poOpenInfo->pabyHeader + i, "null ", 5) == 0)) && + memcmp(poOpenInfo->pabyHeader + i, "null ", 5) == 0) || + (i + 4 < poOpenInfo->nHeaderBytes && + EQUALN(reinterpret_cast<const char *>( + poOpenInfo->pabyHeader + i), + "nan ", 4))) && poOpenInfo->pabyHeader[i] != '\n' && poOpenInfo->pabyHeader[i] != '\r') { From 960a92116d870ba565b04e13c8cfa61796e55cea Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 15 Apr 2024 13:49:28 +0200 Subject: [PATCH 005/230] Move tests of autotest/gcore/aaigrid_read.py into autotest/gdrivers/aaigrid.py --- autotest/gcore/aaigrid_read.py | 47 ------------------- autotest/gdrivers/aaigrid.py | 10 ++++ .../data/aaigrid}/byte.tif.grd | 0 3 files changed, 10 insertions(+), 47 deletions(-) delete mode 100755 autotest/gcore/aaigrid_read.py rename autotest/{gcore/data => gdrivers/data/aaigrid}/byte.tif.grd (100%) diff --git a/autotest/gcore/aaigrid_read.py b/autotest/gcore/aaigrid_read.py deleted file mode 100755 index cad2e1b83c81..000000000000 --- a/autotest/gcore/aaigrid_read.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env pytest -############################################################################### -# $Id$ -# -# Project: GDAL/OGR Test Suite -# Purpose: Test basic read support for Arc/Info ASCII grid (AAIGrid) file. 
-# Author: Andrey Kiselev, dron@remotesensing.org -# -############################################################################### -# Copyright (c) 2003, Andrey Kiselev <dron@remotesensing.org> -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Library General Public -# License as published by the Free Software Foundation; either -# version 2 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Library General Public License for more details. -# -# You should have received a copy of the GNU Library General Public -# License along with this library; if not, write to the -# Free Software Foundation, Inc., 59 Temple Place - Suite 330, -# Boston, MA 02111-1307, USA. -############################################################################### - -import gdaltest -import pytest - -############################################################################### -# When imported build a list of units based on the files available. - -init_list = [ - ("byte.tif.grd", 4672), -] - - -@pytest.mark.parametrize( - "filename,checksum", - init_list, - ids=[tup[0].split(".")[0] for tup in init_list], -) -@pytest.mark.require_driver("AAIGrid") -def test_aaigrid_open(filename, checksum): - ut = gdaltest.GDALTest("AAIGrid", filename, 1, checksum) - ut.testOpen() diff --git a/autotest/gdrivers/aaigrid.py b/autotest/gdrivers/aaigrid.py index 7921467ff543..a91c0c7f63e6 100755 --- a/autotest/gdrivers/aaigrid.py +++ b/autotest/gdrivers/aaigrid.py @@ -41,6 +41,16 @@ pytestmark = pytest.mark.require_driver("AAIGRID") + +############################################################################### + + +def test_aaigrid_read_byte_tif_grd(): + + ds = gdal.Open("data/aaigrid/byte.tif.grd") + assert ds.GetRasterBand(1).Checksum() == 4672 + + ############################################################################### # Perform simple read test. 
diff --git a/autotest/gcore/data/byte.tif.grd b/autotest/gdrivers/data/aaigrid/byte.tif.grd similarity index 100% rename from autotest/gcore/data/byte.tif.grd rename to autotest/gdrivers/data/aaigrid/byte.tif.grd From 307c2490468d11f95da2a32aec558a80919fb7eb Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 15 Apr 2024 13:51:49 +0200 Subject: [PATCH 006/230] Move tests of autotest/gcore/aaigrid_write.py into autotest/gdrivers/aaigrid.py --- autotest/gcore/aaigrid_write.py | 52 --------------------------------- autotest/gdrivers/aaigrid.py | 24 +++++++++++++++ 2 files changed, 24 insertions(+), 52 deletions(-) delete mode 100755 autotest/gcore/aaigrid_write.py diff --git a/autotest/gcore/aaigrid_write.py b/autotest/gcore/aaigrid_write.py deleted file mode 100755 index e4028b2898a7..000000000000 --- a/autotest/gcore/aaigrid_write.py +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env pytest -############################################################################### -# $Id$ -# -# Project: GDAL/OGR Test Suite -# Purpose: Test read/write functionality for Arc/Info ASCII grid -# (AAIGrid) driver -# Author: Andrey Kiselev, dron@remotesensing.org -# -############################################################################### -# Copyright (c) 2003, Andrey Kiselev <dron@remotesensing.org> -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Library General Public -# License as published by the Free Software Foundation; either -# version 2 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Library General Public License for more details. -# -# You should have received a copy of the GNU Library General Public -# License along with this library; if not, write to the -# Free Software Foundation, Inc., 59 Temple Place - Suite 330, -# Boston, MA 02111-1307, USA. -############################################################################### - -import gdaltest -import pytest - -############################################################################### -# When imported build a list of units based on the files available. 
- -init_list = [ - ("byte.tif", 4672), - ("int16.tif", 4672), - ("uint16.tif", 4672), - ("float32.tif", 4672), - ("utmsmall.tif", 50054), -] - - -@pytest.mark.parametrize( - "filename,checksum", - init_list, - ids=[tup[0].split(".")[0] for tup in init_list], -) -@pytest.mark.require_driver("AAIGrid") -def test_aaigrid_create(filename, checksum): - ut = gdaltest.GDALTest("AAIGrid", filename, 1, checksum) - ut.testCreateCopy() diff --git a/autotest/gdrivers/aaigrid.py b/autotest/gdrivers/aaigrid.py index a91c0c7f63e6..a719e4618f01 100755 --- a/autotest/gdrivers/aaigrid.py +++ b/autotest/gdrivers/aaigrid.py @@ -61,6 +61,30 @@ def test_aaigrid_1(): tst.testOpen() +############################################################################### +# CreateCopy tests + +init_list = [ + ("byte.tif", 4672), + ("int16.tif", 4672), + ("uint16.tif", 4672), + ("float32.tif", 4672), + ("utmsmall.tif", 50054), +] + + +@pytest.mark.parametrize( + "filename,checksum", + init_list, + ids=[tup[0].split(".")[0] for tup in init_list], +) +def test_aaigrid_createcopy(filename, checksum): + ut = gdaltest.GDALTest( + "AAIGrid", "../gcore/data/" + filename, 1, checksum, filename_absolute=True + ) + ut.testCreateCopy() + + ############################################################################### # Verify some auxiliary data. From 011dde0fcf78746cd76d21f97a9d879ba3102e6d Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 15 Apr 2024 14:16:02 +0200 Subject: [PATCH 007/230] Doc: S57 driver: add details about the Edge layer Fixes https://github.com/OSGeo/gdal/issues/9665 --- doc/source/drivers/vector/s57.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/doc/source/drivers/vector/s57.rst b/doc/source/drivers/vector/s57.rst index 5693d39a9342..f1513bb69775 100644 --- a/doc/source/drivers/vector/s57.rst +++ b/doc/source/drivers/vector/s57.rst @@ -172,6 +172,11 @@ They can also be specified independently as open options to the driver. Should all the low level geometry primitives be returned as special IsolatedNode, ConnectedNode, Edge and Face layers. + Note that for features of the Edge layer, the returned OGR LineString + geometry does not include the start and end nodes. Their coordinates can + be retrieved by joining with the NAME_RCID_0 (identifier of the start node) + and NAME_RCID_1 (identifier of the end node) with the RCID field of + the ConnectedNode layer. - .. 
oo:: PRESERVE_EMPTY_NUMBERS :choices: ON, OFF From e463b57846cd36773841a904851a647bd7c4cf1a Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 15 Apr 2024 14:45:52 +0200 Subject: [PATCH 008/230] OGRLayer::SetIgnoredFields(): make it take a CSLConstList argument instead of const char* --- MIGRATION_GUIDE.TXT | 3 + gnm/gnm.h | 2 +- gnm/gnmlayer.cpp | 2 +- ogr/ogrsf_frmts/generic/ogrlayer.cpp | 13 ++-- ogr/ogrsf_frmts/generic/ogrlayerdecorator.cpp | 2 +- ogr/ogrsf_frmts/generic/ogrlayerdecorator.h | 2 +- ogr/ogrsf_frmts/generic/ogrlayerpool.cpp | 2 +- ogr/ogrsf_frmts/generic/ogrlayerpool.h | 2 +- ogr/ogrsf_frmts/generic/ogrmutexedlayer.cpp | 2 +- ogr/ogrsf_frmts/generic/ogrmutexedlayer.h | 2 +- ogr/ogrsf_frmts/generic/ogrunionlayer.cpp | 62 +++++++---------- ogr/ogrsf_frmts/generic/ogrunionlayer.h | 4 +- ogr/ogrsf_frmts/gpkg/ogr_geopackage.h | 2 +- ogr/ogrsf_frmts/gpkg/ogrgeopackagelayer.cpp | 2 +- ogr/ogrsf_frmts/ngw/ogr_ngw.h | 2 +- ogr/ogrsf_frmts/ngw/ogrngwlayer.cpp | 4 +- ogr/ogrsf_frmts/ogrsf_frmts.h | 2 +- ogr/ogrsf_frmts/parquet/ogr_parquet.h | 2 +- ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp | 2 +- ogr/ogrsf_frmts/vrt/ogr_vrt.h | 2 +- ogr/ogrsf_frmts/vrt/ogrvrtlayer.cpp | 68 ++++++++----------- ogr/ogrsf_frmts/wfs/ogr_wfs.h | 2 +- ogr/ogrsf_frmts/wfs/ogrwfslayer.cpp | 2 +- 23 files changed, 79 insertions(+), 109 deletions(-) diff --git a/MIGRATION_GUIDE.TXT b/MIGRATION_GUIDE.TXT index f753bd3fb528..0031236db8b6 100644 --- a/MIGRATION_GUIDE.TXT +++ b/MIGRATION_GUIDE.TXT @@ -40,6 +40,9 @@ MIGRATION GUIDE FROM GDAL 3.8 to GDAL 3.9 GeoPackage, PostgreSQL, Shapefile, OpenFileGDB, MITAB, Memory, GeoJSON, JSONFG, TopoJSON, ESRIJSON, ODS, XLSX. +- OGRLayer::SetIgnoredFields() now accepts a ``CSLConstList papszIgnoredFields`` + instead of a ``const char** papszIgnoredFields`` + MIGRATION GUIDE FROM GDAL 3.7 to GDAL 3.8 ----------------------------------------- diff --git a/gnm/gnm.h b/gnm/gnm.h index c46a90626fb0..b7761533d688 100644 --- a/gnm/gnm.h +++ b/gnm/gnm.h @@ -552,7 +552,7 @@ class GNMGenericLayer : public OGRLayer virtual const char *GetFIDColumn() override; virtual const char *GetGeometryColumn() override; - virtual OGRErr SetIgnoredFields(const char **papszFields) override; + virtual OGRErr SetIgnoredFields(CSLConstList papszFields) override; /** Intersection */ OGRErr Intersection(OGRLayer *pLayerMethod, OGRLayer *pLayerResult, diff --git a/gnm/gnmlayer.cpp b/gnm/gnmlayer.cpp index 86204d0b0fde..19c2c5b8b8b3 100644 --- a/gnm/gnmlayer.cpp +++ b/gnm/gnmlayer.cpp @@ -57,7 +57,7 @@ const char *GNMGenericLayer::GetGeometryColumn() return m_poLayer->GetGeometryColumn(); } -OGRErr GNMGenericLayer::SetIgnoredFields(const char **papszFields) +OGRErr GNMGenericLayer::SetIgnoredFields(CSLConstList papszFields) { return m_poLayer->SetIgnoredFields(papszFields); } diff --git a/ogr/ogrsf_frmts/generic/ogrlayer.cpp b/ogr/ogrsf_frmts/generic/ogrlayer.cpp index 0e6c5460b92c..eb814d220e02 100644 --- a/ogr/ogrsf_frmts/generic/ogrlayer.cpp +++ b/ogr/ogrsf_frmts/generic/ogrlayer.cpp @@ -2251,7 +2251,7 @@ OGRwkbGeometryType OGR_L_GetGeomType(OGRLayerH hLayer) /* SetIgnoredFields() */ /************************************************************************/ -OGRErr OGRLayer::SetIgnoredFields(const char **papszFields) +OGRErr OGRLayer::SetIgnoredFields(CSLConstList papszFields) { OGRFeatureDefn *poDefn = GetLayerDefn(); @@ -2266,13 +2266,9 @@ OGRErr OGRLayer::SetIgnoredFields(const char **papszFields) } poDefn->SetStyleIgnored(FALSE); - if (papszFields == nullptr) - 
return OGRERR_NONE; - // ignore some fields - while (*papszFields) + for (const char *pszFieldName : cpl::Iterate(papszFields)) { - const char *pszFieldName = *papszFields; // check special fields if (EQUAL(pszFieldName, "OGR_GEOMETRY")) poDefn->SetGeometryIgnored(TRUE); @@ -2296,7 +2292,6 @@ OGRErr OGRLayer::SetIgnoredFields(const char **papszFields) else poDefn->GetFieldDefn(iField)->SetIgnored(TRUE); } - papszFields++; } return OGRERR_NONE; @@ -5440,7 +5435,7 @@ OGRLayer::GetGeometryTypes(int iGeomField, int nFlagsGGT, int &nEntryCountOut, if (poDefn->IsStyleIgnored()) aosIgnoredFieldsRestore.AddString("OGR_STYLE"); aosIgnoredFields.AddString("OGR_STYLE"); - SetIgnoredFields(const_cast<const char **>(aosIgnoredFields.List())); + SetIgnoredFields(aosIgnoredFields.List()); // Iterate over features std::map<OGRwkbGeometryType, int64_t> oMapCount; @@ -5488,7 +5483,7 @@ OGRLayer::GetGeometryTypes(int iGeomField, int nFlagsGGT, int &nEntryCountOut, } // Restore ignore fields state - SetIgnoredFields(const_cast<const char **>(aosIgnoredFieldsRestore.List())); + SetIgnoredFields(aosIgnoredFieldsRestore.List()); if (bInterrupted) { diff --git a/ogr/ogrsf_frmts/generic/ogrlayerdecorator.cpp b/ogr/ogrsf_frmts/generic/ogrlayerdecorator.cpp index 69d2cae39476..ce04097ccf43 100644 --- a/ogr/ogrsf_frmts/generic/ogrlayerdecorator.cpp +++ b/ogr/ogrsf_frmts/generic/ogrlayerdecorator.cpp @@ -347,7 +347,7 @@ const char *OGRLayerDecorator::GetGeometryColumn() return m_poDecoratedLayer->GetGeometryColumn(); } -OGRErr OGRLayerDecorator::SetIgnoredFields(const char **papszFields) +OGRErr OGRLayerDecorator::SetIgnoredFields(CSLConstList papszFields) { if (!m_poDecoratedLayer) return OGRERR_FAILURE; diff --git a/ogr/ogrsf_frmts/generic/ogrlayerdecorator.h b/ogr/ogrsf_frmts/generic/ogrlayerdecorator.h index 524b18f67bc1..da70ce916c54 100644 --- a/ogr/ogrsf_frmts/generic/ogrlayerdecorator.h +++ b/ogr/ogrsf_frmts/generic/ogrlayerdecorator.h @@ -116,7 +116,7 @@ class CPL_DLL OGRLayerDecorator : public OGRLayer virtual const char *GetFIDColumn() override; virtual const char *GetGeometryColumn() override; - virtual OGRErr SetIgnoredFields(const char **papszFields) override; + virtual OGRErr SetIgnoredFields(CSLConstList papszFields) override; virtual char **GetMetadata(const char *pszDomain = "") override; virtual CPLErr SetMetadata(char **papszMetadata, diff --git a/ogr/ogrsf_frmts/generic/ogrlayerpool.cpp b/ogr/ogrsf_frmts/generic/ogrlayerpool.cpp index a67487a46b1b..9bceadb97b2c 100644 --- a/ogr/ogrsf_frmts/generic/ogrlayerpool.cpp +++ b/ogr/ogrsf_frmts/generic/ogrlayerpool.cpp @@ -666,7 +666,7 @@ const char *OGRProxiedLayer::GetGeometryColumn() /* SetIgnoredFields() */ /************************************************************************/ -OGRErr OGRProxiedLayer::SetIgnoredFields(const char **papszFields) +OGRErr OGRProxiedLayer::SetIgnoredFields(CSLConstList papszFields) { if (poUnderlyingLayer == nullptr && !OpenUnderlyingLayer()) return OGRERR_FAILURE; diff --git a/ogr/ogrsf_frmts/generic/ogrlayerpool.h b/ogr/ogrsf_frmts/generic/ogrlayerpool.h index 6589b0e3c987..724180af2a28 100644 --- a/ogr/ogrsf_frmts/generic/ogrlayerpool.h +++ b/ogr/ogrsf_frmts/generic/ogrlayerpool.h @@ -186,7 +186,7 @@ class OGRProxiedLayer : public OGRAbstractProxiedLayer virtual const char *GetFIDColumn() override; virtual const char *GetGeometryColumn() override; - virtual OGRErr SetIgnoredFields(const char **papszFields) override; + virtual OGRErr SetIgnoredFields(CSLConstList papszFields) override; virtual OGRErr Rename(const char 
*pszNewName) override; }; diff --git a/ogr/ogrsf_frmts/generic/ogrmutexedlayer.cpp b/ogr/ogrsf_frmts/generic/ogrmutexedlayer.cpp index 9788abff0467..c96e9337393e 100644 --- a/ogr/ogrsf_frmts/generic/ogrmutexedlayer.cpp +++ b/ogr/ogrsf_frmts/generic/ogrmutexedlayer.cpp @@ -292,7 +292,7 @@ const char *OGRMutexedLayer::GetGeometryColumn() return OGRLayerDecorator::GetGeometryColumn(); } -OGRErr OGRMutexedLayer::SetIgnoredFields(const char **papszFields) +OGRErr OGRMutexedLayer::SetIgnoredFields(CSLConstList papszFields) { CPLMutexHolderOptionalLockD(m_hMutex); return OGRLayerDecorator::SetIgnoredFields(papszFields); diff --git a/ogr/ogrsf_frmts/generic/ogrmutexedlayer.h b/ogr/ogrsf_frmts/generic/ogrmutexedlayer.h index 0ab1d9093305..adbbba35f91d 100644 --- a/ogr/ogrsf_frmts/generic/ogrmutexedlayer.h +++ b/ogr/ogrsf_frmts/generic/ogrmutexedlayer.h @@ -124,7 +124,7 @@ class CPL_DLL OGRMutexedLayer : public OGRLayerDecorator virtual const char *GetFIDColumn() override; virtual const char *GetGeometryColumn() override; - virtual OGRErr SetIgnoredFields(const char **papszFields) override; + virtual OGRErr SetIgnoredFields(CSLConstList papszFields) override; virtual char **GetMetadata(const char *pszDomain = "") override; virtual CPLErr SetMetadata(char **papszMetadata, diff --git a/ogr/ogrsf_frmts/generic/ogrunionlayer.cpp b/ogr/ogrsf_frmts/generic/ogrunionlayer.cpp index 8e9124dcc6e2..eed6543d2be4 100644 --- a/ogr/ogrsf_frmts/generic/ogrunionlayer.cpp +++ b/ogr/ogrsf_frmts/generic/ogrunionlayer.cpp @@ -86,8 +86,7 @@ OGRUnionLayer::OGRUnionLayer(const char *pszName, int nSrcLayersIn, nFields(0), papoFields(nullptr), nGeomFields(0), papoGeomFields(nullptr), eFieldStrategy(FIELD_UNION_ALL_LAYERS), bPreserveSrcFID(FALSE), nFeatureCount(-1), iCurLayer(-1), pszAttributeFilter(nullptr), - nNextFID(0), panMap(nullptr), papszIgnoredFields(nullptr), - bAttrFilterPassThroughValue(-1), + nNextFID(0), panMap(nullptr), bAttrFilterPassThroughValue(-1), pabModifiedLayers(static_cast<int *>(CPLCalloc(sizeof(int), nSrcLayers))), pabCheckIfAutoWrap( static_cast<int *>(CPLCalloc(sizeof(int), nSrcLayers))), @@ -120,7 +119,6 @@ OGRUnionLayer::~OGRUnionLayer() CPLFree(pszAttributeFilter); CPLFree(panMap); - CSLDestroy(papszIgnoredFields); CPLFree(pabModifiedLayers); CPLFree(pabCheckIfAutoWrap); @@ -522,8 +520,7 @@ void OGRUnionLayer::ConfigureActiveLayer() for (int i = 0; i < poSrcFeatureDefn->GetFieldCount(); i++) { OGRFieldDefn *poSrcFieldDefn = poSrcFeatureDefn->GetFieldDefn(i); - if (CSLFindString(papszIgnoredFields, poSrcFieldDefn->GetNameRef()) == - -1) + if (m_aosIgnoredFields.FindString(poSrcFieldDefn->GetNameRef()) == -1) { panMap[i] = poFeatureDefn->GetFieldIndex(poSrcFieldDefn->GetNameRef()); @@ -536,75 +533,65 @@ void OGRUnionLayer::ConfigureActiveLayer() if (papoSrcLayers[iCurLayer]->TestCapability(OLCIgnoreFields)) { - char **papszIter = papszIgnoredFields; - char **papszFieldsSrc = nullptr; - while (papszIter != nullptr && *papszIter != nullptr) + CPLStringList aosFieldSrc; + for (const char *pszFieldName : cpl::Iterate(m_aosIgnoredFields)) { - const char *pszFieldName = *papszIter; if (EQUAL(pszFieldName, "OGR_GEOMETRY") || EQUAL(pszFieldName, "OGR_STYLE") || poSrcFeatureDefn->GetFieldIndex(pszFieldName) >= 0 || poSrcFeatureDefn->GetGeomFieldIndex(pszFieldName) >= 0) { - papszFieldsSrc = CSLAddString(papszFieldsSrc, pszFieldName); + aosFieldSrc.AddString(pszFieldName); } - papszIter++; } /* Attribute fields */ - int *panSrcFieldsUsed = static_cast<int *>( - CPLCalloc(sizeof(int), 
poSrcFeatureDefn->GetFieldCount())); + std::vector<bool> abSrcFieldsUsed(poSrcFeatureDefn->GetFieldCount()); for (int iField = 0; iField < poFeatureDefn->GetFieldCount(); iField++) { - OGRFieldDefn *poFieldDefn = poFeatureDefn->GetFieldDefn(iField); - int iSrcField = + const OGRFieldDefn *poFieldDefn = + poFeatureDefn->GetFieldDefn(iField); + const int iSrcField = poSrcFeatureDefn->GetFieldIndex(poFieldDefn->GetNameRef()); if (iSrcField >= 0) - panSrcFieldsUsed[iSrcField] = TRUE; + abSrcFieldsUsed[iSrcField] = true; } for (int iSrcField = 0; iSrcField < poSrcFeatureDefn->GetFieldCount(); iSrcField++) { - if (!panSrcFieldsUsed[iSrcField]) + if (!abSrcFieldsUsed[iSrcField]) { - OGRFieldDefn *poSrcDefn = + const OGRFieldDefn *poSrcDefn = poSrcFeatureDefn->GetFieldDefn(iSrcField); - papszFieldsSrc = - CSLAddString(papszFieldsSrc, poSrcDefn->GetNameRef()); + aosFieldSrc.AddString(poSrcDefn->GetNameRef()); } } - CPLFree(panSrcFieldsUsed); /* geometry fields now */ - panSrcFieldsUsed = static_cast<int *>( - CPLCalloc(sizeof(int), poSrcFeatureDefn->GetGeomFieldCount())); + abSrcFieldsUsed.clear(); + abSrcFieldsUsed.resize(poSrcFeatureDefn->GetGeomFieldCount()); for (int iField = 0; iField < poFeatureDefn->GetGeomFieldCount(); iField++) { - OGRGeomFieldDefn *poFieldDefn = + const OGRGeomFieldDefn *poFieldDefn = poFeatureDefn->GetGeomFieldDefn(iField); - int iSrcField = + const int iSrcField = poSrcFeatureDefn->GetGeomFieldIndex(poFieldDefn->GetNameRef()); if (iSrcField >= 0) - panSrcFieldsUsed[iSrcField] = TRUE; + abSrcFieldsUsed[iSrcField] = true; } for (int iSrcField = 0; iSrcField < poSrcFeatureDefn->GetGeomFieldCount(); iSrcField++) { - if (!panSrcFieldsUsed[iSrcField]) + if (!abSrcFieldsUsed[iSrcField]) { - OGRGeomFieldDefn *poSrcDefn = + const OGRGeomFieldDefn *poSrcDefn = poSrcFeatureDefn->GetGeomFieldDefn(iSrcField); - papszFieldsSrc = - CSLAddString(papszFieldsSrc, poSrcDefn->GetNameRef()); + aosFieldSrc.AddString(poSrcDefn->GetNameRef()); } } - CPLFree(panSrcFieldsUsed); - - papoSrcLayers[iCurLayer]->SetIgnoredFields( - const_cast<const char **>(papszFieldsSrc)); - CSLDestroy(papszFieldsSrc); + papoSrcLayers[iCurLayer]->SetIgnoredFields(aosFieldSrc.List()); } } @@ -1393,14 +1380,13 @@ OGRFeature *OGRUnionLayer::TranslateFromSrcLayer(OGRFeature *poSrcFeature) /* SetIgnoredFields() */ /************************************************************************/ -OGRErr OGRUnionLayer::SetIgnoredFields(const char **papszFields) +OGRErr OGRUnionLayer::SetIgnoredFields(CSLConstList papszFields) { OGRErr eErr = OGRLayer::SetIgnoredFields(papszFields); if (eErr != OGRERR_NONE) return eErr; - CSLDestroy(papszIgnoredFields); - papszIgnoredFields = papszFields ? 
CSLDuplicate(papszFields) : nullptr; + m_aosIgnoredFields = papszFields; return eErr; } diff --git a/ogr/ogrsf_frmts/generic/ogrunionlayer.h b/ogr/ogrsf_frmts/generic/ogrunionlayer.h index f63cc987c324..f6bbfe48ace1 100644 --- a/ogr/ogrsf_frmts/generic/ogrunionlayer.h +++ b/ogr/ogrsf_frmts/generic/ogrunionlayer.h @@ -90,7 +90,7 @@ class CPL_DLL OGRUnionLayer final : public OGRLayer char *pszAttributeFilter; int nNextFID; int *panMap; - char **papszIgnoredFields; + CPLStringList m_aosIgnoredFields{}; int bAttrFilterPassThroughValue; int *pabModifiedLayers; int *pabCheckIfAutoWrap; @@ -167,7 +167,7 @@ class CPL_DLL OGRUnionLayer final : public OGRLayer virtual void SetSpatialFilter(OGRGeometry *poGeomIn) override; virtual void SetSpatialFilter(int iGeomField, OGRGeometry *) override; - virtual OGRErr SetIgnoredFields(const char **papszFields) override; + virtual OGRErr SetIgnoredFields(CSLConstList papszFields) override; virtual OGRErr SyncToDisk() override; }; diff --git a/ogr/ogrsf_frmts/gpkg/ogr_geopackage.h b/ogr/ogrsf_frmts/gpkg/ogr_geopackage.h index eeb300ce8d69..58ba064d5fbd 100644 --- a/ogr/ogrsf_frmts/gpkg/ogr_geopackage.h +++ b/ogr/ogrsf_frmts/gpkg/ogr_geopackage.h @@ -645,7 +645,7 @@ class OGRGeoPackageLayer CPL_NON_FINAL : public OGRLayer, return m_poFeatureDefn; } - OGRErr SetIgnoredFields(const char **papszFields) override; + OGRErr SetIgnoredFields(CSLConstList papszFields) override; virtual bool HasFastSpatialFilter(int /*iGeomCol*/) override { diff --git a/ogr/ogrsf_frmts/gpkg/ogrgeopackagelayer.cpp b/ogr/ogrsf_frmts/gpkg/ogrgeopackagelayer.cpp index 82c578364829..fb16c4b49f9a 100644 --- a/ogr/ogrsf_frmts/gpkg/ogrgeopackagelayer.cpp +++ b/ogr/ogrsf_frmts/gpkg/ogrgeopackagelayer.cpp @@ -1242,7 +1242,7 @@ void OGRGeoPackageLayer::BuildFeatureDefn(const char *pszLayerName, /* SetIgnoredFields() */ /************************************************************************/ -OGRErr OGRGeoPackageLayer::SetIgnoredFields(const char **papszFields) +OGRErr OGRGeoPackageLayer::SetIgnoredFields(CSLConstList papszFields) { OGRErr eErr = OGRLayer::SetIgnoredFields(papszFields); if (eErr == OGRERR_NONE) diff --git a/ogr/ogrsf_frmts/ngw/ogr_ngw.h b/ogr/ogrsf_frmts/ngw/ogr_ngw.h index e00372e53eed..998a9ce4c4c8 100644 --- a/ogr/ogrsf_frmts/ngw/ogr_ngw.h +++ b/ogr/ogrsf_frmts/ngw/ogr_ngw.h @@ -197,7 +197,7 @@ class OGRNGWLayer final : public OGRLayer virtual CPLErr SetMetadataItem(const char *pszName, const char *pszValue, const char *pszDomain = "") override; - virtual OGRErr SetIgnoredFields(const char **papszFields) override; + virtual OGRErr SetIgnoredFields(CSLConstList papszFields) override; virtual OGRErr SetAttributeFilter(const char *pszQuery) override; virtual void SetSpatialFilter(OGRGeometry *poGeom) override; virtual void SetSpatialFilter(int iGeomField, OGRGeometry *poGeom) override; diff --git a/ogr/ogrsf_frmts/ngw/ogrngwlayer.cpp b/ogr/ogrsf_frmts/ngw/ogrngwlayer.cpp index 594bb96cbeef..fe4b41291705 100644 --- a/ogr/ogrsf_frmts/ngw/ogrngwlayer.cpp +++ b/ogr/ogrsf_frmts/ngw/ogrngwlayer.cpp @@ -1654,7 +1654,7 @@ OGRErr OGRNGWLayer::ICreateFeature(OGRFeature *poFeature) /* * SetIgnoredFields() */ -OGRErr OGRNGWLayer::SetIgnoredFields(const char **papszFields) +OGRErr OGRNGWLayer::SetIgnoredFields(CSLConstList papszFields) { OGRErr eResult = OGRLayer::SetIgnoredFields(papszFields); if (eResult != OGRERR_NONE) @@ -1832,7 +1832,7 @@ OGRErr OGRNGWLayer::SetSelectedFields(const std::set<std::string> &aosFields) } aosIgnoreFields.AddString(poFieldDefn->GetNameRef()); } - return 
SetIgnoredFields(const_cast<const char **>(aosIgnoreFields.List())); + return SetIgnoredFields(aosIgnoreFields.List()); } /* diff --git a/ogr/ogrsf_frmts/ogrsf_frmts.h b/ogr/ogrsf_frmts/ogrsf_frmts.h index 309be765f8e3..6316b1bbf945 100644 --- a/ogr/ogrsf_frmts/ogrsf_frmts.h +++ b/ogr/ogrsf_frmts/ogrsf_frmts.h @@ -300,7 +300,7 @@ class CPL_DLL OGRLayer : public GDALMajorObject virtual const char *GetFIDColumn(); virtual const char *GetGeometryColumn(); - virtual OGRErr SetIgnoredFields(const char **papszFields); + virtual OGRErr SetIgnoredFields(CSLConstList papszFields); virtual OGRGeometryTypeCounter * GetGeometryTypes(int iGeomField, int nFlagsGGT, int &nEntryCountOut, diff --git a/ogr/ogrsf_frmts/parquet/ogr_parquet.h b/ogr/ogrsf_frmts/parquet/ogr_parquet.h index 0b36c7b914c1..a8964b87dacd 100644 --- a/ogr/ogrsf_frmts/parquet/ogr_parquet.h +++ b/ogr/ogrsf_frmts/parquet/ogr_parquet.h @@ -157,7 +157,7 @@ class OGRParquetLayer final : public OGRParquetLayerBase OGRFeature *GetFeature(GIntBig nFID) override; GIntBig GetFeatureCount(int bForce) override; int TestCapability(const char *pszCap) override; - OGRErr SetIgnoredFields(const char **papszFields) override; + OGRErr SetIgnoredFields(CSLConstList papszFields) override; const char *GetMetadataItem(const char *pszName, const char *pszDomain = "") override; char **GetMetadata(const char *pszDomain = "") override; diff --git a/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp b/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp index 18838645cea3..26694f5af6c4 100644 --- a/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp +++ b/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp @@ -1814,7 +1814,7 @@ void OGRParquetLayer::InvalidateCachedBatches() /* SetIgnoredFields() */ /************************************************************************/ -OGRErr OGRParquetLayer::SetIgnoredFields(const char **papszFields) +OGRErr OGRParquetLayer::SetIgnoredFields(CSLConstList papszFields) { m_bIgnoredFields = false; m_anRequestedParquetColumns.clear(); diff --git a/ogr/ogrsf_frmts/vrt/ogr_vrt.h b/ogr/ogrsf_frmts/vrt/ogr_vrt.h index 98847aaef143..0884f421df44 100644 --- a/ogr/ogrsf_frmts/vrt/ogr_vrt.h +++ b/ogr/ogrsf_frmts/vrt/ogr_vrt.h @@ -201,7 +201,7 @@ class OGRVRTLayer final : public OGRLayer virtual OGRErr CommitTransaction() override; virtual OGRErr RollbackTransaction() override; - virtual OGRErr SetIgnoredFields(const char **papszFields) override; + virtual OGRErr SetIgnoredFields(CSLConstList papszFields) override; GDALDataset *GetSrcDataset(); }; diff --git a/ogr/ogrsf_frmts/vrt/ogrvrtlayer.cpp b/ogr/ogrsf_frmts/vrt/ogrvrtlayer.cpp index 4decb5cf644c..2b4cedd558e6 100644 --- a/ogr/ogrsf_frmts/vrt/ogrvrtlayer.cpp +++ b/ogr/ogrsf_frmts/vrt/ogrvrtlayer.cpp @@ -2390,7 +2390,7 @@ OGRErr OGRVRTLayer::RollbackTransaction() /* SetIgnoredFields() */ /************************************************************************/ -OGRErr OGRVRTLayer::SetIgnoredFields(const char **papszFields) +OGRErr OGRVRTLayer::SetIgnoredFields(CSLConstList papszFields) { if (!bHasFullInitialized) FullInitialize(); @@ -2404,18 +2404,16 @@ OGRErr OGRVRTLayer::SetIgnoredFields(const char **papszFields) if (eErr != OGRERR_NONE) return eErr; - const char **papszIter = papszFields; - char **papszFieldsSrc = nullptr; + CPLStringList aosFieldsSrc; // Translate explicitly ignored fields of VRT layers to their equivalent // source fields. 
- while (papszIter != nullptr && *papszIter != nullptr) + for (const char *pszFieldName : cpl::Iterate(papszFields)) { - const char *pszFieldName = *papszIter; if (EQUAL(pszFieldName, "OGR_GEOMETRY") || EQUAL(pszFieldName, "OGR_STYLE")) { - papszFieldsSrc = CSLAddString(papszFieldsSrc, pszFieldName); + aosFieldsSrc.AddString(pszFieldName); } else { @@ -2449,10 +2447,9 @@ OGRErr OGRVRTLayer::SetIgnoredFields(const char **papszFields) } if (bOKToIgnore) { - OGRFieldDefn *poSrcDefn = + const OGRFieldDefn *poSrcDefn = GetSrcLayerDefn()->GetFieldDefn(iSrcField); - papszFieldsSrc = CSLAddString(papszFieldsSrc, - poSrcDefn->GetNameRef()); + aosFieldsSrc.AddString(poSrcDefn->GetNameRef()); } } } @@ -2465,26 +2462,23 @@ OGRErr OGRVRTLayer::SetIgnoredFields(const char **papszFields) int iSrcField = apoGeomFieldProps[iVRTField]->iGeomField; if (iSrcField >= 0) { - OGRGeomFieldDefn *poSrcDefn = + const OGRGeomFieldDefn *poSrcDefn = GetSrcLayerDefn()->GetGeomFieldDefn(iSrcField); - papszFieldsSrc = CSLAddString(papszFieldsSrc, - poSrcDefn->GetNameRef()); + aosFieldsSrc.AddString(poSrcDefn->GetNameRef()); } } } } - papszIter++; } // Add source fields that are not referenced by VRT layer. - int *panSrcFieldsUsed = static_cast<int *>( - CPLCalloc(sizeof(int), GetSrcLayerDefn()->GetFieldCount())); + std::vector<bool> abSrcFieldUsed(GetSrcLayerDefn()->GetFieldCount()); for (int iVRTField = 0; iVRTField < GetLayerDefn()->GetFieldCount(); iVRTField++) { const int iSrcField = anSrcField[iVRTField]; if (iSrcField >= 0) - panSrcFieldsUsed[iSrcField] = TRUE; + abSrcFieldUsed[iSrcField] = true; } for (int iVRTField = 0; iVRTField < GetLayerDefn()->GetGeomFieldCount(); iVRTField++) @@ -2497,16 +2491,16 @@ OGRErr OGRVRTLayer::SetIgnoredFields(const char **papszFields) { int iSrcField = apoGeomFieldProps[iVRTField]->iGeomXField; if (iSrcField >= 0) - panSrcFieldsUsed[iSrcField] = TRUE; + abSrcFieldUsed[iSrcField] = true; iSrcField = apoGeomFieldProps[iVRTField]->iGeomYField; if (iSrcField >= 0) - panSrcFieldsUsed[iSrcField] = TRUE; + abSrcFieldUsed[iSrcField] = true; iSrcField = apoGeomFieldProps[iVRTField]->iGeomZField; if (iSrcField >= 0) - panSrcFieldsUsed[iSrcField] = TRUE; + abSrcFieldUsed[iSrcField] = true; iSrcField = apoGeomFieldProps[iVRTField]->iGeomMField; if (iSrcField >= 0) - panSrcFieldsUsed[iSrcField] = TRUE; + abSrcFieldUsed[iSrcField] = true; } // Similarly for other kinds of geometry fields. else if (eGeometryStyle == VGS_WKT || eGeometryStyle == VGS_WKB || @@ -2514,29 +2508,27 @@ OGRErr OGRVRTLayer::SetIgnoredFields(const char **papszFields) { int iSrcField = apoGeomFieldProps[iVRTField]->iGeomField; if (iSrcField >= 0) - panSrcFieldsUsed[iSrcField] = TRUE; + abSrcFieldUsed[iSrcField] = true; } } if (iStyleField >= 0) - panSrcFieldsUsed[iStyleField] = TRUE; + abSrcFieldUsed[iStyleField] = true; if (iFIDField >= 0) - panSrcFieldsUsed[iFIDField] = TRUE; + abSrcFieldUsed[iFIDField] = true; for (int iSrcField = 0; iSrcField < GetSrcLayerDefn()->GetFieldCount(); iSrcField++) { - if (!panSrcFieldsUsed[iSrcField]) + if (!abSrcFieldUsed[iSrcField]) { - OGRFieldDefn *poSrcDefn = + const OGRFieldDefn *poSrcDefn = GetSrcLayerDefn()->GetFieldDefn(iSrcField); - papszFieldsSrc = - CSLAddString(papszFieldsSrc, poSrcDefn->GetNameRef()); + aosFieldsSrc.AddString(poSrcDefn->GetNameRef()); } } - CPLFree(panSrcFieldsUsed); // Add source geometry fields that are not referenced by VRT layer. 
- panSrcFieldsUsed = static_cast<int *>( - CPLCalloc(sizeof(int), GetSrcLayerDefn()->GetGeomFieldCount())); + abSrcFieldUsed.clear(); + abSrcFieldUsed.resize(GetSrcLayerDefn()->GetGeomFieldCount()); for (int iVRTField = 0; iVRTField < GetLayerDefn()->GetGeomFieldCount(); iVRTField++) { @@ -2544,27 +2536,21 @@ OGRErr OGRVRTLayer::SetIgnoredFields(const char **papszFields) { const int iSrcField = apoGeomFieldProps[iVRTField]->iGeomField; if (iSrcField >= 0) - panSrcFieldsUsed[iSrcField] = TRUE; + abSrcFieldUsed[iSrcField] = true; } } for (int iSrcField = 0; iSrcField < GetSrcLayerDefn()->GetGeomFieldCount(); iSrcField++) { - if (!panSrcFieldsUsed[iSrcField]) + if (!abSrcFieldUsed[iSrcField]) { - OGRGeomFieldDefn *poSrcDefn = + const OGRGeomFieldDefn *poSrcDefn = GetSrcLayerDefn()->GetGeomFieldDefn(iSrcField); - papszFieldsSrc = - CSLAddString(papszFieldsSrc, poSrcDefn->GetNameRef()); + aosFieldsSrc.AddString(poSrcDefn->GetNameRef()); } } - CPLFree(panSrcFieldsUsed); - eErr = poSrcLayer->SetIgnoredFields((const char **)papszFieldsSrc); - - CSLDestroy(papszFieldsSrc); - - return eErr; + return poSrcLayer->SetIgnoredFields(aosFieldsSrc.List()); } /************************************************************************/ diff --git a/ogr/ogrsf_frmts/wfs/ogr_wfs.h b/ogr/ogrsf_frmts/wfs/ogr_wfs.h index bf75b820889b..566b81c7f5bb 100644 --- a/ogr/ogrsf_frmts/wfs/ogr_wfs.h +++ b/ogr/ogrsf_frmts/wfs/ogr_wfs.h @@ -197,7 +197,7 @@ class OGRWFSLayer final : public OGRLayer virtual OGRErr CommitTransaction() override; virtual OGRErr RollbackTransaction() override; - virtual OGRErr SetIgnoredFields(const char **papszFields) override; + virtual OGRErr SetIgnoredFields(CSLConstList papszFields) override; int HasLayerDefn() { diff --git a/ogr/ogrsf_frmts/wfs/ogrwfslayer.cpp b/ogr/ogrsf_frmts/wfs/ogrwfslayer.cpp index 83e028c792e3..55f18291f71d 100644 --- a/ogr/ogrsf_frmts/wfs/ogrwfslayer.cpp +++ b/ogr/ogrsf_frmts/wfs/ogrwfslayer.cpp @@ -1284,7 +1284,7 @@ void OGRWFSLayer::ResetReading() /* SetIgnoredFields() */ /************************************************************************/ -OGRErr OGRWFSLayer::SetIgnoredFields(const char **papszFields) +OGRErr OGRWFSLayer::SetIgnoredFields(CSLConstList papszFields) { bReloadNeeded = true; ResetReading(); From 3312a9954534af87b765c75b213cd27e48a6cc82 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 16 Apr 2024 12:31:44 +0200 Subject: [PATCH 009/230] OSM: properly escape special characters with TAGS_FORMAT=JSON open option Fixes https://github.com/OSGeo/gdal/issues/9673 --- autotest/ogr/data/osm/test_json.osm | 7 ++ autotest/ogr/data/osm/test_json.pbf | Bin 0 -> 204 bytes autotest/ogr/ogr_osm.py | 18 +++++ ogr/ogrsf_frmts/osm/ogr_osm.h | 3 +- ogr/ogrsf_frmts/osm/ogrosmlayer.cpp | 119 +++++++++++++++------------- 5 files changed, 90 insertions(+), 57 deletions(-) create mode 100644 autotest/ogr/data/osm/test_json.osm create mode 100644 autotest/ogr/data/osm/test_json.pbf diff --git a/autotest/ogr/data/osm/test_json.osm b/autotest/ogr/data/osm/test_json.osm new file mode 100644 index 000000000000..f754e1295aa3 --- /dev/null +++ b/autotest/ogr/data/osm/test_json.osm @@ -0,0 +1,7 @@ +<?xml version="1.0" encoding="UTF-8"?> +<osm version="0.6" generator="hand"> + <bounds minlat="49" minlon="2" maxlat="50" maxlon="3"/> + <node id="3" lat="49.5" lon="3" user="some_user" uid="1" version="1" changeset="1" timestamp="2012-07-10T00:00:00Z"> + <tag k="foo" v="x'\" y"/> + </node> +</osm> diff --git a/autotest/ogr/data/osm/test_json.pbf 
b/autotest/ogr/data/osm/test_json.pbf new file mode 100644 index 0000000000000000000000000000000000000000..4f94bbfe12aa62331bcfe658d215fda213d2a8f6 GIT binary patch literal 204 zcmV;-05ksp000dN2~Sf^NM&JUWpWr)5J(zOc%0*s;%K<A<};r_!;hx5ViFA(ZXf=_ zq|mUUwd*aD5}$u@Zg6r&YHp%#n1P;|5|>MAUU8~leoAU_6C-baac*X5uD+q3ArJuo zDTNx10000B3I|V9O+;aIVHj8tOd3>poa2(=V&LK|&d*JaFD*_j;$lwA&*$Q(P>)gK z<l?Oq(h<_)Vr0`2=3-<NVr@8d`#+}?Ba;#%lNKYB6KlisjvLHgEDe3<WP+HPm>B?v G(Gg#s8CI$Q literal 0 HcmV?d00001 diff --git a/autotest/ogr/ogr_osm.py b/autotest/ogr/ogr_osm.py index 41c84e298606..f50d66b04a68 100755 --- a/autotest/ogr/ogr_osm.py +++ b/autotest/ogr/ogr_osm.py @@ -911,3 +911,21 @@ def test_ogr_osm_tags_json(): assert lyr_defn.GetFieldDefn(other_tags_idx).GetSubType() == ogr.OFSTJSON f = lyr.GetNextFeature() assert f["other_tags"] == '{"foo":"bar"}' + + +############################################################################### +# Test TAGS_FORMAT=JSON + + +def test_ogr_osm_tags_json_special_characters(): + + ds = gdal.OpenEx("data/osm/test_json.pbf", open_options=["TAGS_FORMAT=JSON"]) + + lyr = ds.GetLayerByName("points") + lyr_defn = lyr.GetLayerDefn() + other_tags_idx = lyr_defn.GetFieldIndex("other_tags") + assert other_tags_idx >= 0 + assert lyr_defn.GetFieldDefn(other_tags_idx).GetType() == ogr.OFTString + assert lyr_defn.GetFieldDefn(other_tags_idx).GetSubType() == ogr.OFSTJSON + f = lyr.GetNextFeature() + assert f["other_tags"] == """{"foo":"x'\\\\\\"\\t\\n\\ry"}""" diff --git a/ogr/ogrsf_frmts/osm/ogr_osm.h b/ogr/ogrsf_frmts/osm/ogr_osm.h index 029271381631..4ffe52d4b343 100644 --- a/ogr/ogrsf_frmts/osm/ogr_osm.h +++ b/ogr/ogrsf_frmts/osm/ogr_osm.h @@ -124,8 +124,7 @@ class OGROSMLayer final : public OGRLayer bool m_bHasWarnedTooManyFeatures = false; - char *m_pszAllTags = nullptr; - bool m_bHasWarnedAllTagsTruncated = false; + std::string m_osAllTagsBuffer{}; bool m_bUserInterested = true; diff --git a/ogr/ogrsf_frmts/osm/ogrosmlayer.cpp b/ogr/ogrsf_frmts/osm/ogrosmlayer.cpp index f54bdddca306..e43e09e30a48 100644 --- a/ogr/ogrsf_frmts/osm/ogrosmlayer.cpp +++ b/ogr/ogrsf_frmts/osm/ogrosmlayer.cpp @@ -59,8 +59,6 @@ constexpr int SWITCH_THRESHOLD = 10000; constexpr int MAX_THRESHOLD = 100000; -constexpr int ALLTAGS_LENGTH = 8192; - /************************************************************************/ /* OGROSMLayer() */ /************************************************************************/ @@ -69,8 +67,7 @@ OGROSMLayer::OGROSMLayer(OGROSMDataSource *poDSIn, int nIdxLayerIn, const char *pszName) : m_poDS(poDSIn), m_nIdxLayer(nIdxLayerIn), m_poFeatureDefn(new OGRFeatureDefn(pszName)), - m_poSRS(new OGRSpatialReference()), - m_pszAllTags(static_cast<char *>(CPLMalloc(ALLTAGS_LENGTH))) + m_poSRS(new OGRSpatialReference()) { SetDescription(m_poFeatureDefn->GetName()); m_poFeatureDefn->Reference(); @@ -113,8 +110,6 @@ OGROSMLayer::~OGROSMLayer() sqlite3_finalize(m_oComputedAttributes[i].hStmt); } - CPLFree(m_pszAllTags); - CPLFree(m_papoFeatures); } @@ -520,25 +515,61 @@ int OGROSMLayer::AddInOtherOrAllTags(const char *pszK) } /************************************************************************/ -/* OGROSMEscapeString() */ +/* OGROSMEscapeStringHSTORE() */ /************************************************************************/ -static int OGROSMEscapeString(const char *pszV, char *pszAllTags) +static void OGROSMEscapeStringHSTORE(const char *pszV, std::string &sOut) { - int nAllTagsOff = 0; - - pszAllTags[nAllTagsOff++] = '"'; + sOut += '"'; for (int k = 0; pszV[k] != '\0'; k++) { if (pszV[k] 
== '"' || pszV[k] == '\\') - pszAllTags[nAllTagsOff++] = '\\'; - pszAllTags[nAllTagsOff++] = pszV[k]; + sOut += '\\'; + sOut += pszV[k]; } - pszAllTags[nAllTagsOff++] = '"'; + sOut += '"'; +} + +/************************************************************************/ +/* OGROSMEscapeStringJSON() */ +/************************************************************************/ + +static void OGROSMEscapeStringJSON(const char *pszV, std::string &sOut) +{ + sOut += '"'; - return nAllTagsOff; + for (int k = 0; pszV[k] != '\0'; k++) + { + const char ch = pszV[k]; + switch (ch) + { + case '"': + sOut += "\\\""; + break; + case '\\': + sOut += "\\\\"; + break; + case '\n': + sOut += "\\n"; + break; + case '\r': + sOut += "\\r"; + break; + case '\t': + sOut += "\\t"; + break; + default: + if (static_cast<unsigned char>(ch) < ' ') + sOut += CPLSPrintf("\\u%04X", ch); + else + sOut += ch; + break; + } + } + + sOut += '"'; } /************************************************************************/ @@ -627,7 +658,7 @@ void OGROSMLayer::SetFieldsFromTags(OGRFeature *poFeature, GIntBig nID, poFeature->SetField("osm_changeset", (int)psInfo->nChangeset); } - int nAllTagsOff = 0; + m_osAllTagsBuffer.clear(); for (unsigned int j = 0; j < nTags; j++) { const char *pszK = pasTags[j].pszK; @@ -643,48 +674,27 @@ void OGROSMLayer::SetFieldsFromTags(OGRFeature *poFeature, GIntBig nID, { if (AddInOtherOrAllTags(pszK)) { - const int nLenK = static_cast<int>(strlen(pszK)); - const int nLenV = static_cast<int>(strlen(pszV)); - const int nLenKEscaped = 1 + 2 * nLenK + 1; - const int nLenVEscaped = 1 + 2 * nLenV + 1; - // 3 is either for - // - HSTORE: ',' separator and '=>' - // - JSON: leading '{' or ',', ':' and closing '}' - if (nAllTagsOff + nLenKEscaped + nLenVEscaped + 3 >= - ALLTAGS_LENGTH - 1) - { - if (!m_bHasWarnedAllTagsTruncated) - CPLDebug("OSM", - "all_tags/other_tags field truncated for " - "feature " CPL_FRMT_GIB, - nID); - m_bHasWarnedAllTagsTruncated = true; - continue; - } - if (m_poDS->m_bTagsAsHSTORE) { - if (nAllTagsOff) - m_pszAllTags[nAllTagsOff++] = ','; + if (!m_osAllTagsBuffer.empty()) + m_osAllTagsBuffer += ','; - nAllTagsOff += - OGROSMEscapeString(pszK, m_pszAllTags + nAllTagsOff); + OGROSMEscapeStringHSTORE(pszK, m_osAllTagsBuffer); - m_pszAllTags[nAllTagsOff++] = '='; - m_pszAllTags[nAllTagsOff++] = '>'; + m_osAllTagsBuffer += '='; + m_osAllTagsBuffer += '>'; - nAllTagsOff += - OGROSMEscapeString(pszV, m_pszAllTags + nAllTagsOff); + OGROSMEscapeStringHSTORE(pszV, m_osAllTagsBuffer); } else { - m_pszAllTags[nAllTagsOff] = nAllTagsOff ? 
',' : '{'; - nAllTagsOff++; - nAllTagsOff += - OGROSMEscapeString(pszK, m_pszAllTags + nAllTagsOff); - m_pszAllTags[nAllTagsOff++] = ':'; - nAllTagsOff += - OGROSMEscapeString(pszV, m_pszAllTags + nAllTagsOff); + if (!m_osAllTagsBuffer.empty()) + m_osAllTagsBuffer += ','; + else + m_osAllTagsBuffer = '{'; + OGROSMEscapeStringJSON(pszK, m_osAllTagsBuffer); + m_osAllTagsBuffer += ':'; + OGROSMEscapeStringJSON(pszV, m_osAllTagsBuffer); } } @@ -698,17 +708,16 @@ void OGROSMLayer::SetFieldsFromTags(OGRFeature *poFeature, GIntBig nID, } } - if (nAllTagsOff) + if (!m_osAllTagsBuffer.empty()) { if (!m_poDS->m_bTagsAsHSTORE) { - m_pszAllTags[nAllTagsOff++] = '}'; + m_osAllTagsBuffer += '}'; } - m_pszAllTags[nAllTagsOff] = '\0'; if (m_nIndexAllTags >= 0) - poFeature->SetField(m_nIndexAllTags, m_pszAllTags); + poFeature->SetField(m_nIndexAllTags, m_osAllTagsBuffer.c_str()); else - poFeature->SetField(m_nIndexOtherTags, m_pszAllTags); + poFeature->SetField(m_nIndexOtherTags, m_osAllTagsBuffer.c_str()); } for (size_t i = 0; i < m_oComputedAttributes.size(); i++) From 1c8185184ae37f7f68c3dec485ca73406c43c1f1 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 16 Apr 2024 16:17:22 +0200 Subject: [PATCH 010/230] [Lint] Rename MultiPerform() to VSICURLMultiPerform() --- port/cpl_vsil_curl.cpp | 22 +++++++++++----------- port/cpl_vsil_curl_class.h | 2 +- port/cpl_vsil_s3.cpp | 2 +- port/cpl_vsil_swift.cpp | 2 +- port/cpl_vsil_webhdfs.cpp | 16 ++++++++-------- 5 files changed, 22 insertions(+), 22 deletions(-) diff --git a/port/cpl_vsil_curl.cpp b/port/cpl_vsil_curl.cpp index 8b172349664c..34da518cd63f 100644 --- a/port/cpl_vsil_curl.cpp +++ b/port/cpl_vsil_curl.cpp @@ -826,10 +826,10 @@ static GIntBig VSICurlGetExpiresFromS3LikeSignedURL(const char *pszURL) } /************************************************************************/ -/* MultiPerform() */ +/* VSICURLMultiPerform() */ /************************************************************************/ -void MultiPerform(CURLM *hCurlMultiHandle, CURL *hEasyHandle) +void VSICURLMultiPerform(CURLM *hCurlMultiHandle, CURL *hEasyHandle) { int repeats = 0; @@ -1146,7 +1146,7 @@ vsi_l_offset VSICurlHandle::GetFileSizeOrHeaders(bool bSetError, unchecked_curl_easy_setopt(hCurlHandle, CURLOPT_FILETIME, 1); - MultiPerform(hCurlMultiHandle, hCurlHandle); + VSICURLMultiPerform(hCurlMultiHandle, hCurlHandle); VSICURLResetHeaderAndWriterFunctions(hCurlHandle); @@ -1863,7 +1863,7 @@ std::string VSICurlHandle::DownloadRegion(const vsi_l_offset startOffset, unchecked_curl_easy_setopt(hCurlHandle, CURLOPT_FILETIME, 1); - MultiPerform(hCurlMultiHandle, hCurlHandle); + VSICURLMultiPerform(hCurlMultiHandle, hCurlHandle); VSICURLResetHeaderAndWriterFunctions(hCurlHandle); @@ -2493,7 +2493,7 @@ int VSICurlHandle::ReadMultiRange(int const nRanges, void **const ppData, if (!aHandles.empty()) { - MultiPerform(hMultiHandle); + VSICURLMultiPerform(hMultiHandle); } int nRet = 0; @@ -2700,7 +2700,7 @@ int VSICurlHandle::ReadMultiRangeSingleGet(int const nRanges, headers = VSICurlMergeHeaders(headers, GetCurlHeaders("GET", headers)); unchecked_curl_easy_setopt(hCurlHandle, CURLOPT_HTTPHEADER, headers); - MultiPerform(hCurlMultiHandle, hCurlHandle); + VSICURLMultiPerform(hCurlMultiHandle, hCurlHandle); VSICURLResetHeaderAndWriterFunctions(hCurlHandle); @@ -3096,7 +3096,7 @@ size_t VSICurlHandle::PRead(void *pBuffer, size_t nSize, CURLM *hMultiHandle = poFS->GetCurlMultiHandleFor(osURL); curl_multi_add_handle(hMultiHandle, hCurlHandle); - 
MultiPerform(hMultiHandle); + VSICURLMultiPerform(hMultiHandle); { std::lock_guard<std::mutex> oLock(m_oMutex); @@ -4814,7 +4814,7 @@ char **VSICurlFilesystemHandlerBase::GetFileList(const char *pszDirname, unchecked_curl_easy_setopt(hCurlHandle, CURLOPT_HTTPHEADER, headers); - MultiPerform(hCurlMultiHandle, hCurlHandle); + VSICURLMultiPerform(hCurlMultiHandle, hCurlHandle); curl_slist_free_all(headers); @@ -4973,7 +4973,7 @@ char **VSICurlFilesystemHandlerBase::GetFileList(const char *pszDirname, unchecked_curl_easy_setopt(hCurlHandle, CURLOPT_HTTPHEADER, headers); - MultiPerform(hCurlMultiHandle, hCurlHandle); + VSICURLMultiPerform(hCurlMultiHandle, hCurlHandle); curl_slist_free_all(headers); @@ -5546,8 +5546,8 @@ long CurlRequestHelper::perform(CURL *hCurlHandle, struct curl_slist *headers, szCurlErrBuf[0] = '\0'; unchecked_curl_easy_setopt(hCurlHandle, CURLOPT_ERRORBUFFER, szCurlErrBuf); - MultiPerform(poFS->GetCurlMultiHandleFor(poS3HandleHelper->GetURL()), - hCurlHandle); + VSICURLMultiPerform(poFS->GetCurlMultiHandleFor(poS3HandleHelper->GetURL()), + hCurlHandle); VSICURLResetHeaderAndWriterFunctions(hCurlHandle); diff --git a/port/cpl_vsil_curl_class.h b/port/cpl_vsil_curl_class.h index 45c12ae0e612..99c9fda4d531 100644 --- a/port/cpl_vsil_curl_class.h +++ b/port/cpl_vsil_curl_class.h @@ -997,7 +997,7 @@ void VSICURLInitWriteFuncStruct(WriteFuncStruct *psStruct, VSILFILE *fp, void *pReadCbkUserData); size_t VSICurlHandleWriteFunc(void *buffer, size_t count, size_t nmemb, void *req); -void MultiPerform(CURLM *hCurlMultiHandle, CURL *hEasyHandle = nullptr); +void VSICURLMultiPerform(CURLM *hCurlMultiHandle, CURL *hEasyHandle = nullptr); void VSICURLResetHeaderAndWriterFunctions(CURL *hCurlHandle); int VSICurlParseUnixPermissions(const char *pszPermissions); diff --git a/port/cpl_vsil_s3.cpp b/port/cpl_vsil_s3.cpp index 810468294ef5..890abe0438ba 100644 --- a/port/cpl_vsil_s3.cpp +++ b/port/cpl_vsil_s3.cpp @@ -1379,7 +1379,7 @@ int VSIS3WriteHandle::FinishChunkedTransfer() m_nChunkedBufferOff = 0; m_nChunkedBufferSize = 0; - MultiPerform(m_hCurlMulti); + VSICURLMultiPerform(m_hCurlMulti); long response_code; curl_easy_getinfo(m_hCurl, CURLINFO_RESPONSE_CODE, &response_code); diff --git a/port/cpl_vsil_swift.cpp b/port/cpl_vsil_swift.cpp index bccc6c15ba88..359265fc67a2 100644 --- a/port/cpl_vsil_swift.cpp +++ b/port/cpl_vsil_swift.cpp @@ -613,7 +613,7 @@ char **VSISwiftFSHandler::GetFileList(const char *pszDirname, int nMaxFiles, unchecked_curl_easy_setopt(hCurlHandle, CURLOPT_HTTPHEADER, headers); - MultiPerform(hCurlMultiHandle, hCurlHandle); + VSICURLMultiPerform(hCurlMultiHandle, hCurlHandle); VSICURLResetHeaderAndWriterFunctions(hCurlHandle); diff --git a/port/cpl_vsil_webhdfs.cpp b/port/cpl_vsil_webhdfs.cpp index 0ecc7468f071..9ce840a0c15f 100644 --- a/port/cpl_vsil_webhdfs.cpp +++ b/port/cpl_vsil_webhdfs.cpp @@ -353,7 +353,7 @@ bool VSIWebHDFSWriteHandle::CreateFile() unchecked_curl_easy_setopt(hCurlHandle, CURLOPT_WRITEFUNCTION, VSICurlHandleWriteFunc); - MultiPerform(m_poFS->GetCurlMultiHandleFor(m_osURL), hCurlHandle); + VSICURLMultiPerform(m_poFS->GetCurlMultiHandleFor(m_osURL), hCurlHandle); curl_slist_free_all(headers); @@ -430,7 +430,7 @@ bool VSIWebHDFSWriteHandle::Append() unchecked_curl_easy_setopt(hCurlHandle, CURLOPT_WRITEFUNCTION, VSICurlHandleWriteFunc); - MultiPerform(m_poFS->GetCurlMultiHandleFor(m_osURL), hCurlHandle); + VSICURLMultiPerform(m_poFS->GetCurlMultiHandleFor(m_osURL), hCurlHandle); curl_slist_free_all(headers); @@ -489,7 +489,7 @@ bool 
VSIWebHDFSWriteHandle::Append() unchecked_curl_easy_setopt(hCurlHandle, CURLOPT_WRITEFUNCTION, VSICurlHandleWriteFunc); - MultiPerform(m_poFS->GetCurlMultiHandleFor(m_osURL), hCurlHandle); + VSICURLMultiPerform(m_poFS->GetCurlMultiHandleFor(m_osURL), hCurlHandle); curl_slist_free_all(headers); @@ -619,7 +619,7 @@ char **VSIWebHDFSFSHandler::GetFileList(const char *pszDirname, unchecked_curl_easy_setopt(hCurlHandle, CURLOPT_HTTPHEADER, headers); - MultiPerform(hCurlMultiHandle, hCurlHandle); + VSICURLMultiPerform(hCurlMultiHandle, hCurlHandle); VSICURLResetHeaderAndWriterFunctions(hCurlHandle); @@ -724,7 +724,7 @@ int VSIWebHDFSFSHandler::Unlink(const char *pszFilename) unchecked_curl_easy_setopt(hCurlHandle, CURLOPT_HTTPHEADER, headers); - MultiPerform(hCurlMultiHandle, hCurlHandle); + VSICURLMultiPerform(hCurlMultiHandle, hCurlHandle); VSICURLResetHeaderAndWriterFunctions(hCurlHandle); @@ -847,7 +847,7 @@ int VSIWebHDFSFSHandler::Mkdir(const char *pszDirname, long nMode) unchecked_curl_easy_setopt(hCurlHandle, CURLOPT_HTTPHEADER, headers); - MultiPerform(hCurlMultiHandle, hCurlHandle); + VSICURLMultiPerform(hCurlMultiHandle, hCurlHandle); VSICURLResetHeaderAndWriterFunctions(hCurlHandle); @@ -961,7 +961,7 @@ vsi_l_offset VSIWebHDFSHandle::GetFileSize(bool bSetError) szCurlErrBuf[0] = '\0'; unchecked_curl_easy_setopt(hCurlHandle, CURLOPT_ERRORBUFFER, szCurlErrBuf); - MultiPerform(hCurlMultiHandle, hCurlHandle); + VSICURLMultiPerform(hCurlMultiHandle, hCurlHandle); VSICURLResetHeaderAndWriterFunctions(hCurlHandle); @@ -1091,7 +1091,7 @@ std::string VSIWebHDFSHandle::DownloadRegion(const vsi_l_offset startOffset, unchecked_curl_easy_setopt(hCurlHandle, CURLOPT_HTTPHEADER, headers); - MultiPerform(hCurlMultiHandle, hCurlHandle); + VSICURLMultiPerform(hCurlMultiHandle, hCurlHandle); VSICURLResetHeaderAndWriterFunctions(hCurlHandle); From 0c800131f63938661c83bd58aa03c71f940a1adf Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 16 Apr 2024 16:27:05 +0200 Subject: [PATCH 011/230] Add a VSICURLMultiCleanup() that runs with SIGPIPE ignored (fixes #9677) --- port/cpl_http.cpp | 7 ++++--- port/cpl_vsil_curl.cpp | 15 +++++++++++++-- port/cpl_vsil_curl_class.h | 2 ++ port/cpl_vsil_s3.cpp | 2 +- 4 files changed, 20 insertions(+), 6 deletions(-) diff --git a/port/cpl_http.cpp b/port/cpl_http.cpp index 787bb3d4704d..09f4ecc599ea 100644 --- a/port/cpl_http.cpp +++ b/port/cpl_http.cpp @@ -43,6 +43,7 @@ #include "cpl_http.h" #include "cpl_error.h" #include "cpl_multiproc.h" +#include "cpl_vsil_curl_class.h" // gcc or clang complains about C-style cast in #define like // CURL_ZERO_TERMINATED @@ -1630,7 +1631,7 @@ CPLHTTPResult **CPLHTTPMultiFetch(const char *const *papszURL, int nURLCount, auto oIter = poSessionMultiMap->find(osSessionName); if (oIter != poSessionMultiMap->end()) { - curl_multi_cleanup(oIter->second); + VSICURLMultiCleanup(oIter->second); poSessionMultiMap->erase(oIter); if (poSessionMultiMap->empty()) { @@ -1806,7 +1807,7 @@ CPLHTTPResult **CPLHTTPMultiFetch(const char *const *papszURL, int nURLCount, } if (!pszPersistent) - curl_multi_cleanup(hCurlMultiHandle); + VSICURLMultiCleanup(hCurlMultiHandle); for (size_t i = 0; i < aHeaders.size(); i++) curl_slist_free_all(aHeaders[i]); @@ -2557,7 +2558,7 @@ void CPLHTTPCleanup() { for (auto &kv : *poSessionMultiMap) { - curl_multi_cleanup(kv.second); + VSICURLMultiCleanup(kv.second); } delete poSessionMultiMap; poSessionMultiMap = nullptr; diff --git a/port/cpl_vsil_curl.cpp b/port/cpl_vsil_curl.cpp index 
34da518cd63f..2ce2240a77de 100644 --- a/port/cpl_vsil_curl.cpp +++ b/port/cpl_vsil_curl.cpp @@ -3465,7 +3465,7 @@ void VSICurlHandle::AdviseRead(int nRanges, const vsi_l_offset *panOffsets, NetworkStatisticsLogger::LogGET(nTotalDownloaded); - curl_multi_cleanup(hMultiHandle); + VSICURLMultiCleanup(hMultiHandle); }; m_oThreadAdviseRead = std::thread(task, l_osURL); } @@ -3590,7 +3590,7 @@ void CachedConnection::clear() { if (hCurlMultiHandle) { - curl_multi_cleanup(hCurlMultiHandle); + VSICURLMultiCleanup(hCurlMultiHandle); hCurlMultiHandle = nullptr; } } @@ -5923,6 +5923,17 @@ void VSICURLDestroyCacheFileProp() } /* end of namespace cpl */ +/************************************************************************/ +/* VSICURLMultiCleanup() */ +/************************************************************************/ + +void VSICURLMultiCleanup(CURLM *hCurlMultiHandle) +{ + void *old_handler = CPLHTTPIgnoreSigPipe(); + curl_multi_cleanup(hCurlMultiHandle); + CPLHTTPRestoreSigPipeHandler(old_handler); +} + /************************************************************************/ /* VSICurlInstallReadCbk() */ /************************************************************************/ diff --git a/port/cpl_vsil_curl_class.h b/port/cpl_vsil_curl_class.h index 99c9fda4d531..75b3e84fb033 100644 --- a/port/cpl_vsil_curl_class.h +++ b/port/cpl_vsil_curl_class.h @@ -1011,6 +1011,8 @@ void VSICURLDestroyCacheFileProp(); } // namespace cpl +void VSICURLMultiCleanup(CURLM *hCurlMultiHandle); + //! @endcond #endif // HAVE_CURL diff --git a/port/cpl_vsil_s3.cpp b/port/cpl_vsil_s3.cpp index 890abe0438ba..93f8c7fa0d7d 100644 --- a/port/cpl_vsil_s3.cpp +++ b/port/cpl_vsil_s3.cpp @@ -802,7 +802,7 @@ VSIS3WriteHandle::~VSIS3WriteHandle() curl_multi_remove_handle(m_hCurlMulti, m_hCurl); curl_easy_cleanup(m_hCurl); } - curl_multi_cleanup(m_hCurlMulti); + VSICURLMultiCleanup(m_hCurlMulti); } CPLFree(m_sWriteFuncHeaderData.pBuffer); } From 32285e973576490d600f06c982c7f0ae75da48fb Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 16 Apr 2024 16:33:05 +0200 Subject: [PATCH 012/230] Move VSICURLxxxx functions outside of the cpl namespace, as already namespaces by their prefix --- port/cpl_vsil.cpp | 2 +- port/cpl_vsil_curl.cpp | 35 +++++++++++++++++++++-------------- port/cpl_vsil_curl_class.h | 10 +++++----- 3 files changed, 27 insertions(+), 20 deletions(-) diff --git a/port/cpl_vsil.cpp b/port/cpl_vsil.cpp index 54c2c8d3da79..f2390f10fd6b 100644 --- a/port/cpl_vsil.cpp +++ b/port/cpl_vsil.cpp @@ -3358,7 +3358,7 @@ void VSICleanupFileManager() } #ifdef HAVE_CURL - cpl::VSICURLDestroyCacheFileProp(); + VSICURLDestroyCacheFileProp(); #endif } diff --git a/port/cpl_vsil_curl.cpp b/port/cpl_vsil_curl.cpp index 2ce2240a77de..c8d6640a7100 100644 --- a/port/cpl_vsil_curl.cpp +++ b/port/cpl_vsil_curl.cpp @@ -136,9 +136,6 @@ void VSICurlAuthParametersChanged() gnGenerationAuthParameters++; } -namespace cpl -{ - // Do not access those variables directly ! 
// Use VSICURLGetDownloadChunkSize() and GetMaxRegions() static int N_MAX_REGIONS_DO_NOT_USE_DIRECTLY = 0; @@ -444,6 +441,9 @@ static std::string VSICurlGetURLFromFilename( return pszFilename; } +namespace cpl +{ + /************************************************************************/ /* VSICurlHandle() */ /************************************************************************/ @@ -570,6 +570,8 @@ int VSICurlHandle::Seek(vsi_l_offset nOffset, int nWhence) return 0; } +} // namespace cpl + /************************************************************************/ /* VSICurlGetTimeStampFromRFC822DateTime() */ /************************************************************************/ @@ -621,7 +623,7 @@ static GIntBig VSICurlGetTimeStampFromRFC822DateTime(const char *pszDT) /* VSICURLInitWriteFuncStruct() */ /************************************************************************/ -void VSICURLInitWriteFuncStruct(WriteFuncStruct *psStruct, VSILFILE *fp, +void VSICURLInitWriteFuncStruct(cpl::WriteFuncStruct *psStruct, VSILFILE *fp, VSICurlReadCbkFunc pfnReadCbk, void *pReadCbkUserData) { @@ -651,7 +653,7 @@ void VSICURLInitWriteFuncStruct(WriteFuncStruct *psStruct, VSILFILE *fp, size_t VSICurlHandleWriteFunc(void *buffer, size_t count, size_t nmemb, void *req) { - WriteFuncStruct *psStruct = static_cast<WriteFuncStruct *>(req); + cpl::WriteFuncStruct *psStruct = static_cast<cpl::WriteFuncStruct *>(req); const size_t nSize = count * nmemb; if (psStruct->bInterrupted) @@ -920,6 +922,9 @@ static bool Iso8601ToUnixTime(const char *pszDT, GIntBig *pnUnixTime) return false; } +namespace cpl +{ + /************************************************************************/ /* ManagePlanetaryComputerSigning() */ /************************************************************************/ @@ -5812,6 +5817,8 @@ std::string NetworkStatisticsLogger::GetReportAsSerializedJSON() return oJSON.Format(CPLJSONObject::PrettyFormat::Pretty); } +} /* end of namespace cpl */ + /************************************************************************/ /* VSICurlParseUnixPermissions() */ /************************************************************************/ @@ -5847,19 +5854,19 @@ int VSICurlParseUnixPermissions(const char *pszPermissions) /************************************************************************/ static std::mutex oCacheFilePropMutex; -static lru11::Cache<std::string, FileProp> *poCacheFileProp = nullptr; +static lru11::Cache<std::string, cpl::FileProp> *poCacheFileProp = nullptr; /************************************************************************/ /* VSICURLGetCachedFileProp() */ /************************************************************************/ -bool VSICURLGetCachedFileProp(const char *pszURL, FileProp &oFileProp) +bool VSICURLGetCachedFileProp(const char *pszURL, cpl::FileProp &oFileProp) { std::lock_guard<std::mutex> oLock(oCacheFilePropMutex); return poCacheFileProp != nullptr && poCacheFileProp->tryGet(std::string(pszURL), oFileProp) && // Let a chance to use new auth parameters - !(oFileProp.eExists == EXIST_NO && + !(oFileProp.eExists == cpl::EXIST_NO && gnGenerationAuthParameters != oFileProp.nGenerationAuthParameters); } @@ -5867,11 +5874,12 @@ bool VSICURLGetCachedFileProp(const char *pszURL, FileProp &oFileProp) /* VSICURLSetCachedFileProp() */ /************************************************************************/ -void VSICURLSetCachedFileProp(const char *pszURL, FileProp &oFileProp) +void VSICURLSetCachedFileProp(const char *pszURL, cpl::FileProp &oFileProp) { 
std::lock_guard<std::mutex> oLock(oCacheFilePropMutex); if (poCacheFileProp == nullptr) - poCacheFileProp = new lru11::Cache<std::string, FileProp>(100 * 1024); + poCacheFileProp = + new lru11::Cache<std::string, cpl::FileProp>(100 * 1024); oFileProp.nGenerationAuthParameters = gnGenerationAuthParameters; poCacheFileProp->insert(std::string(pszURL), oFileProp); } @@ -5898,8 +5906,9 @@ void VSICURLInvalidateCachedFilePropPrefix(const char *pszURL) { std::list<std::string> keysToRemove; const size_t nURLSize = strlen(pszURL); - auto lambda = [&keysToRemove, &pszURL, nURLSize]( - const lru11::KeyValuePair<std::string, FileProp> &kv) + auto lambda = + [&keysToRemove, &pszURL, nURLSize]( + const lru11::KeyValuePair<std::string, cpl::FileProp> &kv) { if (strncmp(kv.key.c_str(), pszURL, nURLSize) == 0) keysToRemove.push_back(kv.key); @@ -5921,8 +5930,6 @@ void VSICURLDestroyCacheFileProp() poCacheFileProp = nullptr; } -} /* end of namespace cpl */ - /************************************************************************/ /* VSICURLMultiCleanup() */ /************************************************************************/ diff --git a/port/cpl_vsil_curl_class.h b/port/cpl_vsil_curl_class.h index 75b3e84fb033..97d1e618a35a 100644 --- a/port/cpl_vsil_curl_class.h +++ b/port/cpl_vsil_curl_class.h @@ -990,9 +990,11 @@ struct NetworkStatisticsAction } }; +} // namespace cpl + int VSICURLGetDownloadChunkSize(); -void VSICURLInitWriteFuncStruct(WriteFuncStruct *psStruct, VSILFILE *fp, +void VSICURLInitWriteFuncStruct(cpl::WriteFuncStruct *psStruct, VSILFILE *fp, VSICurlReadCbkFunc pfnReadCbk, void *pReadCbkUserData); size_t VSICurlHandleWriteFunc(void *buffer, size_t count, size_t nmemb, @@ -1003,14 +1005,12 @@ void VSICURLResetHeaderAndWriterFunctions(CURL *hCurlHandle); int VSICurlParseUnixPermissions(const char *pszPermissions); // Cache of file properties (size, etc.) -bool VSICURLGetCachedFileProp(const char *pszURL, FileProp &oFileProp); -void VSICURLSetCachedFileProp(const char *pszURL, FileProp &oFileProp); +bool VSICURLGetCachedFileProp(const char *pszURL, cpl::FileProp &oFileProp); +void VSICURLSetCachedFileProp(const char *pszURL, cpl::FileProp &oFileProp); void VSICURLInvalidateCachedFileProp(const char *pszURL); void VSICURLInvalidateCachedFilePropPrefix(const char *pszURL); void VSICURLDestroyCacheFileProp(); -} // namespace cpl - void VSICURLMultiCleanup(CURLM *hCurlMultiHandle); //! @endcond From e95ae3fbf031c0067d02b5da15dcd923b35bc30c Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 16 Apr 2024 11:22:02 +0200 Subject: [PATCH 013/230] Revert "ogr_gpkg.py: add a xfail for MacOS ARM64" This reverts commit a83942422fd67471aee23ae11c5d06af27db2857. --- autotest/ogr/ogr_gpkg.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/autotest/ogr/ogr_gpkg.py b/autotest/ogr/ogr_gpkg.py index 28ef09e51411..e0ede6f543f2 100755 --- a/autotest/ogr/ogr_gpkg.py +++ b/autotest/ogr/ogr_gpkg.py @@ -8964,13 +8964,7 @@ def test_ogr_gpkg_background_rtree_build( f = sql_lyr.GetNextFeature() assert f.GetField(0) == "ok" with ds.ExecuteSQL("SELECT * FROM rtree_foo_geom") as sql_lyr: - fc = sql_lyr.GetFeatureCount() - if fc != 1000 and gdaltest.is_travis_branch("macos_build_conda"): - # Fails with - # ERROR 1: failed to prepare SQL: INSERT INTO my_rtree VALUES (?,?,?,?,?) 
- # FAILED ogr/ogr_gpkg.py::test_ogr_gpkg_background_rtree_build[1000-in_memory] - AssertionError: assert 0 == 1000 - pytest.xfail("fails for unknown reason on MacOS ARM64") - assert fc == 1000 + assert sql_lyr.GetFeatureCount() == 1000 foo_lyr = ds.GetLayerByName("foo") for i in range(1000): foo_lyr.SetSpatialFilterRect(10000 + i - 0.5, i - 0.5, 10000 + i + 0.5, i + 0.5) From bbd69807c8c7d9cdaba0d98fd706196f6b271ee3 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 16 Apr 2024 17:10:57 +0200 Subject: [PATCH 014/230] GPKG: threaded RTree: do not VSIUnlink() the temporary database used for the RTree creation --- ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp b/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp index f66297252c74..5f688bdbdcd9 100644 --- a/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp +++ b/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp @@ -2725,8 +2725,6 @@ void OGRGeoPackageTableLayer::StartAsyncRTree() OGRErr eErr = SQLCommand(m_poDS->GetDB(), pszSQL); sqlite3_free(pszSQL); - VSIUnlink(m_osAsyncDBName.c_str()); - if (eErr == OGRERR_NONE) { try @@ -2936,7 +2934,8 @@ void OGRGeoPackageTableLayer::AsyncRTreeThreadFunction() nullptr) != SQLITE_OK) { CPLError(CE_Failure, CPLE_AppDefined, - "failed to prepare SQL: %s", pszInsertSQL); + "failed to prepare SQL: %s: %s", pszInsertSQL, + sqlite3_errmsg(m_hAsyncDBHandle)); m_oQueueRTreeEntries.clear(); m_bErrorDuringRTreeThread = true; return; From 82d44f03f0923367fbdc614c2cb5b2e002137caa Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 16 Apr 2024 19:24:59 +0200 Subject: [PATCH 015/230] GPKG: tune error handling of AsyncRTreeThreadFunction() and test it --- autotest/ogr/ogr_gpkg.py | 23 ++++++++--- .../gpkg/ogrgeopackagetablelayer.cpp | 38 +++++++++++++------ 2 files changed, 43 insertions(+), 18 deletions(-) diff --git a/autotest/ogr/ogr_gpkg.py b/autotest/ogr/ogr_gpkg.py index e0ede6f543f2..30761e28acba 100755 --- a/autotest/ogr/ogr_gpkg.py +++ b/autotest/ogr/ogr_gpkg.py @@ -8907,8 +8907,15 @@ def test_ogr_gpkg_get_geometry_types(tmp_vsimem): @pytest.mark.parametrize("write_to_disk", (True, False), ids=["on_disk", "in_memory"]) @pytest.mark.parametrize("OGR_GPKG_MAX_RAM_USAGE_RTREE", (1, 1000, None)) +@pytest.mark.parametrize( + "OGR_GPKG_SIMULATE_INSERT_INTO_MY_RTREE_PREPARATION_ERROR", (True, False) +) def test_ogr_gpkg_background_rtree_build( - tmp_path, tmp_vsimem, write_to_disk, OGR_GPKG_MAX_RAM_USAGE_RTREE + tmp_path, + tmp_vsimem, + write_to_disk, + OGR_GPKG_MAX_RAM_USAGE_RTREE, + OGR_GPKG_SIMULATE_INSERT_INTO_MY_RTREE_PREPARATION_ERROR, ): if write_to_disk: @@ -8918,13 +8925,17 @@ def test_ogr_gpkg_background_rtree_build( # Batch insertion only gdal.ErrorReset() - with gdaltest.config_option( - "OGR_GPKG_MAX_RAM_USAGE_RTREE", + + options = {} + options["OGR_GPKG_MAX_RAM_USAGE_RTREE"] = ( str(OGR_GPKG_MAX_RAM_USAGE_RTREE) if OGR_GPKG_MAX_RAM_USAGE_RTREE is not None - else None, - thread_local=False, - ): + else None + ) + options["OGR_GPKG_SIMULATE_INSERT_INTO_MY_RTREE_PREPARATION_ERROR"] = ( + "TRUE" if OGR_GPKG_SIMULATE_INSERT_INTO_MY_RTREE_PREPARATION_ERROR else None + ) + with gdaltest.config_options(options, thread_local=False): ds = gdaltest.gpkg_dr.CreateDataSource(filename) with gdaltest.config_option("OGR_GPKG_THREADED_RTREE_AT_FIRST_FEATURE", "YES"): lyr = ds.CreateLayer("foo") diff --git 
a/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp b/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp index 5f688bdbdcd9..489fca4c40f2 100644 --- a/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp +++ b/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp @@ -2765,13 +2765,16 @@ void OGRGeoPackageTableLayer::StartAsyncRTree() void OGRGeoPackageTableLayer::RemoveAsyncRTreeTempDB() { - SQLCommand( - m_poDS->GetDB(), - CPLSPrintf("DETACH DATABASE \"%s\"", - SQLEscapeName(m_osAsyncDBAttachName.c_str()).c_str())); - m_osAsyncDBAttachName.clear(); - VSIUnlink(m_osAsyncDBName.c_str()); - m_osAsyncDBName.clear(); + if (!m_osAsyncDBAttachName.empty()) + { + SQLCommand( + m_poDS->GetDB(), + CPLSPrintf("DETACH DATABASE \"%s\"", + SQLEscapeName(m_osAsyncDBAttachName.c_str()).c_str())); + m_osAsyncDBAttachName.clear(); + VSIUnlink(m_osAsyncDBName.c_str()); + m_osAsyncDBName.clear(); + } } /************************************************************************/ @@ -2847,8 +2850,6 @@ bool OGRGeoPackageTableLayer::FlushInMemoryRTree(sqlite3 *hRTreeDB, m_hAsyncDBHandle = nullptr; } - VSIUnlink(m_osAsyncDBName.c_str()); - m_oQueueRTreeEntries.clear(); } sqlite3_free(pszErrMsg); @@ -2929,15 +2930,24 @@ void OGRGeoPackageTableLayer::AsyncRTreeThreadFunction() if (hStmt == nullptr) { const char *pszInsertSQL = - "INSERT INTO my_rtree VALUES (?,?,?,?,?)"; + CPLGetConfigOption( + "OGR_GPKG_SIMULATE_INSERT_INTO_MY_RTREE_PREPARATION_ERROR", + nullptr) + ? "INSERT INTO my_rtree_SIMULATE_ERROR VALUES (?,?,?,?,?)" + : "INSERT INTO my_rtree VALUES (?,?,?,?,?)"; if (sqlite3_prepare_v2(m_hAsyncDBHandle, pszInsertSQL, -1, &hStmt, nullptr) != SQLITE_OK) { CPLError(CE_Failure, CPLE_AppDefined, "failed to prepare SQL: %s: %s", pszInsertSQL, sqlite3_errmsg(m_hAsyncDBHandle)); - m_oQueueRTreeEntries.clear(); + m_bErrorDuringRTreeThread = true; + + sqlite3_close(m_hAsyncDBHandle); + m_hAsyncDBHandle = nullptr; + + m_oQueueRTreeEntries.clear(); return; } @@ -4455,7 +4465,11 @@ bool OGRGeoPackageTableLayer::CreateSpatialIndex(const char *pszTableName) sqlite3_close(m_hAsyncDBHandle); m_hAsyncDBHandle = nullptr; } - if (!m_bErrorDuringRTreeThread) + if (m_bErrorDuringRTreeThread) + { + RemoveAsyncRTreeTempDB(); + } + else { bPopulateFromThreadRTree = true; } From fc81e0f25f278883356f38caecab3a83db5e153c Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 16 Apr 2024 20:18:13 +0200 Subject: [PATCH 016/230] GPKG: change default value of OGR_GPKG_ALLOW_THREADED_RTREE to NO on OSX Arm64 --- ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp b/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp index 489fca4c40f2..5bc10e338233 100644 --- a/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp +++ b/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp @@ -2644,8 +2644,19 @@ void OGRGeoPackageTableLayer::SetDeferredSpatialIndexCreation(bool bFlag) m_bAllowedRTreeThread = m_poDS->GetLayerCount() == 0 && sqlite3_threadsafe() != 0 && CPLGetNumCPUs() >= 2 && - CPLTestBool( - CPLGetConfigOption("OGR_GPKG_ALLOW_THREADED_RTREE", "YES")); + CPLTestBool(CPLGetConfigOption("OGR_GPKG_ALLOW_THREADED_RTREE", + // For a not yet understood reason, threaded RTree building + // (randomly?) fails on OSX Arm64. This may not be at all specific + // to that platform, but a more general problem, but it can't be + // reproduced elsewhere. 
+ // Cf https://gis.stackexchange.com/questions/479958/how-to-fix-failed-to-prepare-sql-error-when-creating-gpkg-file-from-osm-extrac/479964#479964 + // and random (frequent) failures on GDAL CI (https://github.com/OSGeo/gdal/commit/a83942422fd67471aee23ae11c5d06af27db2857) +#if defined(__arm64__) && defined(__APPLE__) + "NO" +#else + "YES" +#endif + )); // For unit tests if (CPLTestBool(CPLGetConfigOption( From cc65a48cd85c7804ae9ed1ec8aa0c7c142676b9d Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Wed, 17 Apr 2024 00:20:10 +0200 Subject: [PATCH 017/230] Parquet: make it recognize bbox field from Overture Maps 2024-04-16-beta.0 --- ..._to_overturemaps_2024-04-16-beta.0.parquet | Bin 0 -> 10479 bytes autotest/ogr/ogr_parquet.py | 49 ++++++++++++++++- .../arrow_common/ograrrowwriterlayer.hpp | 5 +- ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp | 52 +++++++++++++++++- .../parquet/ogrparquetwriterlayer.cpp | 4 +- 5 files changed, 106 insertions(+), 4 deletions(-) create mode 100644 autotest/ogr/data/parquet/bbox_similar_to_overturemaps_2024-04-16-beta.0.parquet diff --git a/autotest/ogr/data/parquet/bbox_similar_to_overturemaps_2024-04-16-beta.0.parquet b/autotest/ogr/data/parquet/bbox_similar_to_overturemaps_2024-04-16-beta.0.parquet new file mode 100644 index 0000000000000000000000000000000000000000..ef1d768a151e9f32b6e8fce366a64947ce2ed1fa GIT binary patch literal 10479 zcmcIq32<9yo&RJ%+ftjfZi$Q&()QscZVW!8r^AuAeU)VEj3ryLE!iTe{d7cnwyx-~ zEq6Pplw}8oKwVg1N|#Z~F|-smZ8<uyfZD>&zzn-BWxHjkUEFdk$qtaV4Bcs$VSoQ8 z`G|1|FxxkIpT6V&{qN)ZUOZ_F>b0CUu1$?=4`|udj0xXn`+oRWL3YJAe)#~q@~)2W zGyYSbe-C399=U?GU;faKSZ7P6!lct5{_*dh%@<x_Y%<}~GA*xtTR$*#YTLE#hr8Pp zfbKR0q>CNBXv`SvyrBi{0TtKQDNHYa9R1!qTMlclIA~(+od>%)ThL+CwsalndUs3L z`&)WHpz2j$3hBXq$icKN3{59~7{j=x+7@h*`yA7#*u$C*j(vW=ntcK919<Pl`&qo7 z+^@djGuTGY{f!^Ob+KUThnp+i>Q6R%5O0rJEbbxXvu%g9TvNz)hiz)!Gp307OiS0N zTYA6A^*+*ixO)$5uR~)`nJ=icRn?|7SNqlA#9-1H4Mx5G;66?;pwSzz*~iRyO?U-N z=q%cOoWXo>-!Y@^*p3~B-3l;6*akMi(B0@6%$B`{{fg$Wc3)H2j>(|YF}7*c)~=7X z^w!nAwT{EDgK$sDesh!drE4}Nd-b|wJ{4oCRuIQ8JHXmh{R=~$Yit<s?-~fI)Livn zHJx1b@Ar3dAM>{HT=k2ZPW8sw<87VX!XudUkE6G74`6D)-rvd2VAUG)X?Rw`)W&f% z3pb<TZ8RLGy>Irn={SB6-FjCLo2w3V*_Q%@X9iuqZ-QpO2W;=sSh?DH!f{4(2UmNJ z5Zz6Po&j9-yRJ4X$A5)(|BbV)lP&DOz}1gxcy8lP4KL_a!GLPyG~=o_YXaQHmo+wZ z{i|Y|#PRO|+n>0B$So7Dr_t0hMEKB+5YU<fVV}mz>H{jZwRjVz>INXK-AiyrqV`i8 zCO7Qp*1vBD9)6T&&yIqJ8gTO+nim<{->F{skh_fsTXzU56~~zUtt!!`5+#**n+oi4 zw{%dT<UV?WKrbtXOtosT@p>@TvBt5l+1B?eTSb$H-3dq<yMZN1KV<SMoG>HdVE zu8od?=)wi?!(RZiZ5o}LKjR?dTuuUUo~t<4g<ds`rp^;wkrsLZtQuHn1GM%dG*%sE zPRRFiEJ*9GVMJSamFFd{enQhJzPpvP#xn2D+$%oT$~Hz`Xcb#Hv4<1woS<)EUl@E| z#SZm7FSfL**=I(M=sC8ec~H%NQLpLry`Z-81~TMpeW$r9F<XC;)O588I@uVwO}(Hs zW4ZR`{gaBX@Ym34dKj*A1!?x<MiTPJnn|wym}Y_7c$WD67MkjJ3D-ZO;r|H@dj-wH z12nuFz|{|8v$c4vZGq!IgUR~inhRY0OSF97nHE+@hrQi%8&|y@z1maq3mgxh<@g29 zI#>HFde!UYmpJP~5dOgy@yQmkzzKJ#Smx28CylJV??F+|!~hfRt>VL+_!K8hGXHd& z_;8E;sg{wKTf{fDiiTEks#PquiXR3{tN2u__;RcGCXQRUUw(?K{=V-{Zeam*tou9# zuJ&Whu>%;^58YSdsvYQU+;*<anC~I+NQd3gF_`FJzZ)#Jvg`UD6GI(D8$NCQ_Ws-0 zm6~;q^}lbLtHIgW%KOGxegA`8?c3+x%T=$^@M>N?>3Er|znO5}eD0q){<*%Zm=z9c zrI2P9_UqW!(5b!$4Nv;C-goY|9Q!oQl{9&-b{Ea5;QRHzC6K!?XZ;7LxAx>z+Z|l( zK6I;Zoq?IGXVG1_F+>*oI(YdQ!0X?k<_R<lFqpcQ4Qq7lr?h3?Gy<i5=5}tQpy_1R z0YC_vJmbL`J4g}F^&t#@1EW_vu^rx_qqv3?<PFsu(W}MxcOKHJg$+be>+fdRjUJU< zVk5UP@g62V%EafHs8WeL;!V6qB|fSWpI50jHhf1;bNt`+-KMtw4+YW&qL-^*jixG4 z^SE44FMQ-3Kl={H-`n-dWt?>kv(|5*;jL&EuJ;7E+BACgr{Oaj8OZCT`TMEa2VNTv zdM$?5=g=_9G7CRJQ%^!V{#Udx-}fTNS~X|6x&_g?@hU0#(;hv?zXlCie=*y3fvl?I 
zzBdOBt8GxtO(#gZH_-BZCyDJKVI%^z2qqW4bu;3MWo%saX&~dDK(p~At^PZ~T?b>I zLx;VWM!!b0LBX@}G8*fDp;`DXX!z#a@?7;*pxgLRf@oapd7iU=nU=4kW(Bfdk4{yM zW`lTPcgK+`0~ppQ_tbpA^3fROOb=G8--*K1)<eCQb2TEu3vkPY{sl@^Js4Zt-~jxg zkEpqY#|JvuBx!+RitV3NGwGdeNW$oCyg$;`dF{VxI6g%r`k};)pY%P!u_IvTXMGpA z>LE?1P;X`57<{3Py+wXr9BmhGZx=t+E*?>f4wSH`yJpa?_ua$QKhfpd$Fb8u#B|iH zp9B&rQ`H%Cevb}sMpM7qyxuB4*lK^Wb>s(atSY|LCSKh>BD9OacCpYd-q|ib)Xr)n zKWG<UY8S8W5QPpg*df$Z?BrlV!`ecRsu0}od+6G?sl_oh{7zRt<W|aRwPPS?+kGTp zd!BU8_H7q`buy95Bua&fa@P3|YuB^a_Wt~6?+-8Q+w06Wsh!@e@kj68Fr0V)=TFbM zzxLue_vtg|++Y3qIrlX^=iOJHIPd-_f8PBnJMV4<%x<Q-IfCfsXb%ogJ4So6`@#Oh zT6N>Ba+HfkbF18HM$Rhy{#@1dD^>3u@93?){qP?_yXT4JB3Vw$>)ub|edyG>_i%jO zdo%jCC)d4$=&K^@-j7J@-v7n;4`rcU-5kM?EZd{KW%=yTZdoc9%ksIw-bV%?%Qm82 zXfG|xSME6Pem;KQeQ4>t`v%Q<_g$}@bAKy;-d+CsdH4MT|LuO|{pa0JTlcaHt#)$+ zPeivzd&}}oVYe)+8EJKgUF!PYCv=cy8__PbmzHHFx9<IAcHR4UX5IVMrFHKb-j^5G zy$|615Z+&;*1a{ne`pul+0D@!5#1i`Ez8rU-Lh0JmgN~^?=K9HWgF2hw0p^N@BtI+ zZZF1CiHxMZjA^gXUZ!nn+@OwJhTB+I{|Rcrl9oHt(%rmuFzr4JHc#b8*!W=rpt-GJ zWhS7ZP|&KGR(*tZD<RSBl1FXLN~>G5=yn}Ac=+H4-hlgD?;C!5>iz${vh@CiNwD{g z$VR<yL|W*5BjU67jR=&kf0^hy(5voxrvCtA2gkcFPsXM6NF!{z&!}$ciRIGeOt#oF za!U_=eW2e`C7<XS=?NyJbWeZJiX1PcdPem6{+@gx5tEB@F55F=xV68hagpD#!;>u~ zl8J)ZxN{o-o0s4nkW;0CoK5ZkGv9jat*pBh6|?)5D{tvpP81q+dh~<3K^*||1vw)X zD)S9~!QN(Z#o7)Twq_wmA{)!aiOfh(__&?m3W}U>Ylx5ORH>9Nj+_`eF_bUl76)^M z<cXoiV)4Y#vTks=v9MUoWd|GcgVYDArj91DEfFgv;-kSS%x0wwX>MZ5WjC1l6GObc zAeZD~iuX$;s7*@qu7Vr~U{oq5=9}zXv<Yi*sZ`GF(3LBZiziBQESNZzC_tu!vp{6F zsTJZ`3Df*kxv-q5@VQf<l8{OYaY@eddW*pf@I*Q-=ZiVehi;lgZE~T)>yH^o!^K2K zp3g{&xx&1(DnnCd!|<@lY#20H%>6xbmTZ!kKb4kBr9@Uyt6;GV8U&-oY=rs947mdI zEtL}U#Y(Z1P=qL!qfK3H(O2B6LE@cToNP+T1HW7<$4Njb8Z9K2Wkq*T>$p_HWL$<9 zk~T4u%@yFA{XONZT*5G(NEQ-_o?H9(z?#b@Uk|Nf(4A-#QjsX_!g>-SHk2ooOQ{?z zTLDeS<dh4r@HAN7C=3rbG@)oqVRDO?*YgD&5mULHMwu>1*`lIt-j^uEq*AT`3YQ>l zIi^!c=Oyxv5^U8ovaMaHPLZCR0|qJ=;EIqSDU&ZROX)H(Z`m#KC5aw8rqe^-EfL2z zwfx8E3<!S-9bqS(shE^b@Ta60RKvs5Ur!|6pv7V_>-55~et3&0nYD6hg+eydPMS_B zT}<%g8jZ}gsWlxvTgd6WPp*3jYXLch%$DLB$Q5_K2>XEkFGUY$IC<%q9F+>CL{XN= z4R%MQV@uRsap(Z>>xWB213P5%Q;;^K+J(r!3(>A{Zek)aim{whlN^ZRY$8F$gD)!S zuAeXE^0{O|%BSQQpONx;tPPSl$bpE@8CW|J=Z|AHp3Cq@XO8#r18~n%MTBqyoB3$2 zoQ<Qjkk8H>AD!Y?Wa5`MS{^%VG+pl}7_<JdvwmL6#(7MJX-0VLtmCiwT(*Jc;)Ht? zY^DL+&xhfsMP=VqELWnrzCqqog!6)!Ps(m0AcpD8NvXM*LwcFtN`r{Y!C@2XaH3e6 zPj3cT&&YsfP=}hV=xhhH83hy5<fl8K1<YPLe<6M8bnwe2FYh!CA2aApgC>JsF!dXU z%>w*(5WQo1v(YrTIcqZOF?+JV2N@-iEp4Xj81#xf6ieq~ODl4bqGGc;&r?aD1l2Q{ zOIMOez$caZzt0v7PJ}m)C$(#al*e}5X0rwG(u2OyW>4CjHe1p*npEcO_KVh6XwtE@ z?hkkxW3&WsPi~E+$=RWy6$Ob8D>fT#b;Z-sl`(@I{Y1qpL}tBfQrJ@VEaqh2lyPNz z(I$_NdRJnZbZKtZYnm1O%Tafz6qP+@PX>DheLC*;nmqD~JSSL+@vy!et9Z=7m5qcg zrO0fsl9*D)>1f8k9C4+~5v+UC@$rD{DS9&gN(0`r6chZpNLZgD+LAk{i@AN~*-=Yu z++eRng<Lsm@MbYyn-fy$XxO=izTg`*8t@DLET^R`c#}=dbw{}|290dc6m!kU6S6lz zaFMWI7YPT)X3{>Atvnajr-|mY-tP`9`UH=2W!dBKR;1ZrJ|-lS(;26ZbTSuSNd^tx zd}NgP_NHP&=+vCSyXINU4|_7ER5Yy6n4HnYM}2E^gnMajHmHv_<kux;?FyZs5P}{; z!0Dfggd^!#CRCmiLatcGS&B>%?rmJLOb~QUi;*e&bbOZVvy_YqbHqn_ZZ?n%WWD*g z%bAbLw$+Jg@D4jf!mCAT*1i@M(o4H|vrDj{XUt*rttob0gTC{#f<7f>ER|WoX_RJt z^2FlYntv^(_b>X&zO^L*a^#~~@R{|eXN5=+GC_Z)d^8gStj9DlWnA+u22B3NfN*M7 zM>buCt(K&)DHV5xPQ?uAHT;$oUb24u+_pW^QP*ld=Jqd3;b1DN=(Wj1y0lZLZurs? 
zd?`Oyu@n7#+#OnK=*lV2WxeTmCJkC?*dM;Nw3>=5xM06bXmM*kV_J@7gE`Vs9Q*?Y zJ{(L(pv%aV*D~%d%F%`mfP;KHn8w~kk85=~nu%YECeeoMyJ?0}F++fSK_AVeb>wHT zeHMJBJz0JH)YOVBg#+?LwkXfd!Y5o7@;ej31w;1fV1;l}+`zXM0~P;T+(LU0Yvp(* zREY@<nGtVWe%$bh^qMqkpCDaD1^Cmb-HjMtBH5#1h0bo@n1TP5V}^#EJq|tm(hr?l zDzxWM=W)sC8*A_wb2%#%Td=(n<6*;2{lZ4Bv@RYFradl8#^ZvoxI=(nNy2yX5g}bx z?38gLrzo<U_SE5@#aj}*IibJRoxbIRezMTH9y(TH3qGJjtdY)^p|h+MHj@2>x$vqU zK4%`EhE1}evNQ|3$=;<%I9`c>M~V~V2FiV@_-rsoa>U(B1mm1>`rRZC>_&cTo)}B6 z__F$oeJ<@^1P$mTlA1FFmt(RWerzvO4uNfo&{IVU#}Vs9KaxTorhI}Ny=0o0o?G!v zdkntm*iPOQpF{qI4{hcGr#|jVkz6{;%bulBU`7`s-WSQ=fzLQGX46v~@ARo4=*NVa zWCVE^cB34=lLwt|%(jAeCv7G7H)8b9qOIhRy=X^~qrkh8cP_SvgJ`e#Mt8=wQkT+y zp6`->z;{Eii=q!ph3XabpPn`NO)*0--OS}{@vwI*p9cgd_zIzJIu)Otn@#1Zrd{ms zlm`%t$eF;Mn#*`Ak=aPT5uZlIM~GL@gD?4&dZXkD(l_O@ZGUU(b$7mM<Tm8otm0>3 zuZ$ccfTt~7(|hKpy>enxcg_5Sy1d-1pH9kyQRFW84}8GB)~rcIj~o8#vLLTHS0p88 z>Qgbdo#bum1oeviElqKQoI7eCCtf2iOUW~u3Qbj(lH*hM<wz#7MmZ4mP8Y4%<)|T4 zjv$XDXic_<rgZ*kj~ls|{6MzpCSu1|{9`lOX_??{f)8W#Q_)NyNo%2sGGAFK*qsv< z`*Jkvr?^ae92B!oU9--o5aY??4$u--X>ZZvjHjZosmJ9kgV$-qtuCtcm&zVTdM(<_ z71LyMm!%vND5ho_vSz%?iAEk+1OGb7Y1vt`%>J>ZvVUy`HjSs@6Q`osk9^n2g^Rfo z>gkFPIlk%dSceT8`H5=tD$Zo9@FCQHziw8zs5Y5>GWIGp_)m^G)RfqY&#m~pJeRQ; zQKzH+u^7$>4*mGz%o_5Fetaxo@Gp8OH<bNTE4qm>hXB3feBh|i*uOT1^MV7pC=>^8 zlwZ=(Y;e0KqlTg8>TteXguIKAA&#?^k~6o@i6M&;Q%dYqyn(2&YKoxFdD40d@{uhu zY1ll!&d~97v~hl2Ld`I5o*iu-9aikf@6B^+7Uvn%)<DSX48Z@8SGMAICKGC$IkNG5 z#Eo;oVs4f69&<yt0_tbu9K1&LHaO#)p4FutjWf*FdD06R(-9$5!P$r6+JJM6PJ-=y z(@x4EftjU<Gw2A}ckP?TnRzo8?AQ}g<b(h?@7rueI@Y@aIPTkYbR)3SSh*V5bwkTT zLm1e{rqJ4KIcf;7u|7&lu*IoyC|3zZU&^}6i<oL$B+Qe;Lqmo&+&YdCB>r#*4=x*c zlrD9e^h`K!8<n98dlYzw?erwN%Jsvi5OHn6E1d>E?iB6~4SE^ytiT7xX3!)4ZMIdV zoujtFzcN<nCvY~6DqF`~O?W#2K{<4%vEk05++zT|d0*PZ7gN?1R7%sp=W5`Ch}b{Z z6cy_M`^qMMx?JJey<edRJ1^7ODVAMQfCQ4?w5dObQz1eZsf+YSsO(#QK&*m<0$SPB zU%;dAuMpWGied`*&WzsT4Fb!kjpR*|+zn8oq*#`a25227h}p(yev=6G1F^mF)0)FM z6`WDnY2sJ#DEO85YN(hXZLE90@;x12DkOZimAc=4OAlS)$L!AeG266be%v-SJwG!^ zeSC`$boyfhI{XuQ=xevrHD(*<4THGI#}mr~MsvcT8<tL)^aJ?1QmXXx<syCOz!$yK r<wR*9md_7Jg+gwHZ+;!;aW%)s@q-`0e`4+Vk6r&3{GB5H)balSm(7uI literal 0 HcmV?d00001 diff --git a/autotest/ogr/ogr_parquet.py b/autotest/ogr/ogr_parquet.py index 208ccb4ced5e..68bcdcea5250 100755 --- a/autotest/ogr/ogr_parquet.py +++ b/autotest/ogr/ogr_parquet.py @@ -3078,7 +3078,7 @@ def check_file(filename): ############################################################################### # Test GetExtent() using bbox.minx, bbox.miny, bbox.maxx, bbox.maxy fields -# as in Ouverture Maps datasets +# as in Overture Maps datasets def test_ogr_parquet_bbox_double(): @@ -3086,6 +3086,7 @@ def test_ogr_parquet_bbox_double(): ds = ogr.Open("data/parquet/overture_map_extract.parquet") lyr = ds.GetLayer(0) assert lyr.GetGeometryColumn() == "geometry" + assert lyr.GetLayerDefn().GetFieldIndex("bbox.minx") < 0 assert lyr.TestCapability(ogr.OLCFastGetExtent) == 1 minx, maxx, miny, maxy = lyr.GetExtent() assert (minx, miny, maxx, maxy) == pytest.approx( @@ -3109,6 +3110,7 @@ def test_ogr_parquet_bbox_double(): ds = ogr.Open("data/parquet/overture_map_extract.parquet") lyr = ds.GetLayer(0) assert lyr.GetGeometryColumn() == "geometry" + assert lyr.GetLayerDefn().GetFieldIndex("bbox.minx") >= 0 assert lyr.TestCapability(ogr.OLCFastGetExtent) == 0 minx, maxx, miny, maxy = lyr.GetExtent() assert (minx, miny, maxx, maxy) == pytest.approx( @@ -3117,6 +3119,51 @@ def test_ogr_parquet_bbox_double(): ds = None +############################################################################### +# Test GetExtent() using bbox.minx, bbox.miny, bbox.maxx, 
bbox.maxy fields +# as in Overture Maps datasets 2024-04-16-beta.0 + + +def test_ogr_parquet_bbox_float32_but_no_covering_in_metadata(): + + ds = ogr.Open("data/parquet/bbox_similar_to_overturemaps_2024-04-16-beta.0.parquet") + lyr = ds.GetLayer(0) + assert lyr.GetGeometryColumn() == "geometry" + assert lyr.GetLayerDefn().GetFieldIndex("bbox.xmin") < 0 + assert lyr.TestCapability(ogr.OLCFastGetExtent) == 1 + minx, maxx, miny, maxy = lyr.GetExtent() + assert (minx, miny, maxx, maxy) == pytest.approx( + (478315.53125, 4762880.5, 481645.3125, 4765610.5) + ) + + with ogrtest.spatial_filter( + lyr, + minx + (maxx - minx) / 2, + miny + (maxy - miny) / 2, + maxx - (maxx - minx) / 2, + maxy - (maxy - miny) / 2, + ): + f = lyr.GetNextFeature() + assert f.GetFID() == 8 + assert lyr.GetNextFeature() is None + + ds = None + + with gdaltest.config_option("OGR_PARQUET_USE_BBOX", "NO"): + ds = ogr.Open( + "data/parquet/bbox_similar_to_overturemaps_2024-04-16-beta.0.parquet" + ) + lyr = ds.GetLayer(0) + assert lyr.GetGeometryColumn() == "geometry" + assert lyr.GetLayerDefn().GetFieldIndex("bbox.xmin") >= 0 + assert lyr.TestCapability(ogr.OLCFastGetExtent) == 0 + minx, maxx, miny, maxy = lyr.GetExtent() + assert (minx, miny, maxx, maxy) == pytest.approx( + (478315.53125, 4762880.5, 481645.3125, 4765610.5) + ) + ds = None + + ############################################################################### diff --git a/ogr/ogrsf_frmts/arrow_common/ograrrowwriterlayer.hpp b/ogr/ogrsf_frmts/arrow_common/ograrrowwriterlayer.hpp index 32e01cd0b6ab..39c429fe523c 100644 --- a/ogr/ogrsf_frmts/arrow_common/ograrrowwriterlayer.hpp +++ b/ogr/ogrsf_frmts/arrow_common/ograrrowwriterlayer.hpp @@ -409,7 +409,10 @@ inline void OGRArrowWriterLayer::CreateSchemaCommon() auto bbox_field_xmax(arrow::field("xmax", arrow::float32(), false)); auto bbox_field_ymax(arrow::field("ymax", arrow::float32(), false)); auto bbox_field(arrow::field( - std::string(poGeomFieldDefn->GetNameRef()).append("_bbox"), + CPLGetConfigOption("OGR_PARQUET_COVERING_BBOX_NAME", + std::string(poGeomFieldDefn->GetNameRef()) + .append("_bbox") + .c_str()), arrow::struct_( {std::move(bbox_field_xmin), std::move(bbox_field_ymin), std::move(bbox_field_xmax), std::move(bbox_field_ymax)}), diff --git a/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp b/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp index 18838645cea3..0fae74dc8bde 100644 --- a/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp +++ b/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp @@ -554,7 +554,7 @@ void OGRParquetLayer::EstablishFeatureDefn() } // Synthetize a GeoParquet bounding box column definition when detecting - // a Overture Map dataset + // a Overture Map dataset < 2024-04-16-beta.0 if (m_oMapGeometryColumns.empty() && bUseBBOX && oMapParquetColumnNameToIdx.find("geometry") != oMapParquetColumnNameToIdx.end() && @@ -600,6 +600,56 @@ void OGRParquetLayer::EstablishFeatureDefn() oDef.Add("encoding", "WKB"); m_oMapGeometryColumns["geometry"] = std::move(oDef); } + // Overture Maps 2024-04-16-beta.0 almost follows GeoParquet 1.1, except + // they don't declare the "covering" element in the GeoParquet JSON metadata + else if (m_oMapGeometryColumns.find("geometry") != + m_oMapGeometryColumns.end() && + bUseBBOX && + !m_oMapGeometryColumns["geometry"].GetObj("covering").IsValid() && + m_oMapGeometryColumns["geometry"].GetString("encoding") == "WKB" && + oMapParquetColumnNameToIdx.find("geometry") != + oMapParquetColumnNameToIdx.end() && + oMapParquetColumnNameToIdx.find("bbox.xmin") != + 
oMapParquetColumnNameToIdx.end() && + oMapParquetColumnNameToIdx.find("bbox.ymin") != + oMapParquetColumnNameToIdx.end() && + oMapParquetColumnNameToIdx.find("bbox.xmax") != + oMapParquetColumnNameToIdx.end() && + oMapParquetColumnNameToIdx.find("bbox.ymax") != + oMapParquetColumnNameToIdx.end()) + { + CPLJSONObject oDef = m_oMapGeometryColumns["geometry"]; + CPLJSONObject oCovering; + oDef.Add("covering", oCovering); + CPLJSONObject oBBOX; + oCovering.Add("bbox", oBBOX); + { + CPLJSONArray oArray; + oArray.Add("bbox"); + oArray.Add("xmin"); + oBBOX.Add("xmin", oArray); + } + { + CPLJSONArray oArray; + oArray.Add("bbox"); + oArray.Add("ymin"); + oBBOX.Add("ymin", oArray); + } + { + CPLJSONArray oArray; + oArray.Add("bbox"); + oArray.Add("xmax"); + oBBOX.Add("xmax", oArray); + } + { + CPLJSONArray oArray; + oArray.Add("bbox"); + oArray.Add("ymax"); + oBBOX.Add("ymax", oArray); + } + oSetBBOXColumns.insert("bbox"); + m_oMapGeometryColumns["geometry"] = std::move(oDef); + } int iParquetCol = 0; for (int i = 0; i < m_poSchema->num_fields(); ++i) diff --git a/ogr/ogrsf_frmts/parquet/ogrparquetwriterlayer.cpp b/ogr/ogrsf_frmts/parquet/ogrparquetwriterlayer.cpp index eac58831f317..a62bb4cb17aa 100644 --- a/ogr/ogrsf_frmts/parquet/ogrparquetwriterlayer.cpp +++ b/ogr/ogrsf_frmts/parquet/ogrparquetwriterlayer.cpp @@ -763,7 +763,9 @@ std::string OGRParquetWriterLayer::GetGeoMetadata() const } // Bounding box column definition - if (m_bWriteBBoxStruct) + if (m_bWriteBBoxStruct && + CPLTestBool(CPLGetConfigOption( + "OGR_PARQUET_WRITE_COVERING_BBOX_IN_METADATA", "YES"))) { CPLJSONObject oCovering; oColumn.Add("covering", oCovering); From 8133fb41fb336a0b716772e7998194ddc8674163 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Wed, 17 Apr 2024 00:39:38 +0200 Subject: [PATCH 018/230] Parquet: make it recognize bbox field from Overture Maps 2024-01-17-alpha.0 --- ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp b/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp index 0fae74dc8bde..3a54d917036f 100644 --- a/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp +++ b/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp @@ -555,7 +555,13 @@ void OGRParquetLayer::EstablishFeatureDefn() // Synthetize a GeoParquet bounding box column definition when detecting // a Overture Map dataset < 2024-04-16-beta.0 - if (m_oMapGeometryColumns.empty() && bUseBBOX && + if ((m_oMapGeometryColumns.empty() || + // Below is for release 2024-01-17-alpha.0 + (m_oMapGeometryColumns.find("geometry") != + m_oMapGeometryColumns.end() && + !m_oMapGeometryColumns["geometry"].GetObj("covering").IsValid() && + m_oMapGeometryColumns["geometry"].GetString("encoding") == "WKB")) && + bUseBBOX && oMapParquetColumnNameToIdx.find("geometry") != oMapParquetColumnNameToIdx.end() && oMapParquetColumnNameToIdx.find("bbox.minx") != @@ -568,6 +574,11 @@ void OGRParquetLayer::EstablishFeatureDefn() oMapParquetColumnNameToIdx.end()) { CPLJSONObject oDef; + if (m_oMapGeometryColumns.find("geometry") != + m_oMapGeometryColumns.end()) + { + oDef = m_oMapGeometryColumns["geometry"]; + } CPLJSONObject oCovering; oDef.Add("covering", oCovering); CPLJSONObject oBBOX; From 901ae0369c5fa4e936b6619820924aed579b72a2 Mon Sep 17 00:00:00 2001 From: Alessandro Pasotti <elpaso@itopen.it> Date: Wed, 17 Apr 2024 17:25:36 +0200 Subject: [PATCH 019/230] gdalinfo: use argparser (#9687) --- apps/gdal_utils_priv.h | 12 +- apps/gdalargumentparser.cpp 
| 16 ++ apps/gdalargumentparser.h | 10 + apps/gdalinfo_bin.cpp | 147 ++++------ apps/gdalinfo_lib.cpp | 348 ++++++++++++++---------- autotest/utilities/test_gdalinfo.py | 4 +- autotest/utilities/test_gdalinfo_lib.py | 2 +- doc/source/programs/gdalinfo.rst | 8 +- 8 files changed, 295 insertions(+), 252 deletions(-) diff --git a/apps/gdal_utils_priv.h b/apps/gdal_utils_priv.h index 0490a4a35bd6..8127d5f0fac3 100644 --- a/apps/gdal_utils_priv.h +++ b/apps/gdal_utils_priv.h @@ -44,16 +44,16 @@ CPL_C_START struct GDALInfoOptionsForBinary { /* Filename to open. */ - char *pszFilename; + std::string osFilename{}; /* Open options. */ - char **papszOpenOptions; + CPLStringList aosOpenOptions{}; - /* > for reporting on a particular subdataset */ - int nSubdataset; + /* For reporting on a particular subdataset */ + int nSubdataset = 0; /* Allowed input drivers. */ - char **papszAllowInputDrivers; + CPLStringList aosAllowedInputDrivers{}; }; struct GDALDEMProcessingOptionsForBinary @@ -239,6 +239,8 @@ std::string CPL_DLL GDALVectorTranslateGetParserUsage(); std::string CPL_DLL GDALWarpAppGetParserUsage(); +std::string CPL_DLL GDALInfoAppGetParserUsage(); + #endif /* #ifndef DOXYGEN_SKIP */ #endif /* GDAL_UTILS_PRIV_H_INCLUDED */ diff --git a/apps/gdalargumentparser.cpp b/apps/gdalargumentparser.cpp index fef16fbc1f93..8870b03805d5 100644 --- a/apps/gdalargumentparser.cpp +++ b/apps/gdalargumentparser.cpp @@ -339,6 +339,22 @@ GDALArgumentParser::get_non_positional_arguments(const CPLStringList &aosArgs) return args; } +Argument &GDALArgumentParser::add_inverted_logic_flag(const std::string &name, + bool *store_into, + const std::string &help) +{ + return add_argument(name) + .default_value(true) + .implicit_value(false) + .action( + [store_into](const auto &) + { + if (store_into) + *store_into = false; + }) + .help(help); +} + /************************************************************************/ /* parse_args() */ /************************************************************************/ diff --git a/apps/gdalargumentparser.h b/apps/gdalargumentparser.h index 5310d0f464d5..c5b545748bf2 100644 --- a/apps/gdalargumentparser.h +++ b/apps/gdalargumentparser.h @@ -96,6 +96,16 @@ class GDALArgumentParser : public ArgumentParser //! Return the non positional arguments. CPLStringList get_non_positional_arguments(const CPLStringList &aosArgs); + /** + * Add an inverted logic (default true, false when set) flag + * @param name flag name + * @param store_into optional pointer to a bool variable where to store the value + * @param help optional help text + */ + Argument &add_inverted_logic_flag(const std::string &name, + bool *store_into = nullptr, + const std::string &help = ""); + private: std::map<std::string, ArgumentParser::argument_it>::iterator find_argument(const std::string &name); diff --git a/apps/gdalinfo_bin.cpp b/apps/gdalinfo_bin.cpp index 61a977ba03a2..0a72c73a35d4 100644 --- a/apps/gdalinfo_bin.cpp +++ b/apps/gdalinfo_bin.cpp @@ -35,53 +35,36 @@ #include "gdal_utils_priv.h" /************************************************************************/ -/* Usage() */ +/* GDALExit() */ +/* This function exits and cleans up GDAL and OGR resources */ +/* Perhaps it should be added to C api and used in all apps? */ /************************************************************************/ -static void Usage(bool bIsError, const char *pszErrorMsg = nullptr) - +static int GDALExit(int nCode) { - fprintf( - bIsError ? 
stderr : stdout, - "Usage: gdalinfo [--help] [--help-general]\n" - " [-json] [-mm] [-stats | -approx_stats] [-hist]\n" - " [-nogcp] [-nomd] [-norat] [-noct] [-nofl]\n" - " [-checksum] [-listmdd] [-mdd <domain>|all]\n" - " [-proj4] [-wkt_format {WKT1|WKT2|<other_format>}]...\n" - " [-sd <subdataset>] [-oo <NAME>=<VALUE>]... [-if " - "<format>]...\n" - " <datasetname>\n"); - - if (pszErrorMsg != nullptr) - fprintf(stderr, "\nFAILURE: %s\n", pszErrorMsg); - - exit(bIsError ? 1 : 0); -} + const char *pszDebug = CPLGetConfigOption("CPL_DEBUG", nullptr); + if (pszDebug && (EQUAL(pszDebug, "ON") || EQUAL(pszDebug, ""))) + { + GDALDumpOpenDatasets(stderr); + CPLDumpSharedList(nullptr); + } -/************************************************************************/ -/* GDALInfoOptionsForBinary() */ -/************************************************************************/ + GDALDestroyDriverManager(); -static GDALInfoOptionsForBinary *GDALInfoOptionsForBinaryNew(void) -{ - return static_cast<GDALInfoOptionsForBinary *>( - CPLCalloc(1, sizeof(GDALInfoOptionsForBinary))); + OGRCleanupAll(); + + exit(nCode); } /************************************************************************/ -/* GDALInfoOptionsForBinaryFree() */ +/* Usage() */ /************************************************************************/ -static void -GDALInfoOptionsForBinaryFree(GDALInfoOptionsForBinary *psOptionsForBinary) +static void Usage() + { - if (psOptionsForBinary) - { - CPLFree(psOptionsForBinary->pszFilename); - CSLDestroy(psOptionsForBinary->papszOpenOptions); - CSLDestroy(psOptionsForBinary->papszAllowInputDrivers); - CPLFree(psOptionsForBinary); - } + fprintf(stderr, "%s\n", GDALInfoAppGetParserUsage().c_str()); + GDALExit(1); } /************************************************************************/ @@ -93,39 +76,30 @@ MAIN_START(argc, argv) { EarlySetConfigOptions(argc, argv); - GDALAllRegister(); + /* -------------------------------------------------------------------- */ + /* Register standard GDAL drivers, and process generic GDAL */ + /* command options. */ + /* -------------------------------------------------------------------- */ + GDALAllRegister(); argc = GDALGeneralCmdLineProcessor(argc, &argv, 0); if (argc < 1) - exit(-argc); + GDALExit(-argc); - for (int i = 0; argv != nullptr && argv[i] != nullptr; i++) - { - if (EQUAL(argv[i], "--utility_version")) - { - printf("%s was compiled against GDAL %s and is running against " - "GDAL %s\n", - argv[0], GDAL_RELEASE_NAME, GDALVersionInfo("RELEASE_NAME")); - CSLDestroy(argv); - return 0; - } - else if (EQUAL(argv[i], "--help")) - { - Usage(false); - } - } - argv = CSLAddString(argv, "-stdout"); + /* -------------------------------------------------------------------- */ + /* Parse command line */ + /* -------------------------------------------------------------------- */ - GDALInfoOptionsForBinary *psOptionsForBinary = - GDALInfoOptionsForBinaryNew(); + GDALInfoOptionsForBinary sOptionsForBinary; - GDALInfoOptions *psOptions = - GDALInfoOptionsNew(argv + 1, psOptionsForBinary); - if (psOptions == nullptr) - Usage(true); + std::unique_ptr<GDALInfoOptions, decltype(&GDALInfoOptionsFree)> psOptions{ + GDALInfoOptionsNew(argv + 1, &sOptionsForBinary), GDALInfoOptionsFree}; + CSLDestroy(argv); - if (psOptionsForBinary->pszFilename == nullptr) - Usage(true, "No datasource specified."); + if (!psOptions) + { + Usage(); + } /* -------------------------------------------------------------------- */ /* Open dataset. 
*/ @@ -138,10 +112,10 @@ MAIN_START(argc, argv) #endif GDALDatasetH hDataset = GDALOpenEx( - psOptionsForBinary->pszFilename, + sOptionsForBinary.osFilename.c_str(), GDAL_OF_READONLY | GDAL_OF_RASTER | GDAL_OF_VERBOSE_ERROR, - psOptionsForBinary->papszAllowInputDrivers, - psOptionsForBinary->papszOpenOptions, nullptr); + sOptionsForBinary.aosAllowedInputDrivers, + sOptionsForBinary.aosOpenOptions, nullptr); if (hDataset == nullptr) { @@ -151,11 +125,11 @@ MAIN_START(argc, argv) VSIStatBuf sStat; CPLString message; message.Printf("gdalinfo failed - unable to open '%s'.", - psOptionsForBinary->pszFilename); - if (VSIStat(psOptionsForBinary->pszFilename, &sStat) == 0) + sOptionsForBinary.osFilename.c_str()); + if (VSIStat(sOptionsForBinary.osFilename.c_str(), &sStat) == 0) { GDALDriverH drv = - GDALIdentifyDriverEx(psOptionsForBinary->pszFilename, + GDALIdentifyDriverEx(sOptionsForBinary.osFilename.c_str(), GDAL_OF_VECTOR, nullptr, nullptr); if (drv) { @@ -169,19 +143,19 @@ MAIN_START(argc, argv) /* If argument is a VSIFILE, then print its contents */ /* -------------------------------------------------------------------- */ - if (STARTS_WITH(psOptionsForBinary->pszFilename, "/vsizip/") || - STARTS_WITH(psOptionsForBinary->pszFilename, "/vsitar/")) + if (STARTS_WITH(sOptionsForBinary.osFilename.c_str(), "/vsizip/") || + STARTS_WITH(sOptionsForBinary.osFilename.c_str(), "/vsitar/")) { const char *const apszOptions[] = {"NAME_AND_TYPE_ONLY=YES", nullptr}; - VSIDIR *psDir = - VSIOpenDir(psOptionsForBinary->pszFilename, -1, apszOptions); + VSIDIR *psDir = VSIOpenDir(sOptionsForBinary.osFilename.c_str(), -1, + apszOptions); if (psDir) { fprintf(stdout, "Unable to open source `%s' directly.\n" "The archive contains several files:\n", - psOptionsForBinary->pszFilename); + sOptionsForBinary.osFilename.c_str()); int nCount = 0; while (auto psEntry = VSIGetNextDirEntry(psDir)) { @@ -189,13 +163,13 @@ MAIN_START(argc, argv) psEntry->pszName[strlen(psEntry->pszName) - 1] != '/') { fprintf(stdout, " %s/%s/\n", - psOptionsForBinary->pszFilename, + sOptionsForBinary.osFilename.c_str(), psEntry->pszName); } else { fprintf(stdout, " %s/%s\n", - psOptionsForBinary->pszFilename, + sOptionsForBinary.osFilename.c_str(), psEntry->pszName); } nCount++; @@ -209,12 +183,6 @@ MAIN_START(argc, argv) } } - CSLDestroy(argv); - - GDALInfoOptionsForBinaryFree(psOptionsForBinary); - - GDALInfoOptionsFree(psOptions); - GDALDumpOpenDatasets(stderr); GDALDestroyDriverManager(); @@ -230,19 +198,19 @@ MAIN_START(argc, argv) /* Read specified subdataset if requested. */ /* -------------------------------------------------------------------- */ - if (psOptionsForBinary->nSubdataset > 0) + if (sOptionsForBinary.nSubdataset > 0) { char **papszSubdatasets = GDALGetMetadata(hDataset, "SUBDATASETS"); int nSubdatasets = CSLCount(papszSubdatasets); if (nSubdatasets > 0 && - psOptionsForBinary->nSubdataset <= nSubdatasets) + sOptionsForBinary.nSubdataset <= nSubdatasets) { char szKeyName[1024]; char *pszSubdatasetName; snprintf(szKeyName, sizeof(szKeyName), "SUBDATASET_%d_NAME", - psOptionsForBinary->nSubdataset); + sOptionsForBinary.nSubdataset); szKeyName[sizeof(szKeyName) - 1] = '\0'; pszSubdatasetName = CPLStrdup(CSLFetchNameValue(papszSubdatasets, szKeyName)); @@ -255,11 +223,11 @@ MAIN_START(argc, argv) fprintf(stderr, "gdalinfo warning: subdataset %d of %d requested. 
" "Reading the main dataset.\n", - psOptionsForBinary->nSubdataset, nSubdatasets); + sOptionsForBinary.nSubdataset, nSubdatasets); } } - char *pszGDALInfoOutput = GDALInfo(hDataset, psOptions); + char *pszGDALInfoOutput = GDALInfo(hDataset, psOptions.get()); if (pszGDALInfoOutput) printf("%s", pszGDALInfoOutput); @@ -271,17 +239,12 @@ MAIN_START(argc, argv) } #endif - GDALInfoOptionsForBinaryFree(psOptionsForBinary); - - GDALInfoOptionsFree(psOptions); - - CSLDestroy(argv); - GDALDumpOpenDatasets(stderr); GDALDestroyDriverManager(); CPLDumpSharedList(nullptr); + GDALDestroy(); exit(0); diff --git a/apps/gdalinfo_lib.cpp b/apps/gdalinfo_lib.cpp index ef98681c0ca0..6ba776d08ba2 100644 --- a/apps/gdalinfo_lib.cpp +++ b/apps/gdalinfo_lib.cpp @@ -31,6 +31,7 @@ #include "cpl_port.h" #include "gdal_utils.h" #include "gdal_utils_priv.h" +#include "gdalargumentparser.h" #include <cmath> #include <limits> @@ -79,61 +80,61 @@ typedef enum struct GDALInfoOptions { /*! output format */ - GDALInfoFormat eFormat; + GDALInfoFormat eFormat = GDALINFO_FORMAT_TEXT; - int bComputeMinMax; + bool bComputeMinMax = false; /*! report histogram information for all bands */ - int bReportHistograms; + bool bReportHistograms = false; /*! report a PROJ.4 string corresponding to the file's coordinate system */ - int bReportProj4; + bool bReportProj4 = false; /*! read and display image statistics. Force computation if no statistics are stored in an image */ - int bStats; + bool bStats = false; /*! read and display image statistics. Force computation if no statistics are stored in an image. However, they may be computed based on overviews or a subset of all tiles. Useful if you are in a hurry and don't want precise stats. */ - int bApproxStats; + bool bApproxStats = true; - int bSample; + bool bSample = false; /*! force computation of the checksum for each band in the dataset */ - int bComputeChecksum; + bool bComputeChecksum = false; /*! allow or suppress ground control points list printing. It may be useful for datasets with huge amount of GCPs, such as L1B AVHRR or HDF4 MODIS which contain thousands of them. */ - int bShowGCPs; + bool bShowGCPs = true; /*! allow or suppress metadata printing. Some datasets may contain a lot of metadata strings. */ - int bShowMetadata; + bool bShowMetadata = true; /*! allow or suppress printing of raster attribute table */ - int bShowRAT; + bool bShowRAT = true; /*! allow or suppress printing of color table */ - int bShowColorTable; + bool bShowColorTable = true; /*! list all metadata domains available for the dataset */ - int bListMDD; + bool bListMDD = false; /*! display the file list or the first file of the file list */ - int bShowFileList; + bool bShowFileList = true; /*! report metadata for the specified domains. "all" can be used to report metadata in all domains. */ - char **papszExtraMDDomains; + CPLStringList aosExtraMDDomains; /*! 
WKT format used for SRS */ - char *pszWKTFormat; + std::string osWKTFormat = "WKT2"; - bool bStdoutOutput; + bool bStdoutOutput = false; }; static int GDALInfoReportCorner(const GDALInfoOptions *psOptions, @@ -215,6 +216,160 @@ gdal_json_object_new_double_significant_digits(double dfVal, dfVal, nSignificantDigits); } +/************************************************************************/ +/* GDALWarpAppOptionsGetParser() */ +/************************************************************************/ + +static std::unique_ptr<GDALArgumentParser> +GDALInfoAppOptionsGetParser(GDALInfoOptions *psOptions, + GDALInfoOptionsForBinary *psOptionsForBinary) +{ + auto argParser = std::make_unique<GDALArgumentParser>( + "gdalinfo", /* bForBinary=*/psOptionsForBinary != nullptr); + + argParser->add_description(_("Raster dataset information utility.")); + + argParser->add_epilog( + _("For more details, consult https://gdal.org/programs/gdalinfo.html")); + + argParser->add_argument("-json") + .flag() + .action([psOptions](const auto &) + { psOptions->eFormat = GDALINFO_FORMAT_JSON; }) + .help(_("Display the output in json format.")); + + argParser->add_argument("-mm") + .store_into(psOptions->bComputeMinMax) + .help(_("Force computation of the actual min/max values for each band " + "in the dataset.")); + + { + auto &group = argParser->add_mutually_exclusive_group(); + group.add_argument("-stats") + .store_into(psOptions->bStats) + .help(_("Read and display image statistics computing exact values " + "if required.")); + + group.add_argument("-approx_stats") + .store_into(psOptions->bApproxStats) + .help( + _("Read and display image statistics computing approximated " + "values on overviews or a subset of all tiles if required.")); + } + + argParser->add_argument("-hist") + .store_into(psOptions->bReportHistograms) + .help(_("Report histogram information for all bands.")); + + argParser->add_inverted_logic_flag( + "-nogcp", &psOptions->bShowGCPs, + _("Suppress ground control points list printing.")); + + argParser->add_inverted_logic_flag("-nomd", &psOptions->bShowMetadata, + _("Suppress metadata printing.")); + + argParser->add_inverted_logic_flag( + "-norat", &psOptions->bShowRAT, + _("Suppress printing of raster attribute table.")); + + argParser->add_inverted_logic_flag("-noct", &psOptions->bShowColorTable, + _("Suppress printing of color table.")); + + argParser->add_inverted_logic_flag("-nofl", &psOptions->bShowFileList, + _("Suppress display of the file list.")); + + argParser->add_argument("-checksum") + .flag() + .store_into(psOptions->bComputeChecksum) + .help(_( + "Force computation of the checksum for each band in the dataset.")); + + argParser->add_argument("-listmdd") + .flag() + .store_into(psOptions->bListMDD) + .help(_("List all metadata domains available for the dataset.")); + + argParser->add_argument("-proj4") + .flag() + .store_into(psOptions->bReportProj4) + .help(_("Report a PROJ.4 string corresponding to the file's coordinate " + "system.")); + + argParser->add_argument("-wkt_format") + .metavar("<WKT1|WKT2|WKT2_2015|WKT2_2018|WKT2_2019>") + .choices("WKT1", "WKT2", "WKT2_2015", "WKT2_2018", "WKT2_2019") + .store_into(psOptions->osWKTFormat) + .help(_("WKT format used for SRS.")); + + argParser->add_argument("-sd") + .metavar("<n>") + .store_into(psOptionsForBinary->nSubdataset) + .help(_("Use subdataset of specified index (starting at 1), instead of " + "the source dataset itself.")); + + argParser->add_argument("-oo") + .metavar("<NAME>=<VALUE>") + .append() + .action( + 
[psOptionsForBinary](const std::string &s) + { + if (psOptionsForBinary) + psOptionsForBinary->aosOpenOptions.AddString(s.c_str()); + }) + .help(_("Open option(s) for dataset.")); + + argParser->add_input_format_argument( + psOptionsForBinary ? &psOptionsForBinary->aosAllowedInputDrivers + : nullptr); + + argParser->add_argument("-mdd") + .metavar("<domain>|all") + .action( + [psOptions](const std::string &value) + { + psOptions->aosExtraMDDomains = + CSLAddString(psOptions->aosExtraMDDomains, value.c_str()); + }) + .help(_("Report metadata for the specified domains. 'all' can be used " + "to report metadata in all domains.")); + + /* Not documented: used by gdalinfo_bin.cpp only */ + argParser->add_argument("-stdout").flag().hidden().store_into( + psOptions->bStdoutOutput); + + if (psOptionsForBinary) + { + argParser->add_argument("dataset_name") + .metavar("<dataset_name>") + .store_into(psOptionsForBinary->osFilename) + .help("Input dataset."); + } + + return argParser; +} + +/************************************************************************/ +/* GDALInfoAppGetParserUsage() */ +/************************************************************************/ + +std::string GDALInfoAppGetParserUsage() +{ + try + { + GDALInfoOptions sOptions; + GDALInfoOptionsForBinary sOptionsForBinary; + auto argParser = + GDALInfoAppOptionsGetParser(&sOptions, &sOptionsForBinary); + return argParser->usage(); + } + catch (const std::exception &err) + { + CPLError(CE_Failure, CPLE_AppDefined, "Unexpected exception: %s", + err.what()); + return std::string(); + } +} + /************************************************************************/ /* GDALInfo() */ /************************************************************************/ @@ -377,7 +532,7 @@ char *GDALInfo(GDALDatasetH hDataset, const GDALInfoOptions *psOptions) } CPLString osWKTFormat("FORMAT="); - osWKTFormat += psOptions->pszWKTFormat; + osWKTFormat += psOptions->osWKTFormat; const char *const apszWKTOptions[] = {osWKTFormat.c_str(), "MULTILINE=YES", nullptr}; @@ -404,7 +559,7 @@ char *GDALInfo(GDALDatasetH hDataset, const GDALInfoOptions *psOptions) if (bJson) { json_object *poWkt = json_object_new_string(pszPrettyWkt); - if (strcmp(psOptions->pszWKTFormat, "WKT2") == 0) + if (psOptions->osWKTFormat == "WKT2") { json_object *poStacWkt = nullptr; json_object_deep_copy(poWkt, &poStacWkt, nullptr); @@ -2021,12 +2176,12 @@ static void GDALInfoReportMetadata(const GDALInfoOptions *psOptions, /* -------------------------------------------------------------------- */ /* Report extra Metadata domains */ /* -------------------------------------------------------------------- */ - if (psOptions->papszExtraMDDomains != nullptr) + if (!psOptions->aosExtraMDDomains.empty()) { CPLStringList aosExtraMDDomainsExpanded; - if (EQUAL(psOptions->papszExtraMDDomains[0], "all") && - psOptions->papszExtraMDDomains[1] == nullptr) + if (EQUAL(psOptions->aosExtraMDDomains[0], "all") && + psOptions->aosExtraMDDomains.Count() == 1) { const CPLStringList aosMDDList(GDALGetMetadataDomainList(hObject)); for (const char *pszDomain : aosMDDList) @@ -2044,8 +2199,7 @@ static void GDALInfoReportMetadata(const GDALInfoOptions *psOptions, } else { - aosExtraMDDomainsExpanded = - CSLDuplicate(psOptions->papszExtraMDDomains); + aosExtraMDDomainsExpanded = psOptions->aosExtraMDDomains; } for (const char *pszDomain : aosExtraMDDomainsExpanded) @@ -2113,135 +2267,37 @@ GDALInfoOptions * GDALInfoOptionsNew(char **papszArgv, GDALInfoOptionsForBinary *psOptionsForBinary) { - bool 
bGotFilename = false; - GDALInfoOptions *psOptions = - static_cast<GDALInfoOptions *>(CPLCalloc(1, sizeof(GDALInfoOptions))); - - psOptions->eFormat = GDALINFO_FORMAT_TEXT; - psOptions->bComputeMinMax = FALSE; - psOptions->bReportHistograms = FALSE; - psOptions->bReportProj4 = FALSE; - psOptions->bStats = FALSE; - psOptions->bApproxStats = TRUE; - psOptions->bSample = FALSE; - psOptions->bComputeChecksum = FALSE; - psOptions->bShowGCPs = TRUE; - psOptions->bShowMetadata = TRUE; - psOptions->bShowRAT = TRUE; - psOptions->bShowColorTable = TRUE; - psOptions->bListMDD = FALSE; - psOptions->bShowFileList = TRUE; - psOptions->pszWKTFormat = CPLStrdup("WKT2"); + auto psOptions = std::make_unique<GDALInfoOptions>(); /* -------------------------------------------------------------------- */ /* Parse arguments. */ /* -------------------------------------------------------------------- */ - for (int i = 0; papszArgv != nullptr && papszArgv[i] != nullptr; i++) + + CPLStringList aosArgv; + + if (papszArgv) { - if (EQUAL(papszArgv[i], "-json")) - psOptions->eFormat = GDALINFO_FORMAT_JSON; - else if (EQUAL(papszArgv[i], "-mm")) - psOptions->bComputeMinMax = TRUE; - else if (EQUAL(papszArgv[i], "-hist")) - psOptions->bReportHistograms = TRUE; - else if (EQUAL(papszArgv[i], "-proj4")) - psOptions->bReportProj4 = TRUE; - else if (EQUAL(papszArgv[i], "-stats")) - { - psOptions->bStats = TRUE; - psOptions->bApproxStats = FALSE; - } - else if (EQUAL(papszArgv[i], "-approx_stats")) - { - psOptions->bStats = TRUE; - psOptions->bApproxStats = TRUE; - } - else if (EQUAL(papszArgv[i], "-sample")) - psOptions->bSample = TRUE; - else if (EQUAL(papszArgv[i], "-checksum")) - psOptions->bComputeChecksum = TRUE; - else if (EQUAL(papszArgv[i], "-nogcp")) - psOptions->bShowGCPs = FALSE; - else if (EQUAL(papszArgv[i], "-nomd")) - psOptions->bShowMetadata = FALSE; - else if (EQUAL(papszArgv[i], "-norat")) - psOptions->bShowRAT = FALSE; - else if (EQUAL(papszArgv[i], "-noct")) - psOptions->bShowColorTable = FALSE; - else if (EQUAL(papszArgv[i], "-listmdd")) - psOptions->bListMDD = TRUE; - /* Not documented: used by gdalinfo_bin.cpp only */ - else if (EQUAL(papszArgv[i], "-stdout")) - psOptions->bStdoutOutput = true; - else if (EQUAL(papszArgv[i], "-mdd") && papszArgv[i + 1] != nullptr) + const int nArgc = CSLCount(papszArgv); + for (int i = 0; i < nArgc; i++) { - psOptions->papszExtraMDDomains = - CSLAddString(psOptions->papszExtraMDDomains, papszArgv[++i]); - } - else if (EQUAL(papszArgv[i], "-oo") && papszArgv[i + 1] != nullptr) - { - i++; - if (psOptionsForBinary) - { - psOptionsForBinary->papszOpenOptions = CSLAddString( - psOptionsForBinary->papszOpenOptions, papszArgv[i]); - } - } - else if (EQUAL(papszArgv[i], "-nofl")) - psOptions->bShowFileList = FALSE; - else if (EQUAL(papszArgv[i], "-sd") && papszArgv[i + 1] != nullptr) - { - i++; - if (psOptionsForBinary) - { - psOptionsForBinary->nSubdataset = atoi(papszArgv[i]); - } - } - else if (EQUAL(papszArgv[i], "-wkt_format") && - papszArgv[i + 1] != nullptr) - { - CPLFree(psOptions->pszWKTFormat); - psOptions->pszWKTFormat = CPLStrdup(papszArgv[++i]); + aosArgv.AddString(papszArgv[i]); } + } - else if (EQUAL(papszArgv[i], "-if") && papszArgv[i + 1] != nullptr) - { - i++; - if (psOptionsForBinary) - { - if (GDALGetDriverByName(papszArgv[i]) == nullptr) - { - CPLError(CE_Warning, CPLE_AppDefined, - "%s is not a recognized driver", papszArgv[i]); - } - psOptionsForBinary->papszAllowInputDrivers = CSLAddString( - psOptionsForBinary->papszAllowInputDrivers, papszArgv[i]); - } 
- } + try + { + auto argParser = + GDALInfoAppOptionsGetParser(psOptions.get(), psOptionsForBinary); - else if (papszArgv[i][0] == '-') - { - CPLError(CE_Failure, CPLE_NotSupported, "Unknown option name '%s'", - papszArgv[i]); - GDALInfoOptionsFree(psOptions); - return nullptr; - } - else if (!bGotFilename) - { - bGotFilename = true; - if (psOptionsForBinary) - psOptionsForBinary->pszFilename = CPLStrdup(papszArgv[i]); - } - else - { - CPLError(CE_Failure, CPLE_NotSupported, - "Too many command options '%s'", papszArgv[i]); - GDALInfoOptionsFree(psOptions); - return nullptr; - } + argParser->parse_args_without_binary_name(aosArgv.List()); + } + catch (const std::exception &error) + { + CPLError(CE_Failure, CPLE_AppDefined, "%s", error.what()); + return nullptr; } - return psOptions; + return psOptions.release(); } /************************************************************************/ @@ -2258,11 +2314,5 @@ GDALInfoOptionsNew(char **papszArgv, void GDALInfoOptionsFree(GDALInfoOptions *psOptions) { - if (psOptions != nullptr) - { - CSLDestroy(psOptions->papszExtraMDDomains); - CPLFree(psOptions->pszWKTFormat); - - CPLFree(psOptions); - } + delete psOptions; } diff --git a/autotest/utilities/test_gdalinfo.py b/autotest/utilities/test_gdalinfo.py index b8ddf982dbc2..6efd947f3d79 100755 --- a/autotest/utilities/test_gdalinfo.py +++ b/autotest/utilities/test_gdalinfo.py @@ -60,7 +60,7 @@ def test_gdalinfo_1(gdalinfo_path): gdalinfo_path + " ../gcore/data/byte.tif", encoding="UTF-8", ) - assert err is None or err == "", "got error/warning" + assert err is None or err == "", f"got error/warning {err}" assert ret.find("Driver: GTiff/GeoTIFF") != -1 @@ -460,7 +460,7 @@ def test_gdalinfo_28(gdalinfo_path): encoding="UTF-8", ) ret = json.loads(ret) - assert err is None or err == "", "got error/warning" + assert err is None or err == "", f"got error/warning {err}" assert ret["driverShortName"] == "GTiff" diff --git a/autotest/utilities/test_gdalinfo_lib.py b/autotest/utilities/test_gdalinfo_lib.py index 57358612d7f6..fe8dc55f5a92 100755 --- a/autotest/utilities/test_gdalinfo_lib.py +++ b/autotest/utilities/test_gdalinfo_lib.py @@ -121,7 +121,7 @@ def test_gdalinfo_lib_5(): computeMinMax=True, reportHistograms=True, reportProj4=True, - stats=True, + # stats=True, this is mutually exclusive with approxStats approxStats=True, computeChecksum=True, showGCPs=False, diff --git a/doc/source/programs/gdalinfo.rst b/doc/source/programs/gdalinfo.rst index 9a7dfa0894fd..5c55f2920e39 100644 --- a/doc/source/programs/gdalinfo.rst +++ b/doc/source/programs/gdalinfo.rst @@ -103,18 +103,20 @@ The following command line parameters can appear in any order Only display the first file of the file list. -.. option:: -wkt_format WKT1|WKT2|WKT2_2015|WKT2_2018 +.. option:: -wkt_format WKT1|WKT2|WKT2_2015|WKT2_2018|WKT2_2019 WKT format used to display the SRS. Currently the supported values are: ``WKT1`` - ``WKT2`` (latest WKT version, currently *WKT2_2018*) + ``WKT2`` (latest WKT version, currently *WKT2_2019*) ``WKT2_2015`` - ``WKT2_2018`` + ``WKT2_2018`` (deprecated) + + ``WKT2_2019`` .. 
versionadded:: 3.0.0 From 03f30b1052a9913352fb83d50d97970048e3435f Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Wed, 17 Apr 2024 17:44:50 +0200 Subject: [PATCH 020/230] typo fixes [ci skip] --- doc/source/development/building_from_source.rst | 2 +- doc/source/drivers/vector/gpkg.rst | 2 +- ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp | 2 +- ogr/ogrsf_frmts/parquet/ogr_parquet.h | 2 +- scripts/typos_allowlist.txt | 1 + swig/include/python/docs/osr_spatialreference_docs.i | 2 +- 6 files changed, 6 insertions(+), 5 deletions(-) diff --git a/doc/source/development/building_from_source.rst b/doc/source/development/building_from_source.rst index ecbb5a0bf267..bed99ec09c82 100644 --- a/doc/source/development/building_from_source.rst +++ b/doc/source/development/building_from_source.rst @@ -1699,7 +1699,7 @@ PROJ .. versionadded:: 3.9 Control the mode used for find_package(PROJ). - Alters how the default CMake seach logic + Alters how the default CMake search logic (https://cmake.org/cmake/help/latest/command/find_package.html) applies. Defaults to CUSTOM, where the CONFIG mode is applied for PROJ >= 8, and fallbacks to default MODULE mode otherwise. diff --git a/doc/source/drivers/vector/gpkg.rst b/doc/source/drivers/vector/gpkg.rst index 319278304ad7..d568dcaca289 100644 --- a/doc/source/drivers/vector/gpkg.rst +++ b/doc/source/drivers/vector/gpkg.rst @@ -639,7 +639,7 @@ custom entry of srs_id=99999 with the following properties: Note that the use of a LOCAL_CS / EngineeringCRS is mostly to provide a valid CRS definition to comply with the requirements of the GeoPackage specification and to be compatible of other applications (or GDAL 3.8 or earlier), but the -semantics of that entry is intented to be "undefined SRS of any kind". +semantics of that entry is intended to be "undefined SRS of any kind". Level of support of GeoPackage Extensions ----------------------------------------- diff --git a/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp b/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp index 93c842250810..47854f431c41 100644 --- a/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp +++ b/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp @@ -5580,7 +5580,7 @@ inline int OGRArrowLayer::GetNextArrowArray(struct ArrowArrayStream *stream, const auto nFeatureIdxCur = m_nFeatureIdx; // TODO: We likely have an issue regarding FIDs based on m_nFeatureIdx // when m_iFIDArrowColumn < 0, only a subset of row groups is - // selected, and this batch goes accross non consecutive row groups. + // selected, and this batch goes across non consecutive row groups. for (int64_t i = 0; i < m_nIdxInBatch; ++i) IncrFeatureIdx(); diff --git a/ogr/ogrsf_frmts/parquet/ogr_parquet.h b/ogr/ogrsf_frmts/parquet/ogr_parquet.h index 0b36c7b914c1..7ebf99ee48f7 100644 --- a/ogr/ogrsf_frmts/parquet/ogr_parquet.h +++ b/ogr/ogrsf_frmts/parquet/ogr_parquet.h @@ -91,7 +91,7 @@ class OGRParquetLayer final : public OGRParquetLayerBase //! Iterator over m_asFeatureIdxRemapping std::vector<std::pair<int64_t, int64_t>>::iterator m_oFeatureIdxRemappingIter{}; - //! Feature index among the potentially restricted set of selected row gropus + //! 
Feature index among the potentially restricted set of selected row groups int64_t m_nFeatureIdxSelected = 0; std::vector<int> m_anRequestedParquetColumns{}; // only valid when // m_bIgnoredFields is set diff --git a/scripts/typos_allowlist.txt b/scripts/typos_allowlist.txt index 5426c423eb50..a2a7bf6390d7 100644 --- a/scripts/typos_allowlist.txt +++ b/scripts/typos_allowlist.txt @@ -305,3 +305,4 @@ either 2 or 4 comma separated values. The same rules apply for the source and de FAIL_REGEX "[Uu]nknown switch" # PGI * Esben Mose Hansen, Ange Optimization ApS SetLinearUnits("kilometre", 1000.0); + // F(ixed) S(ize) L(ist) of (x,y[,z][,m]) values / Interleaved layout diff --git a/swig/include/python/docs/osr_spatialreference_docs.i b/swig/include/python/docs/osr_spatialreference_docs.i index 3940e0ecd4c6..876e2e2d11c6 100644 --- a/swig/include/python/docs/osr_spatialreference_docs.i +++ b/swig/include/python/docs/osr_spatialreference_docs.i @@ -502,7 +502,7 @@ Examples %feature("docstring") GetSemiMajor " -Get spheroid semi major axis (in metres starting with GDAL 3.0) +Get spheroid semi major axis (in meters starting with GDAL 3.0) See :cpp:func:`OGRSpatialReference::GetSemiMajor`. From e18e14ce29892f0435dbd69ac78e1889d7620ca3 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 15 Apr 2024 23:22:18 +0200 Subject: [PATCH 021/230] Rename CPLFreeReleaser to VSIFreeReleaser (only affects GDAL_COMPILATION) --- frmts/pdf/pdfobject.cpp | 4 ++-- gcore/gdalmultidim_gridded.cpp | 8 ++++---- .../flatgeobuf/ogrflatgeobuflayer.cpp | 2 +- port/cpl_string.h | 15 ++------------- port/cpl_vsi.h | 16 ++++++++++++++++ 5 files changed, 25 insertions(+), 20 deletions(-) diff --git a/frmts/pdf/pdfobject.cpp b/frmts/pdf/pdfobject.cpp index d054fe24da84..40a5cd76606b 100644 --- a/frmts/pdf/pdfobject.cpp +++ b/frmts/pdf/pdfobject.cpp @@ -2249,9 +2249,9 @@ class GDALPDFStreamPdfium : public GDALPDFStream private: RetainPtr<const CPDF_Stream> m_pStream; int64_t m_nSize = 0; - std::unique_ptr<uint8_t, CPLFreeReleaser> m_pData = nullptr; + std::unique_ptr<uint8_t, VSIFreeReleaser> m_pData = nullptr; int64_t m_nRawSize = 0; - std::unique_ptr<uint8_t, CPLFreeReleaser> m_pRawData = nullptr; + std::unique_ptr<uint8_t, VSIFreeReleaser> m_pRawData = nullptr; void Decompress(); void FillRaw(); diff --git a/gcore/gdalmultidim_gridded.cpp b/gcore/gdalmultidim_gridded.cpp index d57e7f7b562a..4a0741a903c7 100644 --- a/gcore/gdalmultidim_gridded.cpp +++ b/gcore/gdalmultidim_gridded.cpp @@ -52,7 +52,7 @@ class GDALMDArrayGridded final : public GDALPamMDArray std::shared_ptr<GDALMDArray> m_poVarY{}; std::unique_ptr<GDALDataset> m_poVectorDS{}; GDALGridAlgorithm m_eAlg; - std::unique_ptr<void, CPLFreeReleaser> m_poGridOptions; + std::unique_ptr<void, VSIFreeReleaser> m_poGridOptions; const GDALExtendedDataType m_dt; std::vector<GUInt64> m_anBlockSize{}; const double m_dfNoDataValue; @@ -71,7 +71,7 @@ class GDALMDArrayGridded final : public GDALPamMDArray const std::shared_ptr<GDALMDArray> &poVarX, const std::shared_ptr<GDALMDArray> &poVarY, std::unique_ptr<GDALDataset> &&poVectorDS, GDALGridAlgorithm eAlg, - std::unique_ptr<void, CPLFreeReleaser> &&poGridOptions, + std::unique_ptr<void, VSIFreeReleaser> &&poGridOptions, double dfNoDataValue, double dfMinX, double dfResX, double dfMinY, double dfResY, double dfRadius) : GDALAbstractMDArray(std::string(), @@ -106,7 +106,7 @@ class GDALMDArrayGridded final : public GDALPamMDArray const std::shared_ptr<GDALMDArray> &poVarX, const std::shared_ptr<GDALMDArray> 
&poVarY, std::unique_ptr<GDALDataset> &&poVectorDS, GDALGridAlgorithm eAlg, - std::unique_ptr<void, CPLFreeReleaser> &&poGridOptions, + std::unique_ptr<void, VSIFreeReleaser> &&poGridOptions, double dfNoDataValue, double dfMinX, double dfResX, double dfMinY, double dfResY, double dfRadius) { @@ -442,7 +442,7 @@ GDALMDArray::GetGridded(const std::string &osGridOptions, return nullptr; } - std::unique_ptr<void, CPLFreeReleaser> poGridOptions(pOptions); + std::unique_ptr<void, VSIFreeReleaser> poGridOptions(pOptions); if (GetDataType().GetClass() != GEDTC_NUMERIC) { diff --git a/ogr/ogrsf_frmts/flatgeobuf/ogrflatgeobuflayer.cpp b/ogr/ogrsf_frmts/flatgeobuf/ogrflatgeobuflayer.cpp index 08f9266cf056..1cabdd37294d 100644 --- a/ogr/ogrsf_frmts/flatgeobuf/ogrflatgeobuflayer.cpp +++ b/ogr/ogrsf_frmts/flatgeobuf/ogrflatgeobuflayer.cpp @@ -2535,7 +2535,7 @@ OGRFlatGeobufLayer *OGRFlatGeobufLayer::Open(const char *pszFilename, "Header size too large (> 10 MB)"); return nullptr; } - std::unique_ptr<GByte, CPLFreeReleaser> buf( + std::unique_ptr<GByte, VSIFreeReleaser> buf( static_cast<GByte *>(VSIMalloc(headerSize))); if (buf == nullptr) { diff --git a/port/cpl_string.h b/port/cpl_string.h index f666bab9ab2c..4f06433ad0d6 100644 --- a/port/cpl_string.h +++ b/port/cpl_string.h @@ -653,20 +653,9 @@ extern "C++" /** Unique pointer type to use with CSL functions returning a char** */ using CSLUniquePtr = std::unique_ptr<char *, CSLDestroyReleaser>; - /*! @cond Doxygen_Suppress */ - struct CPL_DLL CPLFreeReleaser - { - void operator()(void *p) const - { - CPLFree(p); - } - }; - - /*! @endcond */ - /** Unique pointer type to use with functions returning a char* to release - * with CPLFree */ - using CPLCharUniquePtr = std::unique_ptr<char, CPLFreeReleaser>; + * with VSIFree */ + using CPLCharUniquePtr = std::unique_ptr<char, VSIFreeReleaser>; namespace cpl { diff --git a/port/cpl_vsi.h b/port/cpl_vsi.h index ecf08a30a2ad..5b08588180ce 100644 --- a/port/cpl_vsi.h +++ b/port/cpl_vsi.h @@ -292,6 +292,22 @@ void CPL_DLL VSIFree(void *); void CPL_DLL *VSIRealloc(void *, size_t) CPL_WARN_UNUSED_RESULT; char CPL_DLL *VSIStrdup(const char *) CPL_WARN_UNUSED_RESULT; +#if defined(__cplusplus) && defined(GDAL_COMPILATION) +extern "C++" +{ + /*! @cond Doxygen_Suppress */ + struct CPL_DLL VSIFreeReleaser + { + void operator()(void *p) const + { + VSIFree(p); + } + }; + + /*! 
@endcond */ +} +#endif + void CPL_DLL *VSIMallocAligned(size_t nAlignment, size_t nSize) CPL_WARN_UNUSED_RESULT; void CPL_DLL *VSIMallocAlignedAuto(size_t nSize) CPL_WARN_UNUSED_RESULT; From b4338a68173343075cbf0c33471db750838fa588 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 15 Apr 2024 23:22:58 +0200 Subject: [PATCH 022/230] Add std::string OGRSpatialReference::exportToWkt(const char* const* papszOptions = nullptr) const --- ogr/ogr_spatialref.h | 1 + ogr/ogrspatialreference.cpp | 55 +++++++++++++++++++++++++++++++++++-- 2 files changed, 54 insertions(+), 2 deletions(-) diff --git a/ogr/ogr_spatialref.h b/ogr/ogr_spatialref.h index 4d6f5de582aa..8753e2bdecac 100644 --- a/ogr/ogr_spatialref.h +++ b/ogr/ogr_spatialref.h @@ -207,6 +207,7 @@ class CPL_DLL OGRSpatialReference void dumpReadable(); OGRErr exportToWkt(char **) const; OGRErr exportToWkt(char **ppszWKT, const char *const *papszOptions) const; + std::string exportToWkt(const char *const *papszOptions = nullptr) const; OGRErr exportToPrettyWkt(char **, int = FALSE) const; // cppcheck-suppress functionStatic OGRErr exportToPROJJSON(char **, const char *const *papszOptions) const; diff --git a/ogr/ogrspatialreference.cpp b/ogr/ogrspatialreference.cpp index 8121d79687e0..b2d4c94d5364 100644 --- a/ogr/ogrspatialreference.cpp +++ b/ogr/ogrspatialreference.cpp @@ -1588,13 +1588,13 @@ static PJ *GDAL_proj_crs_create_bound_crs_to_WGS84(PJ_CONTEXT *ctx, PJ *pj, * node is returned. * WKT1 is an alias of WKT1_GDAL. * WKT2 will default to the latest revision implemented (currently - * WKT2_2018) WKT2_2019 can be used as an alias of WKT2_2018 since GDAL 3.2 + * WKT2_2018) WKT2_2019 can be used as an alias of WKT2_2018 since GDAL 3.2 + * </li> * <li>ALLOW_ELLIPSOIDAL_HEIGHT_AS_VERTICAL_CRS=YES/NO. Default is NO. If set * to YES and FORMAT=WKT1_GDAL, a Geographic 3D CRS or a Projected 3D CRS will * be exported as a compound CRS whose vertical part represents an ellipsoidal * height (for example for use with LAS 1.4 WKT1). * Requires PROJ 7.2.1 and GDAL 3.2.1.</li> - * </li> * </ul> * * Starting with GDAL 3.0.3, if the OSR_ADD_TOWGS84_ON_EXPORT_TO_WKT1 @@ -1755,6 +1755,57 @@ OGRErr OGRSpatialReference::exportToWkt(char **ppszResult, return OGRERR_NONE; } +/************************************************************************/ +/* exportToWkt() */ +/************************************************************************/ + +/** + * Convert this SRS into a WKT string. + * + * Consult also the <a href="wktproblems.html">OGC WKT Coordinate System + * Issues</a> page for implementation details of WKT 1 in OGR. + * + * @param papszOptions NULL terminated list of options, or NULL. Currently + * supported options are + * <ul> + * <li>MULTILINE=YES/NO. Defaults to NO.</li> + * <li>FORMAT=SFSQL/WKT1_SIMPLE/WKT1/WKT1_GDAL/WKT1_ESRI/WKT2_2015/WKT2_2018/WKT2/DEFAULT. + * If SFSQL, a WKT1 string without AXIS, TOWGS84, AUTHORITY or EXTENSION + * node is returned. + * If WKT1_SIMPLE, a WKT1 string without AXIS, AUTHORITY or EXTENSION + * node is returned. + * WKT1 is an alias of WKT1_GDAL. + * WKT2 will default to the latest revision implemented (currently + * WKT2_2019) + * </li> + * <li>ALLOW_ELLIPSOIDAL_HEIGHT_AS_VERTICAL_CRS=YES/NO. Default is NO. If set + * to YES and FORMAT=WKT1_GDAL, a Geographic 3D CRS or a Projected 3D CRS will + * be exported as a compound CRS whose vertical part represents an ellipsoidal + * height (for example for use with LAS 1.4 WKT1). 
+ * Requires PROJ 7.2.1.</li> + * </ul> + * + * If the OSR_ADD_TOWGS84_ON_EXPORT_TO_WKT1 + * configuration option is set to YES, when exporting to WKT1_GDAL, this method + * will try to add a TOWGS84[] node, if there's none attached yet to the SRS and + * if the SRS has a EPSG code. See the AddGuessedTOWGS84() method for how this + * TOWGS84[] node may be added. + * + * @return a non-empty string if successful. + * @since GDAL 3.9 + */ + +std::string +OGRSpatialReference::exportToWkt(const char *const *papszOptions) const +{ + std::string osWKT; + char *pszWKT = nullptr; + if (exportToWkt(&pszWKT, papszOptions) == OGRERR_NONE) + osWKT = pszWKT; + CPLFree(pszWKT); + return osWKT; +} + /************************************************************************/ /* OSRExportToWkt() */ /************************************************************************/ From 260f16440387a9e6d4c8ae466b1c434f6e664ae5 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 15 Apr 2024 23:31:45 +0200 Subject: [PATCH 023/230] Add GDALScaledProgressReleaser struct --- port/cpl_progress.h | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/port/cpl_progress.h b/port/cpl_progress.h index 2a591e1e2b3b..6bbe6f32bb6d 100644 --- a/port/cpl_progress.h +++ b/port/cpl_progress.h @@ -46,4 +46,20 @@ void CPL_DLL *CPL_STDCALL GDALCreateScaledProgress(double, double, void CPL_DLL CPL_STDCALL GDALDestroyScaledProgress(void *); CPL_C_END +#if defined(__cplusplus) && defined(GDAL_COMPILATION) +extern "C++" +{ + /*! @cond Doxygen_Suppress */ + struct CPL_DLL GDALScaledProgressReleaser + { + void operator()(void *p) const + { + GDALDestroyScaledProgress(p); + } + }; + + /*! @endcond */ +} +#endif + #endif /* ndef CPL_PROGRESS_H_INCLUDED */ From b11871d240402c731668894acb5f816040fc23ea Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 15 Apr 2024 23:23:12 +0200 Subject: [PATCH 024/230] C++'ify gdal_grid_lib.cpp --- apps/gdal_grid_lib.cpp | 488 +++++++++++++++++------------------------ 1 file changed, 203 insertions(+), 285 deletions(-) diff --git a/apps/gdal_grid_lib.cpp b/apps/gdal_grid_lib.cpp index b247b89eb45d..1cb8c676db7f 100644 --- a/apps/gdal_grid_lib.cpp +++ b/apps/gdal_grid_lib.cpp @@ -66,47 +66,55 @@ struct GDALGridOptions { /*! output format. Use the short format name. */ - char *pszFormat; + std::string osFormat{}; /*! allow or suppress progress monitor and other non-error output */ - bool bQuiet; + bool bQuiet = true; /*! the progress function to use */ - GDALProgressFunc pfnProgress; + GDALProgressFunc pfnProgress = GDALDummyProgress; /*! 
pointer to the progress data variable */ - void *pProgressData; - - char **papszLayers; - char *pszBurnAttribute; - double dfIncreaseBurnValue; - double dfMultiplyBurnValue; - char *pszWHERE; - char *pszSQL; - GDALDataType eOutputType; - char **papszCreateOptions; - int nXSize; - int nYSize; - double dfXRes; - double dfYRes; - double dfXMin; - double dfXMax; - double dfYMin; - double dfYMax; - bool bIsXExtentSet; - bool bIsYExtentSet; - GDALGridAlgorithm eAlgorithm; - void *pOptions; - char *pszOutputSRS; - OGRGeometry *poSpatialFilter; - bool bClipSrc; - OGRGeometry *poClipSrc; - char *pszClipSrcDS; - char *pszClipSrcSQL; - char *pszClipSrcLayer; - char *pszClipSrcWhere; - bool bNoDataSet; - double dfNoDataValue; + void *pProgressData = nullptr; + + CPLStringList aosLayers{}; + std::string osBurnAttribute{}; + double dfIncreaseBurnValue = 0.0; + double dfMultiplyBurnValue = 1.0; + std::string osWHERE{}; + std::string osSQL{}; + GDALDataType eOutputType = GDT_Float64; + CPLStringList aosCreateOptions{}; + int nXSize = 0; + int nYSize = 0; + double dfXRes = 0; + double dfYRes = 0; + double dfXMin = 0; + double dfXMax = 0; + double dfYMin = 0; + double dfYMax = 0; + bool bIsXExtentSet = false; + bool bIsYExtentSet = false; + GDALGridAlgorithm eAlgorithm = GGA_InverseDistanceToAPower; + std::unique_ptr<void, VSIFreeReleaser> pOptions{}; + std::string osOutputSRS{}; + std::unique_ptr<OGRGeometry> poSpatialFilter{}; + bool bClipSrc = false; + std::unique_ptr<OGRGeometry> poClipSrc{}; + std::string osClipSrcDS{}; + std::string osClipSrcSQL{}; + std::string osClipSrcLayer{}; + std::string osClipSrcWhere{}; + bool bNoDataSet = false; + double dfNoDataValue = 0; + + GDALGridOptions() + { + void *l_pOptions = nullptr; + GDALGridParseAlgorithmAndOptions(szAlgNameInvDist, &eAlgorithm, + &l_pOptions); + pOptions.reset(l_pOptions); + } }; /************************************************************************/ @@ -304,11 +312,11 @@ class GDALGridGeometryVisitor final : public OGRDefaultConstGeometryVisitor /* geometries and burn values. 
*/ /************************************************************************/ -static CPLErr ProcessLayer(OGRLayerH hSrcLayer, GDALDatasetH hDstDS, +static CPLErr ProcessLayer(OGRLayer *poSrcLayer, GDALDataset *poDstDS, const OGRGeometry *poClipSrc, int nXSize, int nYSize, int nBand, bool &bIsXExtentSet, bool &bIsYExtentSet, double &dfXMin, double &dfXMax, double &dfYMin, - double &dfYMax, const char *pszBurnAttribute, + double &dfYMax, const std::string &osBurnAttribute, const double dfIncreaseBurnValue, const double dfMultiplyBurnValue, GDALDataType eType, GDALGridAlgorithm eAlgorithm, void *pOptions, @@ -321,15 +329,14 @@ static CPLErr ProcessLayer(OGRLayerH hSrcLayer, GDALDatasetH hDstDS, /* -------------------------------------------------------------------- */ int iBurnField = -1; - if (pszBurnAttribute) + if (!osBurnAttribute.empty()) { - iBurnField = OGR_FD_GetFieldIndex(OGR_L_GetLayerDefn(hSrcLayer), - pszBurnAttribute); + iBurnField = + poSrcLayer->GetLayerDefn()->GetFieldIndex(osBurnAttribute.c_str()); if (iBurnField == -1) { printf("Failed to find field %s on layer %s, skipping.\n", - pszBurnAttribute, - OGR_FD_GetName(OGR_L_GetLayerDefn(hSrcLayer))); + osBurnAttribute.c_str(), poSrcLayer->GetName()); return CE_Failure; } } @@ -344,7 +351,7 @@ static CPLErr ProcessLayer(OGRLayerH hSrcLayer, GDALDatasetH hDstDS, oVisitor.dfIncreaseBurnValue = dfIncreaseBurnValue; oVisitor.dfMultiplyBurnValue = dfMultiplyBurnValue; - for (auto &&poFeat : OGRLayer::FromHandle(hSrcLayer)) + for (auto &&poFeat : poSrcLayer) { const OGRGeometry *poGeom = poFeat->GetGeometryRef(); if (poGeom) @@ -365,7 +372,7 @@ static CPLErr ProcessLayer(OGRLayerH hSrcLayer, GDALDatasetH hDstDS, if (oVisitor.adfX.empty()) { printf("No point geometry found on layer %s, skipping.\n", - OGR_FD_GetName(OGR_L_GetLayerDefn(hSrcLayer))); + poSrcLayer->GetName()); return CE_None; } @@ -375,7 +382,10 @@ static CPLErr ProcessLayer(OGRLayerH hSrcLayer, GDALDatasetH hDstDS, if (!bIsXExtentSet || !bIsYExtentSet) { OGREnvelope sEnvelope; - OGR_L_GetExtent(hSrcLayer, &sEnvelope, TRUE); + if (poSrcLayer->GetExtent(&sEnvelope, TRUE) == OGRERR_FAILURE) + { + return CE_Failure; + } if (!bIsXExtentSet) { @@ -416,14 +426,14 @@ static CPLErr ProcessLayer(OGRLayerH hSrcLayer, GDALDatasetH hDstDS, printf("\n"); } - GDALRasterBandH hBand = GDALGetRasterBand(hDstDS, nBand); + GDALRasterBand *poBand = poDstDS->GetRasterBand(nBand); int nBlockXSize = 0; int nBlockYSize = 0; const int nDataTypeSize = GDALGetDataTypeSizeBytes(eType); // Try to grow the work buffer up to 16 MB if it is smaller - GDALGetBlockSize(hBand, &nBlockXSize, &nBlockYSize); + poBand->GetBlockSize(&nBlockXSize, &nBlockYSize); if (nXSize == 0 || nYSize == 0 || nBlockXSize == 0 || nBlockYSize == 0) return CE_Failure; @@ -448,8 +458,9 @@ static CPLErr ProcessLayer(OGRLayerH hSrcLayer, GDALDatasetH hDstDS, } CPLDebug("GDAL_GRID", "Work buffer: %d * %d", nBlockXSize, nBlockYSize); - void *pData = VSIMalloc3(nBlockXSize, nBlockYSize, nDataTypeSize); - if (pData == nullptr) + std::unique_ptr<void, VSIFreeReleaser> pData( + VSIMalloc3(nBlockXSize, nBlockYSize, nDataTypeSize)); + if (!pData) { CPLError(CE_Failure, CPLE_OutOfMemory, "Cannot allocate work buffer"); return CE_Failure; @@ -460,12 +471,21 @@ static CPLErr ProcessLayer(OGRLayerH hSrcLayer, GDALDatasetH hDstDS, static_cast<double>(DIV_ROUND_UP(nXSize, nBlockXSize)) * DIV_ROUND_UP(nYSize, nBlockYSize); - GDALGridContext *psContext = GDALGridContextCreate( - eAlgorithm, pOptions, static_cast<int>(oVisitor.adfX.size()), - 
&(oVisitor.adfX[0]), &(oVisitor.adfY[0]), &(oVisitor.adfZ[0]), TRUE); - if (psContext == nullptr) + struct GDALGridContextReleaser + { + void operator()(GDALGridContext *psContext) + { + GDALGridContextFree(psContext); + } + }; + + std::unique_ptr<GDALGridContext, GDALGridContextReleaser> psContext( + GDALGridContextCreate(eAlgorithm, pOptions, + static_cast<int>(oVisitor.adfX.size()), + &(oVisitor.adfX[0]), &(oVisitor.adfY[0]), + &(oVisitor.adfZ[0]), TRUE)); + if (!psContext) { - CPLFree(pData); return CE_Failure; } @@ -476,10 +496,11 @@ static CPLErr ProcessLayer(OGRLayerH hSrcLayer, GDALDatasetH hDstDS, for (int nXOffset = 0; nXOffset < nXSize && eErr == CE_None; nXOffset += nBlockXSize) { - void *pScaledProgress = GDALCreateScaledProgress( - static_cast<double>(nBlock) / dfBlockCount, - static_cast<double>(nBlock + 1) / dfBlockCount, pfnProgress, - pProgressData); + std::unique_ptr<void, GDALScaledProgressReleaser> pScaledProgress( + GDALCreateScaledProgress( + static_cast<double>(nBlock) / dfBlockCount, + static_cast<double>(nBlock + 1) / dfBlockCount, pfnProgress, + pProgressData)); nBlock++; int nXRequest = nBlockXSize; @@ -491,26 +512,22 @@ static CPLErr ProcessLayer(OGRLayerH hSrcLayer, GDALDatasetH hDstDS, nYRequest = nYSize - nYOffset; eErr = GDALGridContextProcess( - psContext, dfXMin + dfDeltaX * nXOffset, + psContext.get(), dfXMin + dfDeltaX * nXOffset, dfXMin + dfDeltaX * (nXOffset + nXRequest), dfYMin + dfDeltaY * nYOffset, dfYMin + dfDeltaY * (nYOffset + nYRequest), nXRequest, - nYRequest, eType, pData, GDALScaledProgress, pScaledProgress); + nYRequest, eType, pData.get(), GDALScaledProgress, + pScaledProgress.get()); if (eErr == CE_None) - eErr = GDALRasterIO(hBand, GF_Write, nXOffset, nYOffset, - nXRequest, nYRequest, pData, nXRequest, - nYRequest, eType, 0, 0); - - GDALDestroyScaledProgress(pScaledProgress); + eErr = poBand->RasterIO(GF_Write, nXOffset, nYOffset, nXRequest, + nYRequest, pData.get(), nXRequest, + nYRequest, eType, 0, 0, nullptr); } } if (eErr == CE_None && pfnProgress) pfnProgress(1.0, "", pProgressData); - GDALGridContextFree(psContext); - - CPLFree(pData); return eErr; } @@ -521,21 +538,21 @@ static CPLErr ProcessLayer(OGRLayerH hSrcLayer, GDALDatasetH hDstDS, /* returns a collection of read geometries. 
*/ /************************************************************************/ -static OGRGeometryCollection *LoadGeometry(const char *pszDS, - const char *pszSQL, - const char *pszLyr, - const char *pszWhere) +static std::unique_ptr<OGRGeometry> LoadGeometry(const std::string &osDS, + const std::string &osSQL, + const std::string &osLyr, + const std::string &osWhere) { - GDALDataset *poDS = static_cast<GDALDataset *>( - GDALOpenEx(pszDS, GDAL_OF_VECTOR, nullptr, nullptr, nullptr)); - if (poDS == nullptr) + auto poDS = std::unique_ptr<GDALDataset>(GDALDataset::Open( + osDS.c_str(), GDAL_OF_VECTOR, nullptr, nullptr, nullptr)); + if (!poDS) return nullptr; OGRLayer *poLyr = nullptr; - if (pszSQL != nullptr) - poLyr = poDS->ExecuteSQL(pszSQL, nullptr, nullptr); - else if (pszLyr != nullptr) - poLyr = poDS->GetLayerByName(pszLyr); + if (!osSQL.empty()) + poLyr = poDS->ExecuteSQL(osSQL.c_str(), nullptr, nullptr); + else if (!osLyr.empty()) + poLyr = poDS->GetLayerByName(osLyr.c_str()); else poLyr = poDS->GetLayer(0); @@ -543,24 +560,23 @@ static OGRGeometryCollection *LoadGeometry(const char *pszDS, { CPLError(CE_Failure, CPLE_AppDefined, "Failed to identify source layer from datasource."); - GDALClose(poDS); return nullptr; } - if (pszWhere) - poLyr->SetAttributeFilter(pszWhere); + if (!osWhere.empty()) + poLyr->SetAttributeFilter(osWhere.c_str()); - OGRGeometryCollection *poGeom = nullptr; + std::unique_ptr<OGRGeometryCollection> poGeom; for (auto &poFeat : poLyr) { - OGRGeometry *poSrcGeom = poFeat->GetGeometryRef(); + const OGRGeometry *poSrcGeom = poFeat->GetGeometryRef(); if (poSrcGeom) { const OGRwkbGeometryType eType = wkbFlatten(poSrcGeom->getGeometryType()); - if (poGeom == nullptr) - poGeom = new OGRMultiPolygon(); + if (!poGeom) + poGeom = std::make_unique<OGRMultiPolygon>(); if (eType == wkbPolygon) { @@ -568,32 +584,28 @@ static OGRGeometryCollection *LoadGeometry(const char *pszDS, } else if (eType == wkbMultiPolygon) { - const int nGeomCount = static_cast<OGRMultiPolygon *>(poSrcGeom) - ->getNumGeometries(); + const int nGeomCount = + poSrcGeom->toMultiPolygon()->getNumGeometries(); for (int iGeom = 0; iGeom < nGeomCount; iGeom++) { poGeom->addGeometry( - static_cast<OGRMultiPolygon *>(poSrcGeom) - ->getGeometryRef(iGeom)); + poSrcGeom->toMultiPolygon()->getGeometryRef(iGeom)); } } else { CPLError(CE_Failure, CPLE_AppDefined, "Geometry not of polygon type."); - OGRGeometryFactory::destroyGeometry(poGeom); - if (pszSQL != nullptr) + if (!osSQL.empty()) poDS->ReleaseResultSet(poLyr); - GDALClose(poDS); return nullptr; } } } - if (pszSQL != nullptr) + if (!osSQL.empty()) poDS->ReleaseResultSet(poLyr); - GDALClose(poDS); return poGeom; } @@ -646,17 +658,17 @@ GDALDatasetH GDALGrid(const char *pszDest, GDALDatasetH hSrcDataset, return nullptr; } - GDALGridOptions *psOptionsToFree = nullptr; + std::unique_ptr<GDALGridOptions> psOptionsToFree; const GDALGridOptions *psOptions = psOptionsIn; if (psOptions == nullptr) { - psOptionsToFree = GDALGridOptionsNew(nullptr, nullptr); - psOptions = psOptionsToFree; + psOptionsToFree = std::make_unique<GDALGridOptions>(); + psOptions = psOptionsToFree.get(); } - GDALDataset *poSrcDS = static_cast<GDALDataset *>(hSrcDataset); + GDALDataset *poSrcDS = GDALDataset::FromHandle(hSrcDataset); - if (psOptions->pszSQL == nullptr && psOptions->papszLayers == nullptr && + if (psOptions->osSQL.empty() && psOptions->aosLayers.empty() && poSrcDS->GetLayerCount() != 1) { CPLError(CE_Failure, CPLE_NotSupported, @@ -664,7 +676,6 @@ GDALDatasetH GDALGrid(const 
char *pszDest, GDALDatasetH hSrcDataset, "has not one single layer."); if (pbUsageError) *pbUsageError = TRUE; - GDALGridOptionsFree(psOptionsToFree); return nullptr; } @@ -673,29 +684,27 @@ GDALDatasetH GDALGrid(const char *pszDest, GDALDatasetH hSrcDataset, { CPLError(CE_Failure, CPLE_IllegalArg, "-outsize and -tr options cannot be used at the same time."); - GDALGridOptionsFree(psOptionsToFree); return nullptr; } /* -------------------------------------------------------------------- */ /* Find the output driver. */ /* -------------------------------------------------------------------- */ - CPLString osFormat; - if (psOptions->pszFormat == nullptr) + std::string osFormat; + if (psOptions->osFormat.empty()) { osFormat = GetOutputDriverForRaster(pszDest); if (osFormat.empty()) { - GDALGridOptionsFree(psOptionsToFree); return nullptr; } } else { - osFormat = psOptions->pszFormat; + osFormat = psOptions->osFormat; } - GDALDriverH hDriver = GDALGetDriverByName(osFormat); + GDALDriverH hDriver = GDALGetDriverByName(osFormat.c_str()); if (hDriver == nullptr) { CPLError(CE_Failure, CPLE_AppDefined, @@ -718,20 +727,19 @@ GDALDatasetH GDALGrid(const char *pszDest, GDALDatasetH hSrcDataset, } } printf("\n"); - GDALGridOptionsFree(psOptionsToFree); return nullptr; } /* -------------------------------------------------------------------- */ /* Create target raster file. */ /* -------------------------------------------------------------------- */ - int nLayerCount = CSLCount(psOptions->papszLayers); - if (nLayerCount == 0 && psOptions->pszSQL == nullptr) + int nLayerCount = psOptions->aosLayers.size(); + if (nLayerCount == 0 && psOptions->osSQL.empty()) nLayerCount = 1; /* due to above check */ int nBands = nLayerCount; - if (psOptions->pszSQL) + if (!psOptions->osSQL.empty()) nBands++; int nXSize; @@ -747,7 +755,6 @@ GDALDatasetH GDALGrid(const char *pszDest, GDALDatasetH hSrcDataset, if (pbUsageError) *pbUsageError = TRUE; - GDALGridOptionsFree(psOptionsToFree); return nullptr; } @@ -772,7 +779,6 @@ GDALDatasetH GDALGrid(const char *pszDest, GDALDatasetH hSrcDataset, if (pbUsageError) *pbUsageError = TRUE; - GDALGridOptionsFree(psOptionsToFree); return nullptr; } } @@ -787,12 +793,11 @@ GDALDatasetH GDALGrid(const char *pszDest, GDALDatasetH hSrcDataset, nYSize = 256; } - GDALDatasetH hDstDS = - GDALCreate(hDriver, pszDest, nXSize, nYSize, nBands, - psOptions->eOutputType, psOptions->papszCreateOptions); - if (hDstDS == nullptr) + std::unique_ptr<GDALDataset> poDstDS(GDALDataset::FromHandle(GDALCreate( + hDriver, pszDest, nXSize, nYSize, nBands, psOptions->eOutputType, + psOptions->aosCreateOptions.List()))); + if (!poDstDS) { - GDALGridOptionsFree(psOptionsToFree); return nullptr; } @@ -800,8 +805,7 @@ GDALDatasetH GDALGrid(const char *pszDest, GDALDatasetH hSrcDataset, { for (int i = 1; i <= nBands; i++) { - GDALRasterBandH hBand = GDALGetRasterBand(hDstDS, i); - GDALSetRasterNoDataValue(hBand, psOptions->dfNoDataValue); + poDstDS->GetRasterBand(i)->SetNoDataValue(psOptions->dfNoDataValue); } } @@ -817,26 +821,24 @@ GDALDatasetH GDALGrid(const char *pszDest, GDALDatasetH hSrcDataset, /* Process SQL request. 
*/ /* -------------------------------------------------------------------- */ - if (psOptions->pszSQL != nullptr) + if (!psOptions->osSQL.empty()) { - OGRLayer *poLayer = poSrcDS->ExecuteSQL( - psOptions->pszSQL, psOptions->poSpatialFilter, nullptr); + OGRLayer *poLayer = + poSrcDS->ExecuteSQL(psOptions->osSQL.c_str(), + psOptions->poSpatialFilter.get(), nullptr); if (poLayer == nullptr) { - GDALGridOptionsFree(psOptionsToFree); - GDALClose(hDstDS); return nullptr; } // Custom layer will be rasterized in the first band. eErr = ProcessLayer( - OGRLayer::ToHandle(poLayer), hDstDS, psOptions->poSpatialFilter, - nXSize, nYSize, 1, bIsXExtentSet, bIsYExtentSet, dfXMin, dfXMax, - dfYMin, dfYMax, psOptions->pszBurnAttribute, - psOptions->dfIncreaseBurnValue, psOptions->dfMultiplyBurnValue, - psOptions->eOutputType, psOptions->eAlgorithm, psOptions->pOptions, - psOptions->bQuiet, psOptions->pfnProgress, - psOptions->pProgressData); + poLayer, poDstDS.get(), psOptions->poSpatialFilter.get(), nXSize, + nYSize, 1, bIsXExtentSet, bIsYExtentSet, dfXMin, dfXMax, dfYMin, + dfYMax, psOptions->osBurnAttribute, psOptions->dfIncreaseBurnValue, + psOptions->dfMultiplyBurnValue, psOptions->eOutputType, + psOptions->eAlgorithm, psOptions->pOptions.get(), psOptions->bQuiet, + psOptions->pfnProgress, psOptions->pProgressData); poSrcDS->ReleaseResultSet(poLayer); } @@ -844,28 +846,26 @@ GDALDatasetH GDALGrid(const char *pszDest, GDALDatasetH hSrcDataset, /* -------------------------------------------------------------------- */ /* Process each layer. */ /* -------------------------------------------------------------------- */ - char *pszOutputSRS = - psOptions->pszOutputSRS ? CPLStrdup(psOptions->pszOutputSRS) : nullptr; + std::string osOutputSRS(psOptions->osOutputSRS); for (int i = 0; i < nLayerCount; i++) { - OGRLayerH hLayer = psOptions->papszLayers == nullptr - ? GDALDatasetGetLayer(hSrcDataset, 0) - : GDALDatasetGetLayerByName( - hSrcDataset, psOptions->papszLayers[i]); - if (hLayer == nullptr) + auto poLayer = psOptions->aosLayers.empty() + ? poSrcDS->GetLayer(0) + : poSrcDS->GetLayerByName(psOptions->aosLayers[i]); + if (!poLayer) { CPLError(CE_Failure, CPLE_AppDefined, "Unable to find layer \"%s\".", - psOptions->papszLayers && psOptions->papszLayers[i] - ? psOptions->papszLayers[i] + !psOptions->aosLayers.empty() && psOptions->aosLayers[i] + ? 
psOptions->aosLayers[i] : "null"); eErr = CE_Failure; break; } - if (psOptions->pszWHERE) + if (!psOptions->osWHERE.empty()) { - if (OGR_L_SetAttributeFilter(hLayer, psOptions->pszWHERE) != + if (poLayer->SetAttributeFilter(psOptions->osWHERE.c_str()) != OGRERR_NONE) { eErr = CE_Failure; @@ -873,26 +873,25 @@ GDALDatasetH GDALGrid(const char *pszDest, GDALDatasetH hSrcDataset, } } - if (psOptions->poSpatialFilter != nullptr) - OGR_L_SetSpatialFilter( - hLayer, OGRGeometry::ToHandle(psOptions->poSpatialFilter)); + if (psOptions->poSpatialFilter) + poLayer->SetSpatialFilter(psOptions->poSpatialFilter.get()); // Fetch the first meaningful SRS definition - if (!pszOutputSRS) + if (osOutputSRS.empty()) { - OGRSpatialReferenceH hSRS = OGR_L_GetSpatialRef(hLayer); - if (hSRS) - OSRExportToWkt(hSRS, &pszOutputSRS); + auto poSRS = poLayer->GetSpatialRef(); + if (poSRS) + osOutputSRS = poSRS->exportToWkt(); } eErr = ProcessLayer( - hLayer, hDstDS, psOptions->poSpatialFilter, nXSize, nYSize, - i + 1 + nBands - nLayerCount, bIsXExtentSet, bIsYExtentSet, dfXMin, - dfXMax, dfYMin, dfYMax, psOptions->pszBurnAttribute, + poLayer, poDstDS.get(), psOptions->poSpatialFilter.get(), nXSize, + nYSize, i + 1 + nBands - nLayerCount, bIsXExtentSet, bIsYExtentSet, + dfXMin, dfXMax, dfYMin, dfYMax, psOptions->osBurnAttribute, psOptions->dfIncreaseBurnValue, psOptions->dfMultiplyBurnValue, - psOptions->eOutputType, psOptions->eAlgorithm, psOptions->pOptions, - psOptions->bQuiet, psOptions->pfnProgress, - psOptions->pProgressData); + psOptions->eOutputType, psOptions->eAlgorithm, + psOptions->pOptions.get(), psOptions->bQuiet, + psOptions->pfnProgress, psOptions->pProgressData); if (eErr != CE_None) break; } @@ -903,29 +902,26 @@ GDALDatasetH GDALGrid(const char *pszDest, GDALDatasetH hSrcDataset, double adfGeoTransform[6] = {dfXMin, (dfXMax - dfXMin) / nXSize, 0.0, dfYMin, 0.0, (dfYMax - dfYMin) / nYSize}; - GDALSetGeoTransform(hDstDS, adfGeoTransform); + poDstDS->SetGeoTransform(adfGeoTransform); /* -------------------------------------------------------------------- */ /* Apply SRS definition if set. 
*/ /* -------------------------------------------------------------------- */ - if (pszOutputSRS) + if (!osOutputSRS.empty()) { - GDALSetProjection(hDstDS, pszOutputSRS); - CPLFree(pszOutputSRS); + poDstDS->SetProjection(osOutputSRS.c_str()); } /* -------------------------------------------------------------------- */ /* End */ /* -------------------------------------------------------------------- */ - GDALGridOptionsFree(psOptionsToFree); if (eErr != CE_None) { - GDALClose(hDstDS); return nullptr; } - return hDstDS; + return GDALDataset::ToHandle(poDstDS.release()); } /************************************************************************/ @@ -965,46 +961,7 @@ GDALGridOptions * GDALGridOptionsNew(char **papszArgv, GDALGridOptionsForBinary *psOptionsForBinary) { - GDALGridOptions *psOptions = - static_cast<GDALGridOptions *>(CPLCalloc(1, sizeof(GDALGridOptions))); - - psOptions->pszFormat = nullptr; - psOptions->bQuiet = true; - psOptions->pfnProgress = GDALDummyProgress; - psOptions->pProgressData = nullptr; - psOptions->papszLayers = nullptr; - psOptions->pszBurnAttribute = nullptr; - psOptions->dfIncreaseBurnValue = 0.0; - psOptions->dfMultiplyBurnValue = 1.0; - psOptions->pszWHERE = nullptr; - psOptions->pszSQL = nullptr; - psOptions->eOutputType = GDT_Float64; - psOptions->papszCreateOptions = nullptr; - psOptions->nXSize = 0; - psOptions->nYSize = 0; - psOptions->dfXRes = 0; - psOptions->dfYRes = 0; - psOptions->dfXMin = 0.0; - psOptions->dfXMax = 0.0; - psOptions->dfYMin = 0.0; - psOptions->dfYMax = 0.0; - psOptions->bIsXExtentSet = false; - psOptions->bIsYExtentSet = false; - psOptions->eAlgorithm = GGA_InverseDistanceToAPower; - psOptions->pOptions = nullptr; - psOptions->pszOutputSRS = nullptr; - psOptions->poSpatialFilter = nullptr; - psOptions->poClipSrc = nullptr; - psOptions->bClipSrc = false; - psOptions->pszClipSrcDS = nullptr; - psOptions->pszClipSrcSQL = nullptr; - psOptions->pszClipSrcLayer = nullptr; - psOptions->pszClipSrcWhere = nullptr; - psOptions->bNoDataSet = false; - psOptions->dfNoDataValue = 0; - - GDALGridParseAlgorithmAndOptions(szAlgNameInvDist, &psOptions->eAlgorithm, - &psOptions->pOptions); + auto psOptions = std::make_unique<GDALGridOptions>(); bool bGotSourceFilename = false; bool bGotDestFilename = false; @@ -1019,8 +976,7 @@ GDALGridOptionsNew(char **papszArgv, (EQUAL(papszArgv[i], "-of") || EQUAL(papszArgv[i], "-f"))) { ++i; - CPLFree(psOptions->pszFormat); - psOptions->pszFormat = CPLStrdup(papszArgv[i]); + psOptions->osFormat = papszArgv[i]; } else if (EQUAL(papszArgv[i], "-q") || EQUAL(papszArgv[i], "-quiet")) @@ -1054,7 +1010,6 @@ GDALGridOptionsNew(char **papszArgv, { CPLError(CE_Failure, CPLE_NotSupported, "Unknown output pixel type: %s.", papszArgv[i + 1]); - GDALGridOptionsFree(psOptions); return nullptr; } i++; @@ -1091,21 +1046,18 @@ GDALGridOptionsNew(char **papszArgv, { CPLError(CE_Failure, CPLE_IllegalArg, "Wrong value for -tr parameters."); - GDALGridOptionsFree(psOptions); return nullptr; } } else if (i + 1 < argc && EQUAL(papszArgv[i], "-co")) { - psOptions->papszCreateOptions = - CSLAddString(psOptions->papszCreateOptions, papszArgv[++i]); + psOptions->aosCreateOptions.AddString(papszArgv[++i]); } else if (i + 1 < argc && EQUAL(papszArgv[i], "-zfield")) { - CPLFree(psOptions->pszBurnAttribute); - psOptions->pszBurnAttribute = CPLStrdup(papszArgv[++i]); + psOptions->osBurnAttribute = papszArgv[++i]; } else if (i + 1 < argc && EQUAL(papszArgv[i], "-z_increase")) @@ -1120,20 +1072,17 @@ GDALGridOptionsNew(char **papszArgv, else if (i 
+ 1 < argc && EQUAL(papszArgv[i], "-where")) { - CPLFree(psOptions->pszWHERE); - psOptions->pszWHERE = CPLStrdup(papszArgv[++i]); + psOptions->osWHERE = papszArgv[++i]; } else if (i + 1 < argc && EQUAL(papszArgv[i], "-l")) { - psOptions->papszLayers = - CSLAddString(psOptions->papszLayers, papszArgv[++i]); + psOptions->aosLayers.AddString(papszArgv[++i]); } else if (i + 1 < argc && EQUAL(papszArgv[i], "-sql")) { - CPLFree(psOptions->pszSQL); - psOptions->pszSQL = CPLStrdup(papszArgv[++i]); + psOptions->osSQL = papszArgv[++i]; } else if (i + 4 < argc && EQUAL(papszArgv[i], "-spat")) @@ -1151,10 +1100,9 @@ GDALGridOptionsNew(char **papszArgv, oRing.addPoint(CPLAtof(papszArgv[i + 1]), CPLAtof(papszArgv[i + 2])); - delete psOptions->poSpatialFilter; - OGRPolygon *poPoly = new OGRPolygon(); + auto poPoly = std::make_unique<OGRPolygon>(); poPoly->addRing(&oRing); - psOptions->poSpatialFilter = poPoly; + psOptions->poSpatialFilter = std::move(poPoly); i += 4; } @@ -1164,7 +1112,6 @@ GDALGridOptionsNew(char **papszArgv, { CPLError(CE_Failure, CPLE_IllegalArg, "%s option requires 1 or 4 arguments", papszArgv[i]); - GDALGridOptionsFree(psOptions); return nullptr; } @@ -1186,28 +1133,26 @@ GDALGridOptionsNew(char **papszArgv, oRing.addPoint(CPLAtof(papszArgv[i + 1]), CPLAtof(papszArgv[i + 2])); - delete psOptions->poClipSrc; - OGRPolygon *poPoly = static_cast<OGRPolygon *>( - OGRGeometryFactory::createGeometry(wkbPolygon)); + auto poPoly = std::make_unique<OGRPolygon>(); poPoly->addRing(&oRing); - psOptions->poClipSrc = poPoly; + psOptions->poClipSrc = std::move(poPoly); i += 4; } else if ((STARTS_WITH_CI(papszArgv[i + 1], "POLYGON") || STARTS_WITH_CI(papszArgv[i + 1], "MULTIPOLYGON")) && VSIStatL(papszArgv[i + 1], &sStat) != 0) { - delete psOptions->poClipSrc; + OGRGeometry *poClipSrc = nullptr; OGRGeometryFactory::createFromWkt(papszArgv[i + 1], nullptr, - &psOptions->poClipSrc); - if (psOptions->poClipSrc == nullptr) + &poClipSrc); + if (!poClipSrc) { CPLError(CE_Failure, CPLE_IllegalArg, "Invalid geometry. 
Must be a valid POLYGON or " "MULTIPOLYGON WKT"); - GDALGridOptionsFree(psOptions); return nullptr; } + psOptions->poClipSrc.reset(poClipSrc); i++; } else if (EQUAL(papszArgv[i + 1], "spat_extent")) @@ -1216,27 +1161,23 @@ GDALGridOptionsNew(char **papszArgv, } else { - CPLFree(psOptions->pszClipSrcDS); - psOptions->pszClipSrcDS = CPLStrdup(papszArgv[i + 1]); + psOptions->osClipSrcDS = papszArgv[i + 1]; i++; } } else if (i + 1 < argc && EQUAL(papszArgv[i], "-clipsrcsql")) { - CPLFree(psOptions->pszClipSrcSQL); - psOptions->pszClipSrcSQL = CPLStrdup(papszArgv[i + 1]); + psOptions->osClipSrcSQL = papszArgv[i + 1]; i++; } else if (i + 1 < argc && EQUAL(papszArgv[i], "-clipsrclayer")) { - CPLFree(psOptions->pszClipSrcLayer); - psOptions->pszClipSrcLayer = CPLStrdup(papszArgv[i + 1]); + psOptions->osClipSrcLayer = papszArgv[i + 1]; i++; } else if (i + 1 < argc && EQUAL(papszArgv[i], "-clipsrcwhere")) { - CPLFree(psOptions->pszClipSrcWhere); - psOptions->pszClipSrcWhere = CPLStrdup(papszArgv[i + 1]); + psOptions->osClipSrcWhere = papszArgv[i + 1]; i++; } @@ -1249,38 +1190,38 @@ GDALGridOptionsNew(char **papszArgv, CPLError(CE_Failure, CPLE_AppDefined, "Failed to process SRS definition: %s", papszArgv[i + 1]); - GDALGridOptionsFree(psOptions); return nullptr; } - CPLFree(psOptions->pszOutputSRS); - oOutputSRS.exportToWkt(&(psOptions->pszOutputSRS)); + char *pszWKT = nullptr; + oOutputSRS.exportToWkt(&pszWKT); + if (pszWKT) + psOptions->osOutputSRS = pszWKT; + CPLFree(pszWKT); i++; } else if (i + 1 < argc && EQUAL(papszArgv[i], "-a")) { const char *pszAlgorithm = papszArgv[++i]; - CPLFree(psOptions->pOptions); + void *pOptions = nullptr; if (GDALGridParseAlgorithmAndOptions( - pszAlgorithm, &psOptions->eAlgorithm, - &psOptions->pOptions) != CE_None) + pszAlgorithm, &psOptions->eAlgorithm, &pOptions) != CE_None) { CPLError(CE_Failure, CPLE_AppDefined, "Failed to process algorithm name and parameters"); - GDALGridOptionsFree(psOptions); return nullptr; } + psOptions->pOptions.reset(pOptions); - char **papszParams = CSLTokenizeString2(pszAlgorithm, ":", FALSE); - const char *pszNoDataValue = - CSLFetchNameValue(papszParams, "nodata"); + const CPLStringList aosParams( + CSLTokenizeString2(pszAlgorithm, ":", FALSE)); + const char *pszNoDataValue = aosParams.FetchNameValue("nodata"); if (pszNoDataValue != nullptr) { psOptions->bNoDataSet = true; psOptions->dfNoDataValue = CPLAtofM(pszNoDataValue); } - CSLDestroy(papszParams); } else if (i + 1 < argc && EQUAL(papszArgv[i], "-oo")) { @@ -1299,7 +1240,6 @@ GDALGridOptionsNew(char **papszArgv, { CPLError(CE_Failure, CPLE_NotSupported, "Unknown option name '%s'", papszArgv[i]); - GDALGridOptionsFree(psOptions); return nullptr; } else if (!bGotSourceFilename) @@ -1335,32 +1275,29 @@ GDALGridOptionsNew(char **papszArgv, { CPLError(CE_Failure, CPLE_NotSupported, "Too many command options '%s'", papszArgv[i]); - GDALGridOptionsFree(psOptions); return nullptr; } } - if (psOptions->bClipSrc && psOptions->pszClipSrcDS != nullptr) + if (psOptions->bClipSrc && !psOptions->osClipSrcDS.empty()) { - psOptions->poClipSrc = LoadGeometry( - psOptions->pszClipSrcDS, psOptions->pszClipSrcSQL, - psOptions->pszClipSrcLayer, psOptions->pszClipSrcWhere); - if (psOptions->poClipSrc == nullptr) + psOptions->poClipSrc = + LoadGeometry(psOptions->osClipSrcDS, psOptions->osClipSrcSQL, + psOptions->osClipSrcLayer, psOptions->osClipSrcWhere); + if (!psOptions->poClipSrc) { CPLError(CE_Failure, CPLE_AppDefined, "Cannot load source clip geometry."); - GDALGridOptionsFree(psOptions); 
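+            // Note (editorial illustration, not part of the original change):
+            // the manual GDALGridOptionsFree() calls on error paths are being
+            // dropped because psOptions is now held in a std::unique_ptr, so
+            // early returns release the options automatically.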
return nullptr; } } - else if (psOptions->bClipSrc && psOptions->poClipSrc == nullptr && + else if (psOptions->bClipSrc && !psOptions->poClipSrc && !psOptions->poSpatialFilter) { CPLError(CE_Failure, CPLE_AppDefined, "-clipsrc must be used with -spat option or \n" "a bounding box, WKT string or datasource must be " "specified."); - GDALGridOptionsFree(psOptions); return nullptr; } @@ -1368,28 +1305,26 @@ GDALGridOptionsNew(char **papszArgv, { if (psOptions->poClipSrc) { - OGRGeometry *poTemp = - psOptions->poSpatialFilter->Intersection(psOptions->poClipSrc); + auto poTemp = std::unique_ptr<OGRGeometry>( + psOptions->poSpatialFilter->Intersection( + psOptions->poClipSrc.get())); if (poTemp) { - delete psOptions->poSpatialFilter; - psOptions->poSpatialFilter = poTemp; + psOptions->poSpatialFilter = std::move(poTemp); } - delete psOptions->poClipSrc; - psOptions->poClipSrc = nullptr; + psOptions->poClipSrc.reset(); } } else { if (psOptions->poClipSrc) { - psOptions->poSpatialFilter = psOptions->poClipSrc; - psOptions->poClipSrc = nullptr; + psOptions->poSpatialFilter = std::move(psOptions->poClipSrc); } } - return psOptions; + return psOptions.release(); } /************************************************************************/ @@ -1406,24 +1341,7 @@ GDALGridOptionsNew(char **papszArgv, void GDALGridOptionsFree(GDALGridOptions *psOptions) { - if (psOptions == nullptr) - return; - - CPLFree(psOptions->pszFormat); - CSLDestroy(psOptions->papszLayers); - CPLFree(psOptions->pszBurnAttribute); - CPLFree(psOptions->pszWHERE); - CPLFree(psOptions->pszSQL); - CSLDestroy(psOptions->papszCreateOptions); - CPLFree(psOptions->pOptions); - CPLFree(psOptions->pszOutputSRS); - delete psOptions->poSpatialFilter; - delete psOptions->poClipSrc; - CPLFree(psOptions->pszClipSrcDS); - CPLFree(psOptions->pszClipSrcSQL); - CPLFree(psOptions->pszClipSrcLayer); - CPLFree(psOptions->pszClipSrcWhere); - CPLFree(psOptions); + delete psOptions; } /************************************************************************/ From 4ca5011bbc14bc4954b5d72db22833c9d53e0346 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 15 Apr 2024 23:45:13 +0200 Subject: [PATCH 025/230] gdal_grid: use GDALArgumentParser --- apps/gdal_grid_bin.cpp | 85 +----- apps/gdal_grid_lib.cpp | 670 ++++++++++++++++++++++++----------------- apps/gdal_utils_priv.h | 3 +- 3 files changed, 395 insertions(+), 363 deletions(-) diff --git a/apps/gdal_grid_bin.cpp b/apps/gdal_grid_bin.cpp index cb6b72cc3839..d46a0c1756d4 100644 --- a/apps/gdal_grid_bin.cpp +++ b/apps/gdal_grid_bin.cpp @@ -35,65 +35,11 @@ /* Usage() */ /************************************************************************/ -static void Usage(bool bIsError, const char *pszErrorMsg = nullptr) +static void Usage() { - fprintf( - bIsError ? stderr : stdout, - "Usage: gdal_grid [--help] [--help-general]\n" - " [-oo <NAME>=<VALUE>]...\n" - " [-ot {Byte/Int16/UInt16/UInt32/Int32/Float32/Float64/\n" - " CInt16/CInt32/CFloat32/CFloat64}]\n" - " [-of <format>] [-co <NAME>=<VALUE>]...\n" - " [-zfield <field_name>] [-z_increase <increase_value>] " - "[-z_multiply " - "<multiply_value>]\n" - " [-a_srs <srs_def>] [-spat <xmin> <ymin> <xmax> <ymax>]\n" - " [-clipsrc <xmin> <ymin> <xmax> " - "<ymax>|<WKT>|<datasource>|spat_extent]\n" - " [-clipsrcsql <sql_statement>] [-clipsrclayer <layer>]\n" - " [-clipsrcwhere <expression>]\n" - " [-l <layername>]... 
[-where <expression>] [-sql " - "<select_statement>]\n" - " [-txe <xmin> <xmax>] [-tye <ymin> <ymax>] [-tr <xres> <yres>] " - "[-outsize <xsize> " - "<ysize>]\n" - " [-a <algorithm>[:<parameter1>=<value1>]...]" - " [-q]\n" - " <src_datasource> <dst_filename>\n" - "\n" - "Available algorithms and parameters with their defaults:\n" - " Inverse distance to a power (default)\n" - " " - "invdist:power=2.0:smoothing=0.0:radius1=0.0:radius2=0.0:angle=0.0:max_" - "points=0:min_points=0:nodata=0.0\n" - " Inverse distance to a power with nearest neighbor search\n" - " " - "invdistnn:power=2.0:radius=1.0:max_points=12:min_points=0:nodata=0\n" - " Moving average\n" - " " - "average:radius1=0.0:radius2=0.0:angle=0.0:min_points=0:nodata=0.0\n" - " Nearest neighbor\n" - " nearest:radius1=0.0:radius2=0.0:angle=0.0:nodata=0.0\n" - " Various data metrics\n" - " <metric " - "name>:radius1=0.0:radius2=0.0:angle=0.0:min_points=0:nodata=0.0\n" - " possible metrics are:\n" - " minimum\n" - " maximum\n" - " range\n" - " count\n" - " average_distance\n" - " average_distance_pts\n" - " Linear\n" - " linear:radius=-1.0:nodata=0.0\n" - "\n"); - - if (pszErrorMsg != nullptr) - fprintf(stderr, "\nFAILURE: %s\n", pszErrorMsg); - - GDALDestroyDriverManager(); - exit(bIsError ? 1 : 0); + fprintf(stderr, "%s\n", GDALGridGetParserUsage().c_str()); + exit(1); } /************************************************************************/ @@ -116,22 +62,6 @@ MAIN_START(argc, argv) if (argc < 1) exit(-argc); - for (int i = 0; i < argc; i++) - { - if (EQUAL(argv[i], "--utility_version")) - { - printf("%s was compiled against GDAL %s and is running against " - "GDAL %s\n", - argv[0], GDAL_RELEASE_NAME, GDALVersionInfo("RELEASE_NAME")); - CSLDestroy(argv); - return 0; - } - else if (EQUAL(argv[i], "--help")) - { - Usage(false); - } - } - GDALGridOptionsForBinary sOptionsForBinary; /* coverity[tainted_data] */ GDALGridOptions *psOptions = @@ -140,7 +70,7 @@ MAIN_START(argc, argv) if (psOptions == nullptr) { - Usage(true); + Usage(); } if (!(sOptionsForBinary.bQuiet)) @@ -148,11 +78,6 @@ MAIN_START(argc, argv) GDALGridOptionsSetProgress(psOptions, GDALTermProgress, nullptr); } - if (sOptionsForBinary.osSource.empty()) - Usage(true, "No input file specified."); - if (!sOptionsForBinary.bDestSpecified) - Usage(true, "No output file specified."); - /* -------------------------------------------------------------------- */ /* Open input file. */ /* -------------------------------------------------------------------- */ @@ -168,7 +93,7 @@ MAIN_START(argc, argv) GDALDatasetH hOutDS = GDALGrid(sOptionsForBinary.osDest.c_str(), hInDS, psOptions, &bUsageError); if (bUsageError == TRUE) - Usage(true); + Usage(); int nRetCode = hOutDS ? 0 : 1; GDALClose(hInDS); diff --git a/apps/gdal_grid_lib.cpp b/apps/gdal_grid_lib.cpp index 1cb8c676db7f..1531f6d1ebca 100644 --- a/apps/gdal_grid_lib.cpp +++ b/apps/gdal_grid_lib.cpp @@ -31,6 +31,7 @@ #include "gdal_utils.h" #include "gdal_utils_priv.h" #include "commonutils.h" +#include "gdalargumentparser.h" #include <cmath> #include <cstdint> @@ -925,18 +926,274 @@ GDALDatasetH GDALGrid(const char *pszDest, GDALDatasetH hSrcDataset, } /************************************************************************/ -/* IsNumber() */ +/* GDALGridOptionsGetParser() */ /************************************************************************/ -static bool IsNumber(const char *pszStr) +/*! 
@cond Doxygen_Suppress */ + +static std::unique_ptr<GDALArgumentParser> +GDALGridOptionsGetParser(GDALGridOptions *psOptions, + GDALGridOptionsForBinary *psOptionsForBinary, + int nCountClipSrc) +{ + auto argParser = std::make_unique<GDALArgumentParser>( + "gdal_grid", /* bForBinary=*/psOptionsForBinary != nullptr); + + argParser->add_description( + _("Creates a regular grid (raster) from the scattered data read from a " + "vector datasource.")); + + argParser->add_epilog(_( + "Available algorithms and parameters with their defaults:\n" + " Inverse distance to a power (default)\n" + " " + "invdist:power=2.0:smoothing=0.0:radius1=0.0:radius2=0.0:angle=0.0:max_" + "points=0:min_points=0:nodata=0.0\n" + " Inverse distance to a power with nearest neighbor search\n" + " " + "invdistnn:power=2.0:radius=1.0:max_points=12:min_points=0:nodata=0\n" + " Moving average\n" + " " + "average:radius1=0.0:radius2=0.0:angle=0.0:min_points=0:nodata=0.0\n" + " Nearest neighbor\n" + " nearest:radius1=0.0:radius2=0.0:angle=0.0:nodata=0.0\n" + " Various data metrics\n" + " <metric " + "name>:radius1=0.0:radius2=0.0:angle=0.0:min_points=0:nodata=0.0\n" + " possible metrics are:\n" + " minimum\n" + " maximum\n" + " range\n" + " count\n" + " average_distance\n" + " average_distance_pts\n" + " Linear\n" + " linear:radius=-1.0:nodata=0.0\n" + "\n" + "For more details, consult https://gdal.org/programs/gdal_grid.html")); + + argParser->add_quiet_argument( + psOptionsForBinary ? &psOptionsForBinary->bQuiet : nullptr); + + argParser->add_output_format_argument(psOptions->osFormat); + + argParser->add_output_type_argument(psOptions->eOutputType); + + argParser->add_argument("-txe") + .metavar("<xmin> <xmax>") + .nargs(2) + .scan<'g', double>() + .help(_("Set georeferenced X extents of output file to be created.")); + + argParser->add_argument("-tye") + .metavar("<ymin> <ymax>") + .nargs(2) + .scan<'g', double>() + .help(_("Set georeferenced Y extents of output file to be created.")); + + argParser->add_argument("-outsize") + .metavar("<xsize> <ysize>") + .nargs(2) + .scan<'i', int>() + .help(_("Set the size of the output file.")); + + argParser->add_argument("-tr") + .metavar("<xres> <yes>") + .nargs(2) + .scan<'g', double>() + .help(_("Set target resolution.")); + + argParser->add_creation_options_argument(psOptions->aosCreateOptions); + + argParser->add_argument("-zfield") + .metavar("<field_name>") + .store_into(psOptions->osBurnAttribute) + .help(_("Field name from which to get Z values.")); + + argParser->add_argument("-z_increase") + .metavar("<increase_value>") + .store_into(psOptions->dfIncreaseBurnValue) + .help(_("Addition to the attribute field on the features to be used to " + "get a Z value from.")); + + argParser->add_argument("-z_multiply") + .metavar("<multiply_value>") + .store_into(psOptions->dfMultiplyBurnValue) + .help(_("Multiplication ratio for the Z field..")); + + argParser->add_argument("-where") + .metavar("<expression>") + .store_into(psOptions->osWHERE) + .help(_("Query expression to be applied to select features to process " + "from the input layer(s).")); + + argParser->add_argument("-l") + .metavar("<layer_name>") + .append() + .action([psOptions](const std::string &s) + { psOptions->aosLayers.AddString(s.c_str()); }) + .help(_("Layer(s) from the datasource that will be used for input " + "features.")); + + argParser->add_argument("-sql") + .metavar("<select_statement>") + .store_into(psOptions->osSQL) + .help(_("SQL statement to be evaluated to produce a layer of features " + "to be 
processed.")); + + argParser->add_argument("-spat") + .metavar("<xmin> <ymin> <xmax> <ymax>") + .nargs(4) + .scan<'g', double>() + .help(_("The area of interest. Only features within the rectangle will " + "be reported.")); + + argParser->add_argument("-clipsrc") + .nargs(nCountClipSrc) + .metavar("[<xmin> <ymin> <xmax> <ymax>]|<WKT>|<datasource>|spat_extent") + .help(_("Clip geometries (in source SRS).")); + + argParser->add_argument("-clipsrcsql") + .metavar("<sql_statement>") + .store_into(psOptions->osClipSrcSQL) + .help(_("Select desired geometries from the source clip datasource " + "using an SQL query.")); + + argParser->add_argument("-clipsrclayer") + .metavar("<layername>") + .store_into(psOptions->osClipSrcLayer) + .help(_("Select the named layer from the source clip datasource.")); + + argParser->add_argument("-clipsrcwhere") + .metavar("<expression>") + .store_into(psOptions->osClipSrcWhere) + .help(_("Restrict desired geometries from the source clip layer based " + "on an attribute query.")); + + argParser->add_argument("-a_srs") + .metavar("<srs_def>") + .action( + [psOptions](const std::string &osOutputSRSDef) + { + OGRSpatialReference oOutputSRS; + + if (oOutputSRS.SetFromUserInput(osOutputSRSDef.c_str()) != + OGRERR_NONE) + { + throw std::invalid_argument( + std::string("Failed to process SRS definition: ") + .append(osOutputSRSDef)); + } + + char *pszWKT = nullptr; + oOutputSRS.exportToWkt(&pszWKT); + if (pszWKT) + psOptions->osOutputSRS = pszWKT; + CPLFree(pszWKT); + }) + .help(_("Assign an output SRS, but without reprojecting.")); + + argParser->add_argument("-a") + .metavar("<algorithm>[[:<parameter1>=<value1>]...]") + .action( + [psOptions](const std::string &s) + { + const char *pszAlgorithm = s.c_str(); + void *pOptions = nullptr; + if (GDALGridParseAlgorithmAndOptions(pszAlgorithm, + &psOptions->eAlgorithm, + &pOptions) != CE_None) + { + throw std::invalid_argument( + "Failed to process algorithm name and parameters"); + } + psOptions->pOptions.reset(pOptions); + + const CPLStringList aosParams( + CSLTokenizeString2(pszAlgorithm, ":", FALSE)); + const char *pszNoDataValue = aosParams.FetchNameValue("nodata"); + if (pszNoDataValue != nullptr) + { + psOptions->bNoDataSet = true; + psOptions->dfNoDataValue = CPLAtofM(pszNoDataValue); + } + }) + .help(_("Set the interpolation algorithm or data metric name and " + "(optionally) its parameters.")); + + if (psOptionsForBinary) + { + argParser->add_open_options_argument( + &(psOptionsForBinary->aosOpenOptions)); + } + + if (psOptionsForBinary) + { + argParser->add_argument("src_dataset_name") + .metavar("<src_dataset_name>") + .store_into(psOptionsForBinary->osSource) + .help(_("Input dataset.")); + + argParser->add_argument("dst_dataset_name") + .metavar("<dst_dataset_name>") + .store_into(psOptionsForBinary->osDest) + .help(_("Output dataset.")); + } + + return argParser; +} + +/*! 
@endcond */ + +/************************************************************************/ +/* GDALGridGetParserUsage() */ +/************************************************************************/ + +std::string GDALGridGetParserUsage() { - if (*pszStr == '-' || *pszStr == '+') - pszStr++; - if (*pszStr == '.') - pszStr++; - return *pszStr >= '0' && *pszStr <= '9'; + try + { + GDALGridOptions sOptions; + GDALGridOptionsForBinary sOptionsForBinary; + auto argParser = + GDALGridOptionsGetParser(&sOptions, &sOptionsForBinary, 1); + return argParser->usage(); + } + catch (const std::exception &err) + { + CPLError(CE_Failure, CPLE_AppDefined, "Unexpected exception: %s", + err.what()); + return std::string(); + } } +/************************************************************************/ +/* CHECK_HAS_ENOUGH_ADDITIONAL_ARGS() */ +/************************************************************************/ + +#ifndef CheckHasEnoughAdditionalArgs_defined +#define CheckHasEnoughAdditionalArgs_defined + +static bool CheckHasEnoughAdditionalArgs(CSLConstList papszArgv, int i, + int nExtraArg, int nArgc) +{ + if (i + nExtraArg >= nArgc) + { + CPLError(CE_Failure, CPLE_IllegalArg, + "%s option requires %d argument%s", papszArgv[i], nExtraArg, + nExtraArg == 1 ? "" : "s"); + return false; + } + return true; +} +#endif + +#define CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(nExtraArg) \ + if (!CheckHasEnoughAdditionalArgs(papszArgv, i, nExtraArg, nArgc)) \ + { \ + return nullptr; \ + } + /************************************************************************/ /* GDALGridOptionsNew() */ /************************************************************************/ @@ -963,85 +1220,81 @@ GDALGridOptionsNew(char **papszArgv, { auto psOptions = std::make_unique<GDALGridOptions>(); - bool bGotSourceFilename = false; - bool bGotDestFilename = false; /* -------------------------------------------------------------------- */ - /* Handle command line arguments. */ + /* Pre-processing for custom syntax that ArgumentParser does not */ + /* support. */ /* -------------------------------------------------------------------- */ - const int argc = CSLCount(papszArgv); - for (int i = 0; i < argc && papszArgv != nullptr && papszArgv[i] != nullptr; - i++) - { - if (i < argc - 1 && - (EQUAL(papszArgv[i], "-of") || EQUAL(papszArgv[i], "-f"))) - { - ++i; - psOptions->osFormat = papszArgv[i]; - } - else if (EQUAL(papszArgv[i], "-q") || EQUAL(papszArgv[i], "-quiet")) + CPLStringList aosArgv; + const int nArgc = CSLCount(papszArgv); + int nCountClipSrc = 0; + for (int i = 0; + i < nArgc && papszArgv != nullptr && papszArgv[i] != nullptr; i++) + { + if (EQUAL(papszArgv[i], "-clipsrc")) { - if (psOptionsForBinary) + if (nCountClipSrc) { - psOptionsForBinary->bQuiet = true; - } - else - { - CPLError(CE_Failure, CPLE_NotSupported, - "%s switch only supported from gdal_grid binary.", + CPLError(CE_Failure, CPLE_AppDefined, "Duplicate argument %s", papszArgv[i]); + return nullptr; } - } - - else if (EQUAL(papszArgv[i], "-ot") && papszArgv[i + 1]) - { - for (int iType = 1; iType < GDT_TypeCount; iType++) + // argparse doesn't handle well variable number of values + // just before the positional arguments, so we have to detect + // it manually and set the correct number. 
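+            // For illustration (hypothetical invocations, not from this
+            // patch): "-clipsrc <xmin> <ymin> <xmax> <ymax>" consumes four
+            // numeric values, whereas "-clipsrc <WKT|datasource|spat_extent>"
+            // consumes a single string, hence the CPLGetValueType() probe on
+            // the following token below.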
+ nCountClipSrc = 1; + CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1); + if (CPLGetValueType(papszArgv[i + 1]) != CPL_VALUE_STRING && + i + 4 < nArgc) { - if (GDALGetDataTypeName(static_cast<GDALDataType>(iType)) != - nullptr && - EQUAL(GDALGetDataTypeName(static_cast<GDALDataType>(iType)), - papszArgv[i + 1])) - { - psOptions->eOutputType = static_cast<GDALDataType>(iType); - } + nCountClipSrc = 4; } - if (psOptions->eOutputType == GDT_Unknown) + for (int j = 0; j < 1 + nCountClipSrc; ++j) { - CPLError(CE_Failure, CPLE_NotSupported, - "Unknown output pixel type: %s.", papszArgv[i + 1]); - return nullptr; + aosArgv.AddString(papszArgv[i]); + ++i; } - i++; + --i; } - else if (i + 2 < argc && EQUAL(papszArgv[i], "-txe")) + else { - psOptions->dfXMin = CPLAtof(papszArgv[++i]); - psOptions->dfXMax = CPLAtof(papszArgv[++i]); + aosArgv.AddString(papszArgv[i]); + } + } + + try + { + auto argParser = GDALGridOptionsGetParser( + psOptions.get(), psOptionsForBinary, nCountClipSrc); + + argParser->parse_args_without_binary_name(aosArgv.List()); + + if (auto oTXE = argParser->present<std::vector<double>>("-txe")) + { + psOptions->dfXMin = (*oTXE)[0]; + psOptions->dfXMax = (*oTXE)[1]; psOptions->bIsXExtentSet = true; } - else if (i + 2 < argc && EQUAL(papszArgv[i], "-tye")) + if (auto oTYE = argParser->present<std::vector<double>>("-tye")) { - psOptions->dfYMin = CPLAtof(papszArgv[++i]); - psOptions->dfYMax = CPLAtof(papszArgv[++i]); + psOptions->dfYMin = (*oTYE)[0]; + psOptions->dfYMax = (*oTYE)[1]; psOptions->bIsYExtentSet = true; } - else if (i + 2 < argc && EQUAL(papszArgv[i], "-outsize")) + if (auto oOutsize = argParser->present<std::vector<int>>("-outsize")) { - CPLAssert(papszArgv[i + 1]); - CPLAssert(papszArgv[i + 2]); - psOptions->nXSize = atoi(papszArgv[i + 1]); - psOptions->nYSize = atoi(papszArgv[i + 2]); - i += 2; + psOptions->nXSize = (*oOutsize)[0]; + psOptions->nYSize = (*oOutsize)[1]; } - else if (i + 2 < argc && EQUAL(papszArgv[i], "-tr")) + if (auto adfTargetRes = argParser->present<std::vector<double>>("-tr")) { - psOptions->dfXRes = CPLAtofM(papszArgv[++i]); - psOptions->dfYRes = CPLAtofM(papszArgv[++i]); + psOptions->dfXRes = (*adfTargetRes)[0]; + psOptions->dfYRes = (*adfTargetRes)[1]; if (psOptions->dfXRes <= 0 || psOptions->dfYRes <= 0) { CPLError(CE_Failure, CPLE_IllegalArg, @@ -1050,281 +1303,132 @@ GDALGridOptionsNew(char **papszArgv, } } - else if (i + 1 < argc && EQUAL(papszArgv[i], "-co")) - { - psOptions->aosCreateOptions.AddString(papszArgv[++i]); - } - - else if (i + 1 < argc && EQUAL(papszArgv[i], "-zfield")) - { - psOptions->osBurnAttribute = papszArgv[++i]; - } - - else if (i + 1 < argc && EQUAL(papszArgv[i], "-z_increase")) - { - psOptions->dfIncreaseBurnValue = CPLAtof(papszArgv[++i]); - } - - else if (i + 1 < argc && EQUAL(papszArgv[i], "-z_multiply")) - { - psOptions->dfMultiplyBurnValue = CPLAtof(papszArgv[++i]); - } - - else if (i + 1 < argc && EQUAL(papszArgv[i], "-where")) - { - psOptions->osWHERE = papszArgv[++i]; - } - - else if (i + 1 < argc && EQUAL(papszArgv[i], "-l")) - { - psOptions->aosLayers.AddString(papszArgv[++i]); - } - - else if (i + 1 < argc && EQUAL(papszArgv[i], "-sql")) - { - psOptions->osSQL = papszArgv[++i]; - } - - else if (i + 4 < argc && EQUAL(papszArgv[i], "-spat")) + if (auto oSpat = argParser->present<std::vector<double>>("-spat")) { OGRLinearRing oRing; - - oRing.addPoint(CPLAtof(papszArgv[i + 1]), - CPLAtof(papszArgv[i + 2])); - oRing.addPoint(CPLAtof(papszArgv[i + 1]), - CPLAtof(papszArgv[i + 4])); - oRing.addPoint(CPLAtof(papszArgv[i + 
3]), - CPLAtof(papszArgv[i + 4])); - oRing.addPoint(CPLAtof(papszArgv[i + 3]), - CPLAtof(papszArgv[i + 2])); - oRing.addPoint(CPLAtof(papszArgv[i + 1]), - CPLAtof(papszArgv[i + 2])); - - auto poPoly = std::make_unique<OGRPolygon>(); - poPoly->addRing(&oRing); - psOptions->poSpatialFilter = std::move(poPoly); - i += 4; + const double dfMinX = (*oSpat)[0]; + const double dfMinY = (*oSpat)[1]; + const double dfMaxX = (*oSpat)[2]; + const double dfMaxY = (*oSpat)[3]; + + oRing.addPoint(dfMinX, dfMinY); + oRing.addPoint(dfMinX, dfMaxY); + oRing.addPoint(dfMaxX, dfMaxY); + oRing.addPoint(dfMaxX, dfMinY); + oRing.addPoint(dfMinX, dfMinY); + + auto poPolygon = std::make_unique<OGRPolygon>(); + poPolygon->addRing(&oRing); + psOptions->poSpatialFilter = std::move(poPolygon); } - else if (EQUAL(papszArgv[i], "-clipsrc")) + if (auto oClipSrc = + argParser->present<std::vector<std::string>>("-clipsrc")) { - if (i + 1 >= argc || papszArgv[i + 1] == nullptr) - { - CPLError(CE_Failure, CPLE_IllegalArg, - "%s option requires 1 or 4 arguments", papszArgv[i]); - return nullptr; - } + const std::string &osVal = (*oClipSrc)[0]; + + psOptions->poClipSrc.reset(); + psOptions->osClipSrcDS.clear(); VSIStatBufL sStat; psOptions->bClipSrc = true; - if (IsNumber(papszArgv[i + 1]) && papszArgv[i + 2] != nullptr && - papszArgv[i + 3] != nullptr && papszArgv[i + 4] != nullptr) + if (oClipSrc->size() == 4) { + const double dfMinX = CPLAtofM((*oClipSrc)[0].c_str()); + const double dfMinY = CPLAtofM((*oClipSrc)[1].c_str()); + const double dfMaxX = CPLAtofM((*oClipSrc)[2].c_str()); + const double dfMaxY = CPLAtofM((*oClipSrc)[3].c_str()); + OGRLinearRing oRing; - oRing.addPoint(CPLAtof(papszArgv[i + 1]), - CPLAtof(papszArgv[i + 2])); - oRing.addPoint(CPLAtof(papszArgv[i + 1]), - CPLAtof(papszArgv[i + 4])); - oRing.addPoint(CPLAtof(papszArgv[i + 3]), - CPLAtof(papszArgv[i + 4])); - oRing.addPoint(CPLAtof(papszArgv[i + 3]), - CPLAtof(papszArgv[i + 2])); - oRing.addPoint(CPLAtof(papszArgv[i + 1]), - CPLAtof(papszArgv[i + 2])); + oRing.addPoint(dfMinX, dfMinY); + oRing.addPoint(dfMinX, dfMaxY); + oRing.addPoint(dfMaxX, dfMaxY); + oRing.addPoint(dfMaxX, dfMinY); + oRing.addPoint(dfMinX, dfMinY); auto poPoly = std::make_unique<OGRPolygon>(); poPoly->addRing(&oRing); psOptions->poClipSrc = std::move(poPoly); - i += 4; } - else if ((STARTS_WITH_CI(papszArgv[i + 1], "POLYGON") || - STARTS_WITH_CI(papszArgv[i + 1], "MULTIPOLYGON")) && - VSIStatL(papszArgv[i + 1], &sStat) != 0) + else if ((STARTS_WITH_CI(osVal.c_str(), "POLYGON") || + STARTS_WITH_CI(osVal.c_str(), "MULTIPOLYGON")) && + VSIStatL(osVal.c_str(), &sStat) != 0) { - OGRGeometry *poClipSrc = nullptr; - OGRGeometryFactory::createFromWkt(papszArgv[i + 1], nullptr, - &poClipSrc); - if (!poClipSrc) + OGRGeometry *poGeom = nullptr; + OGRGeometryFactory::createFromWkt(osVal.c_str(), nullptr, + &poGeom); + psOptions->poClipSrc.reset(poGeom); + if (psOptions->poClipSrc == nullptr) { CPLError(CE_Failure, CPLE_IllegalArg, "Invalid geometry. 
Must be a valid POLYGON or " "MULTIPOLYGON WKT"); return nullptr; } - psOptions->poClipSrc.reset(poClipSrc); - i++; } - else if (EQUAL(papszArgv[i + 1], "spat_extent")) + else if (EQUAL(osVal.c_str(), "spat_extent")) { - i++; + // Nothing to do } else { - psOptions->osClipSrcDS = papszArgv[i + 1]; - i++; - } - } - else if (i + 1 < argc && EQUAL(papszArgv[i], "-clipsrcsql")) - { - psOptions->osClipSrcSQL = papszArgv[i + 1]; - i++; - } - else if (i + 1 < argc && EQUAL(papszArgv[i], "-clipsrclayer")) - { - psOptions->osClipSrcLayer = papszArgv[i + 1]; - i++; - } - else if (i + 1 < argc && EQUAL(papszArgv[i], "-clipsrcwhere")) - { - psOptions->osClipSrcWhere = papszArgv[i + 1]; - i++; - } - - else if (i + 1 < argc && EQUAL(papszArgv[i], "-a_srs")) - { - OGRSpatialReference oOutputSRS; - - if (oOutputSRS.SetFromUserInput(papszArgv[i + 1]) != OGRERR_NONE) - { - CPLError(CE_Failure, CPLE_AppDefined, - "Failed to process SRS definition: %s", - papszArgv[i + 1]); - return nullptr; + psOptions->osClipSrcDS = osVal; } - - char *pszWKT = nullptr; - oOutputSRS.exportToWkt(&pszWKT); - if (pszWKT) - psOptions->osOutputSRS = pszWKT; - CPLFree(pszWKT); - i++; } - else if (i + 1 < argc && EQUAL(papszArgv[i], "-a")) + if (psOptions->bClipSrc && !psOptions->osClipSrcDS.empty()) { - const char *pszAlgorithm = papszArgv[++i]; - void *pOptions = nullptr; - if (GDALGridParseAlgorithmAndOptions( - pszAlgorithm, &psOptions->eAlgorithm, &pOptions) != CE_None) + psOptions->poClipSrc = LoadGeometry( + psOptions->osClipSrcDS, psOptions->osClipSrcSQL, + psOptions->osClipSrcLayer, psOptions->osClipSrcWhere); + if (!psOptions->poClipSrc) { CPLError(CE_Failure, CPLE_AppDefined, - "Failed to process algorithm name and parameters"); + "Cannot load source clip geometry."); return nullptr; } - psOptions->pOptions.reset(pOptions); - - const CPLStringList aosParams( - CSLTokenizeString2(pszAlgorithm, ":", FALSE)); - const char *pszNoDataValue = aosParams.FetchNameValue("nodata"); - if (pszNoDataValue != nullptr) - { - psOptions->bNoDataSet = true; - psOptions->dfNoDataValue = CPLAtofM(pszNoDataValue); - } - } - else if (i + 1 < argc && EQUAL(papszArgv[i], "-oo")) - { - i++; - if (psOptionsForBinary) - { - psOptionsForBinary->aosOpenOptions.AddString(papszArgv[i]); - } - else - { - CPLError(CE_Failure, CPLE_NotSupported, - "-oo switch only supported from gdal_grid binary."); - } } - else if (papszArgv[i][0] == '-') + else if (psOptions->bClipSrc && !psOptions->poClipSrc && + !psOptions->poSpatialFilter) { - CPLError(CE_Failure, CPLE_NotSupported, "Unknown option name '%s'", - papszArgv[i]); + CPLError(CE_Failure, CPLE_AppDefined, + "-clipsrc must be used with -spat option or \n" + "a bounding box, WKT string or datasource must be " + "specified."); return nullptr; } - else if (!bGotSourceFilename) + + if (psOptions->poSpatialFilter) { - bGotSourceFilename = true; - if (psOptionsForBinary) - { - psOptionsForBinary->osSource = papszArgv[i]; - } - else + if (psOptions->poClipSrc) { - CPLError( - CE_Failure, CPLE_NotSupported, - "{source_filename} only supported from gdal_grid binary."); + auto poTemp = std::unique_ptr<OGRGeometry>( + psOptions->poSpatialFilter->Intersection( + psOptions->poClipSrc.get())); + if (poTemp) + { + psOptions->poSpatialFilter = std::move(poTemp); + } + + psOptions->poClipSrc.reset(); } } - else if (!bGotDestFilename) + else { - bGotDestFilename = true; - if (psOptionsForBinary) - { - psOptionsForBinary->bDestSpecified = true; - psOptionsForBinary->osDest = papszArgv[i]; - } - else + if 
(psOptions->poClipSrc) { - CPLError( - CE_Failure, CPLE_NotSupported, - "{dest_filename} only supported from gdal_grid binary."); + psOptions->poSpatialFilter = std::move(psOptions->poClipSrc); } } - else - { - CPLError(CE_Failure, CPLE_NotSupported, - "Too many command options '%s'", papszArgv[i]); - return nullptr; - } - } - if (psOptions->bClipSrc && !psOptions->osClipSrcDS.empty()) - { - psOptions->poClipSrc = - LoadGeometry(psOptions->osClipSrcDS, psOptions->osClipSrcSQL, - psOptions->osClipSrcLayer, psOptions->osClipSrcWhere); - if (!psOptions->poClipSrc) - { - CPLError(CE_Failure, CPLE_AppDefined, - "Cannot load source clip geometry."); - return nullptr; - } + return psOptions.release(); } - else if (psOptions->bClipSrc && !psOptions->poClipSrc && - !psOptions->poSpatialFilter) + catch (const std::exception &err) { - CPLError(CE_Failure, CPLE_AppDefined, - "-clipsrc must be used with -spat option or \n" - "a bounding box, WKT string or datasource must be " - "specified."); + CPLError(CE_Failure, CPLE_AppDefined, "%s", err.what()); return nullptr; } - - if (psOptions->poSpatialFilter) - { - if (psOptions->poClipSrc) - { - auto poTemp = std::unique_ptr<OGRGeometry>( - psOptions->poSpatialFilter->Intersection( - psOptions->poClipSrc.get())); - if (poTemp) - { - psOptions->poSpatialFilter = std::move(poTemp); - } - - psOptions->poClipSrc.reset(); - } - } - else - { - if (psOptions->poClipSrc) - { - psOptions->poSpatialFilter = std::move(psOptions->poClipSrc); - } - } - - return psOptions.release(); } /************************************************************************/ @@ -1367,3 +1471,5 @@ void GDALGridOptionsSetProgress(GDALGridOptions *psOptions, if (pfnProgress == GDALTermProgress) psOptions->bQuiet = false; } + +#undef CHECK_HAS_ENOUGH_ADDITIONAL_ARGS diff --git a/apps/gdal_utils_priv.h b/apps/gdal_utils_priv.h index 8127d5f0fac3..f7a310794533 100644 --- a/apps/gdal_utils_priv.h +++ b/apps/gdal_utils_priv.h @@ -149,7 +149,6 @@ struct GDALVectorInfoOptionsForBinary struct GDALGridOptionsForBinary { std::string osSource{}; - bool bDestSpecified = false; std::string osDest{}; bool bQuiet = false; CPLStringList aosOpenOptions{}; @@ -241,6 +240,8 @@ std::string CPL_DLL GDALWarpAppGetParserUsage(); std::string CPL_DLL GDALInfoAppGetParserUsage(); +std::string CPL_DLL GDALGridGetParserUsage(); + #endif /* #ifndef DOXYGEN_SKIP */ #endif /* GDAL_UTILS_PRIV_H_INCLUDED */ From 065c8264a61238b06f3e02a81d35923331f02080 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 16 Apr 2024 17:40:34 +0200 Subject: [PATCH 026/230] GDALBuildVRT: C++ify GDALBuildVRTOptions and GDALBuildVRTOptionsForBinary --- apps/gdal_utils_priv.h | 17 ++- apps/gdalbuildvrt_bin.cpp | 58 +++----- apps/gdalbuildvrt_lib.cpp | 281 +++++++++++++------------------------- 3 files changed, 119 insertions(+), 237 deletions(-) diff --git a/apps/gdal_utils_priv.h b/apps/gdal_utils_priv.h index f7a310794533..6f27644231a0 100644 --- a/apps/gdal_utils_priv.h +++ b/apps/gdal_utils_priv.h @@ -65,15 +65,6 @@ struct GDALDEMProcessingOptionsForBinary int bQuiet; }; -struct GDALBuildVRTOptionsForBinary -{ - int nSrcFiles; - char **papszSrcFiles; - char *pszDstFilename; - int bQuiet; - int bOverwrite; -}; - CPL_C_END /* Access modes */ @@ -228,6 +219,14 @@ struct GDALWarpAppOptionsForBinary CPLStringList aosAllowedInputDrivers{}; }; +struct GDALBuildVRTOptionsForBinary +{ + CPLStringList aosSrcFiles{}; + std::string osDstFilename{}; + bool bQuiet = false; + bool bOverwrite = false; +}; + std::string 
CPL_DLL GDALNearblackGetParserUsage(); std::string CPL_DLL GDALVectorInfoGetParserUsage(); diff --git a/apps/gdalbuildvrt_bin.cpp b/apps/gdalbuildvrt_bin.cpp index e3ba4d1125ee..6f6b2622dd07 100644 --- a/apps/gdalbuildvrt_bin.cpp +++ b/apps/gdalbuildvrt_bin.cpp @@ -105,31 +105,6 @@ static void Usage(bool bIsError, const char *pszErrorMsg) exit(bIsError ? 1 : 0); } -/************************************************************************/ -/* GDALBuildVRTOptionsForBinaryNew() */ -/************************************************************************/ - -static GDALBuildVRTOptionsForBinary *GDALBuildVRTOptionsForBinaryNew(void) -{ - return static_cast<GDALBuildVRTOptionsForBinary *>( - CPLCalloc(1, sizeof(GDALBuildVRTOptionsForBinary))); -} - -/************************************************************************/ -/* GDALBuildVRTOptionsForBinaryFree() */ -/************************************************************************/ - -static void GDALBuildVRTOptionsForBinaryFree( - GDALBuildVRTOptionsForBinary *psOptionsForBinary) -{ - if (psOptionsForBinary) - { - CSLDestroy(psOptionsForBinary->papszSrcFiles); - CPLFree(psOptionsForBinary->pszDstFilename); - CPLFree(psOptionsForBinary); - } -} - /************************************************************************/ /* main() */ /************************************************************************/ @@ -164,11 +139,10 @@ MAIN_START(argc, argv) } } - GDALBuildVRTOptionsForBinary *psOptionsForBinary = - GDALBuildVRTOptionsForBinaryNew(); + GDALBuildVRTOptionsForBinary sOptionsForBinary; /* coverity[tainted_data] */ GDALBuildVRTOptions *psOptions = - GDALBuildVRTOptionsNew(argv + 1, psOptionsForBinary); + GDALBuildVRTOptionsNew(argv + 1, &sOptionsForBinary); CSLDestroy(argv); if (psOptions == nullptr) @@ -176,12 +150,12 @@ MAIN_START(argc, argv) Usage(true, nullptr); } - if (psOptionsForBinary->pszDstFilename == nullptr) + if (sOptionsForBinary.osDstFilename.c_str() == nullptr) { Usage(true, "No target filename specified."); } - if (!(psOptionsForBinary->bQuiet)) + if (!(sOptionsForBinary.bQuiet)) { GDALBuildVRTOptionsSetProgress(psOptions, GDALTermProgress, nullptr); } @@ -189,18 +163,20 @@ MAIN_START(argc, argv) /* Avoid overwriting a non VRT dataset if the user did not put the */ /* filenames in the right order */ VSIStatBuf sBuf; - if (!psOptionsForBinary->bOverwrite) + if (!sOptionsForBinary.bOverwrite) { - int bExists = (VSIStat(psOptionsForBinary->pszDstFilename, &sBuf) == 0); + int bExists = + (VSIStat(sOptionsForBinary.osDstFilename.c_str(), &sBuf) == 0); if (bExists) { - GDALDriverH hDriver = - GDALIdentifyDriver(psOptionsForBinary->pszDstFilename, nullptr); + GDALDriverH hDriver = GDALIdentifyDriver( + sOptionsForBinary.osDstFilename.c_str(), nullptr); if (hDriver && !(EQUAL(GDALGetDriverShortName(hDriver), "VRT") || (EQUAL(GDALGetDriverShortName(hDriver), "API_PROXY") && - EQUAL(CPLGetExtension(psOptionsForBinary->pszDstFilename), - "VRT")))) + EQUAL( + CPLGetExtension(sOptionsForBinary.osDstFilename.c_str()), + "VRT")))) { fprintf( stderr, @@ -209,9 +185,9 @@ MAIN_START(argc, argv) "right order.\n" "If you want to overwrite %s, add -overwrite option to the " "command line.\n\n", - psOptionsForBinary->pszDstFilename, + sOptionsForBinary.osDstFilename.c_str(), GDALGetDriverShortName(hDriver), - psOptionsForBinary->pszDstFilename); + sOptionsForBinary.osDstFilename.c_str()); Usage(true); } } @@ -219,14 +195,14 @@ MAIN_START(argc, argv) int bUsageError = FALSE; GDALDatasetH hOutDS = GDALBuildVRT( - 
psOptionsForBinary->pszDstFilename, psOptionsForBinary->nSrcFiles, - nullptr, psOptionsForBinary->papszSrcFiles, psOptions, &bUsageError); + sOptionsForBinary.osDstFilename.c_str(), + sOptionsForBinary.aosSrcFiles.size(), nullptr, + sOptionsForBinary.aosSrcFiles.List(), psOptions, &bUsageError); if (bUsageError) Usage(true); int nRetCode = (hOutDS) ? 0 : 1; GDALBuildVRTOptionsFree(psOptions); - GDALBuildVRTOptionsForBinaryFree(psOptionsForBinary); CPLErrorReset(); // The flush to disk is only done at that stage, so check if any error has diff --git a/apps/gdalbuildvrt_lib.cpp b/apps/gdalbuildvrt_lib.cpp index b07f416cf5a9..3eea1cf162bb 100644 --- a/apps/gdalbuildvrt_lib.cpp +++ b/apps/gdalbuildvrt_lib.cpp @@ -52,6 +52,7 @@ #include "cpl_progress.h" #include "cpl_string.h" #include "cpl_vsi.h" +#include "cpl_vsi_virtual.h" #include "gdal.h" #include "gdal_vrt.h" #include "gdal_priv.h" @@ -1662,12 +1663,9 @@ GDALDataset *VRTBuilder::Build(GDALProgressFunc pfnProgress, /************************************************************************/ static bool add_file_to_list(const char *filename, const char *tile_index, - int *pnInputFiles, char ***pppszInputFilenames) + CPLStringList &aosList) { - int nInputFiles = *pnInputFiles; - char **ppszInputFilenames = *pppszInputFilenames; - if (EQUAL(CPLGetExtension(filename), "SHP")) { /* Handle gdaltindex Shapefile as a special case */ @@ -1713,27 +1711,16 @@ static bool add_file_to_list(const char *filename, const char *tile_index, return false; } - ppszInputFilenames = static_cast<char **>(CPLRealloc( - ppszInputFilenames, - sizeof(char *) * - (nInputFiles + static_cast<int>(nTileIndexFiles) + 1))); for (auto &&poFeature : poLayer) { - ppszInputFilenames[nInputFiles++] = - CPLStrdup(poFeature->GetFieldAsString(ti_field)); + aosList.AddString(poFeature->GetFieldAsString(ti_field)); } - ppszInputFilenames[nInputFiles] = nullptr; } else { - ppszInputFilenames = static_cast<char **>(CPLRealloc( - ppszInputFilenames, sizeof(char *) * (nInputFiles + 1 + 1))); - ppszInputFilenames[nInputFiles++] = CPLStrdup(filename); - ppszInputFilenames[nInputFiles] = nullptr; + aosList.AddString(filename); } - *pnInputFiles = nInputFiles; - *pppszInputFilenames = ppszInputFilenames; return true; } @@ -1747,74 +1734,40 @@ static bool add_file_to_list(const char *filename, const char *tile_index, */ struct GDALBuildVRTOptions { - bool bStrict; - char *pszResolution; - int bSeparate; - int bAllowProjectionDifference; - double we_res; - double ns_res; - int bTargetAlignedPixels; - double xmin; - double ymin; - double xmax; - double ymax; - int bAddAlpha; - int bHideNoData; - int nSubdataset; - char *pszSrcNoData; - char *pszVRTNoData; - char *pszOutputSRS; - int *panSelectedBandList; - int nBandCount; - char *pszResampling; - char **papszOpenOptions; - bool bUseSrcMaskBand; - bool bNoDataFromMask; - double dfMaskValueThreshold; + bool bStrict = false; + std::string osResolution{}; + bool bSeparate = false; + bool bAllowProjectionDifference = false; + double we_res = 0; + double ns_res = 0; + bool bTargetAlignedPixels = false; + double xmin = 0; + double ymin = 0; + double xmax = 0; + double ymax = 0; + bool bAddAlpha = false; + bool bHideNoData = false; + int nSubdataset = -1; + std::string osSrcNoData{}; + std::string osVRTNoData{}; + std::string osOutputSRS{}; + std::vector<int> anSelectedBandList{}; + std::string osResampling{}; + CPLStringList aosOpenOptions{}; + bool bUseSrcMaskBand = true; + bool bNoDataFromMask = false; + double dfMaskValueThreshold = 0; /*! 
allow or suppress progress monitor and other non-error output */ - int bQuiet; + bool bQuiet = true; /*! the progress function to use */ - GDALProgressFunc pfnProgress; + GDALProgressFunc pfnProgress = GDALDummyProgress; /*! pointer to the progress data variable */ - void *pProgressData; + void *pProgressData = nullptr; }; -/************************************************************************/ -/* GDALBuildVRTOptionsClone() */ -/************************************************************************/ - -static GDALBuildVRTOptions * -GDALBuildVRTOptionsClone(const GDALBuildVRTOptions *psOptionsIn) -{ - GDALBuildVRTOptions *psOptions = static_cast<GDALBuildVRTOptions *>( - CPLMalloc(sizeof(GDALBuildVRTOptions))); - memcpy(psOptions, psOptionsIn, sizeof(GDALBuildVRTOptions)); - if (psOptionsIn->pszResolution) - psOptions->pszResolution = CPLStrdup(psOptionsIn->pszResolution); - if (psOptionsIn->pszSrcNoData) - psOptions->pszSrcNoData = CPLStrdup(psOptionsIn->pszSrcNoData); - if (psOptionsIn->pszVRTNoData) - psOptions->pszVRTNoData = CPLStrdup(psOptionsIn->pszVRTNoData); - if (psOptionsIn->pszOutputSRS) - psOptions->pszOutputSRS = CPLStrdup(psOptionsIn->pszOutputSRS); - if (psOptionsIn->pszResampling) - psOptions->pszResampling = CPLStrdup(psOptionsIn->pszResampling); - if (psOptionsIn->panSelectedBandList) - { - psOptions->panSelectedBandList = static_cast<int *>( - CPLMalloc(sizeof(int) * psOptionsIn->nBandCount)); - memcpy(psOptions->panSelectedBandList, psOptionsIn->panSelectedBandList, - sizeof(int) * psOptionsIn->nBandCount); - } - if (psOptionsIn->papszOpenOptions) - psOptions->papszOpenOptions = - CSLDuplicate(psOptionsIn->papszOpenOptions); - return psOptions; -} - /************************************************************************/ /* GDALBuildVRT() */ /************************************************************************/ @@ -1872,93 +1825,89 @@ GDALDatasetH GDALBuildVRT(const char *pszDest, int nSrcCount, return nullptr; } - GDALBuildVRTOptions *psOptions = - (psOptionsIn) ? GDALBuildVRTOptionsClone(psOptionsIn) - : GDALBuildVRTOptionsNew(nullptr, nullptr); + // cppcheck-suppress unreadVariable + GDALBuildVRTOptions sOptions(psOptionsIn ? 
*psOptionsIn + : GDALBuildVRTOptions()); - if (psOptions->we_res != 0 && psOptions->ns_res != 0 && - psOptions->pszResolution != nullptr && - !EQUAL(psOptions->pszResolution, "user")) + if (sOptions.we_res != 0 && sOptions.ns_res != 0 && + !sOptions.osResolution.empty() && + !EQUAL(sOptions.osResolution.c_str(), "user")) { CPLError(CE_Failure, CPLE_NotSupported, "-tr option is not compatible with -resolution %s", - psOptions->pszResolution); + sOptions.osResolution.c_str()); if (pbUsageError) *pbUsageError = TRUE; - GDALBuildVRTOptionsFree(psOptions); return nullptr; } - if (psOptions->bTargetAlignedPixels && psOptions->we_res == 0 && - psOptions->ns_res == 0) + if (sOptions.bTargetAlignedPixels && sOptions.we_res == 0 && + sOptions.ns_res == 0) { CPLError(CE_Failure, CPLE_NotSupported, "-tap option cannot be used without using -tr"); if (pbUsageError) *pbUsageError = TRUE; - GDALBuildVRTOptionsFree(psOptions); return nullptr; } - if (psOptions->bAddAlpha && psOptions->bSeparate) + if (sOptions.bAddAlpha && sOptions.bSeparate) { CPLError(CE_Failure, CPLE_NotSupported, "-addalpha option is not compatible with -separate."); if (pbUsageError) *pbUsageError = TRUE; - GDALBuildVRTOptionsFree(psOptions); return nullptr; } ResolutionStrategy eStrategy = AVERAGE_RESOLUTION; - if (psOptions->pszResolution == nullptr || - EQUAL(psOptions->pszResolution, "user")) + if (sOptions.osResolution.empty() || + EQUAL(sOptions.osResolution.c_str(), "user")) { - if (psOptions->we_res != 0 || psOptions->ns_res != 0) + if (sOptions.we_res != 0 || sOptions.ns_res != 0) eStrategy = USER_RESOLUTION; - else if (psOptions->pszResolution != nullptr && - EQUAL(psOptions->pszResolution, "user")) + else if (EQUAL(sOptions.osResolution.c_str(), "user")) { CPLError(CE_Failure, CPLE_NotSupported, "-tr option must be used with -resolution user."); if (pbUsageError) *pbUsageError = TRUE; - GDALBuildVRTOptionsFree(psOptions); return nullptr; } } - else if (EQUAL(psOptions->pszResolution, "average")) + else if (EQUAL(sOptions.osResolution.c_str(), "average")) eStrategy = AVERAGE_RESOLUTION; - else if (EQUAL(psOptions->pszResolution, "highest")) + else if (EQUAL(sOptions.osResolution.c_str(), "highest")) eStrategy = HIGHEST_RESOLUTION; - else if (EQUAL(psOptions->pszResolution, "lowest")) + else if (EQUAL(sOptions.osResolution.c_str(), "lowest")) eStrategy = LOWEST_RESOLUTION; /* If -srcnodata is specified, use it as the -vrtnodata if the latter is not */ /* specified */ - if (psOptions->pszSrcNoData != nullptr && - psOptions->pszVRTNoData == nullptr) - psOptions->pszVRTNoData = CPLStrdup(psOptions->pszSrcNoData); + if (!sOptions.osSrcNoData.empty() && sOptions.osVRTNoData.empty()) + sOptions.osVRTNoData = sOptions.osSrcNoData; VRTBuilder oBuilder( - psOptions->bStrict, pszDest, nSrcCount, papszSrcDSNames, pahSrcDS, - psOptions->panSelectedBandList, psOptions->nBandCount, eStrategy, - psOptions->we_res, psOptions->ns_res, psOptions->bTargetAlignedPixels, - psOptions->xmin, psOptions->ymin, psOptions->xmax, psOptions->ymax, - psOptions->bSeparate, psOptions->bAllowProjectionDifference, - psOptions->bAddAlpha, psOptions->bHideNoData, psOptions->nSubdataset, - psOptions->pszSrcNoData, psOptions->pszVRTNoData, - psOptions->bUseSrcMaskBand, psOptions->bNoDataFromMask, - psOptions->dfMaskValueThreshold, psOptions->pszOutputSRS, - psOptions->pszResampling, psOptions->papszOpenOptions); - - GDALDatasetH hDstDS = static_cast<GDALDatasetH>( - oBuilder.Build(psOptions->pfnProgress, psOptions->pProgressData)); - - 
GDALBuildVRTOptionsFree(psOptions); - - return hDstDS; + sOptions.bStrict, pszDest, nSrcCount, papszSrcDSNames, pahSrcDS, + sOptions.anSelectedBandList.empty() + ? nullptr + : sOptions.anSelectedBandList.data(), + static_cast<int>(sOptions.anSelectedBandList.size()), eStrategy, + sOptions.we_res, sOptions.ns_res, sOptions.bTargetAlignedPixels, + sOptions.xmin, sOptions.ymin, sOptions.xmax, sOptions.ymax, + sOptions.bSeparate, sOptions.bAllowProjectionDifference, + sOptions.bAddAlpha, sOptions.bHideNoData, sOptions.nSubdataset, + sOptions.osSrcNoData.empty() ? nullptr : sOptions.osSrcNoData.c_str(), + sOptions.osVRTNoData.empty() ? nullptr : sOptions.osVRTNoData.c_str(), + sOptions.bUseSrcMaskBand, sOptions.bNoDataFromMask, + sOptions.dfMaskValueThreshold, + sOptions.osOutputSRS.empty() ? nullptr : sOptions.osOutputSRS.c_str(), + sOptions.osResampling.empty() ? nullptr : sOptions.osResampling.c_str(), + sOptions.aosOpenOptions.List()); + + return GDALDataset::ToHandle( + oBuilder.Build(sOptions.pfnProgress, sOptions.pProgressData)); } /************************************************************************/ @@ -2011,20 +1960,10 @@ GDALBuildVRTOptions * GDALBuildVRTOptionsNew(char **papszArgv, GDALBuildVRTOptionsForBinary *psOptionsForBinary) { - GDALBuildVRTOptions *psOptions = static_cast<GDALBuildVRTOptions *>( - CPLCalloc(1, sizeof(GDALBuildVRTOptions))); + auto psOptions = std::make_unique<GDALBuildVRTOptions>(); const char *tile_index = "location"; - psOptions->nSubdataset = -1; - psOptions->bQuiet = TRUE; - psOptions->pfnProgress = GDALDummyProgress; - psOptions->pProgressData = nullptr; - psOptions->bUseSrcMaskBand = true; - psOptions->bNoDataFromMask = false; - psOptions->dfMaskValueThreshold = 0; - psOptions->bStrict = false; - /* -------------------------------------------------------------------- */ /* Parse arguments. 
*/ /* -------------------------------------------------------------------- */ @@ -2045,17 +1984,15 @@ GDALBuildVRTOptionsNew(char **papszArgv, } else if (EQUAL(papszArgv[iArg], "-resolution") && iArg + 1 < argc) { - CPLFree(psOptions->pszResolution); - psOptions->pszResolution = CPLStrdup(papszArgv[++iArg]); - if (!EQUAL(psOptions->pszResolution, "user") && - !EQUAL(psOptions->pszResolution, "average") && - !EQUAL(psOptions->pszResolution, "highest") && - !EQUAL(psOptions->pszResolution, "lowest")) + psOptions->osResolution = papszArgv[++iArg]; + if (!EQUAL(psOptions->osResolution.c_str(), "user") && + !EQUAL(psOptions->osResolution.c_str(), "average") && + !EQUAL(psOptions->osResolution.c_str(), "highest") && + !EQUAL(psOptions->osResolution.c_str(), "lowest")) { CPLError(CE_Failure, CPLE_IllegalArg, "Illegal resolution value (%s).", - psOptions->pszResolution); - GDALBuildVRTOptionsFree(psOptions); + psOptions->osResolution.c_str()); return nullptr; } } @@ -2065,25 +2002,21 @@ GDALBuildVRTOptionsNew(char **papszArgv, if (psOptionsForBinary) { const char *input_file_list = papszArgv[iArg]; - VSILFILE *f = VSIFOpenL(input_file_list, "r"); + auto f = + VSIVirtualHandleUniquePtr(VSIFOpenL(input_file_list, "r")); if (f) { while (1) { - const char *filename = CPLReadLineL(f); + const char *filename = CPLReadLineL(f.get()); if (filename == nullptr) break; - if (!add_file_to_list( - filename, tile_index, - &psOptionsForBinary->nSrcFiles, - &psOptionsForBinary->papszSrcFiles)) + if (!add_file_to_list(filename, tile_index, + psOptionsForBinary->aosSrcFiles)) { - VSIFCloseL(f); - GDALBuildVRTOptionsFree(psOptions); return nullptr; } } - VSIFCloseL(f); } } else @@ -2110,8 +2043,7 @@ GDALBuildVRTOptionsNew(char **papszArgv, ++iArg; if (psOptionsForBinary) { - CPLFree(psOptionsForBinary->pszDstFilename); - psOptionsForBinary->pszDstFilename = CPLStrdup(papszArgv[iArg]); + psOptionsForBinary->osDstFilename = papszArgv[iArg]; } else { @@ -2155,15 +2087,10 @@ GDALBuildVRTOptionsNew(char **papszArgv, { CPLError(CE_Failure, CPLE_IllegalArg, "Illegal band number (%s).", papszArgv[iArg]); - GDALBuildVRTOptionsFree(psOptions); return nullptr; } - psOptions->nBandCount++; - psOptions->panSelectedBandList = static_cast<int *>( - CPLRealloc(psOptions->panSelectedBandList, - sizeof(int) * psOptions->nBandCount)); - psOptions->panSelectedBandList[psOptions->nBandCount - 1] = nBand; + psOptions->anSelectedBandList.push_back(nBand); } else if (EQUAL(papszArgv[iArg], "-hidenodata")) { @@ -2176,34 +2103,29 @@ GDALBuildVRTOptionsNew(char **papszArgv, } else if (EQUAL(papszArgv[iArg], "-srcnodata") && iArg + 1 < argc) { - CPLFree(psOptions->pszSrcNoData); - psOptions->pszSrcNoData = CPLStrdup(papszArgv[++iArg]); + psOptions->osSrcNoData = papszArgv[++iArg]; } else if (EQUAL(papszArgv[iArg], "-vrtnodata") && iArg + 1 < argc) { - CPLFree(psOptions->pszVRTNoData); - psOptions->pszVRTNoData = CPLStrdup(papszArgv[++iArg]); + psOptions->osVRTNoData = papszArgv[++iArg]; } else if (EQUAL(papszArgv[iArg], "-a_srs") && iArg + 1 < argc) { char *pszSRS = SanitizeSRS(papszArgv[++iArg]); if (pszSRS == nullptr) { - GDALBuildVRTOptionsFree(psOptions); return nullptr; } - CPLFree(psOptions->pszOutputSRS); - psOptions->pszOutputSRS = pszSRS; + psOptions->osOutputSRS = pszSRS; + CPLFree(pszSRS); } else if (EQUAL(papszArgv[iArg], "-r") && iArg + 1 < argc) { - CPLFree(psOptions->pszResampling); - psOptions->pszResampling = CPLStrdup(papszArgv[++iArg]); + psOptions->osResampling = papszArgv[++iArg]; } else if (EQUAL(papszArgv[iArg], "-oo") 
&& iArg + 1 < argc) { - psOptions->papszOpenOptions = - CSLAddString(psOptions->papszOpenOptions, papszArgv[++iArg]); + psOptions->aosOpenOptions.AddString(papszArgv[++iArg]); } else if (EQUAL(papszArgv[iArg], "-ignore_srcmaskband")) { @@ -2219,23 +2141,19 @@ GDALBuildVRTOptionsNew(char **papszArgv, { CPLError(CE_Failure, CPLE_NotSupported, "Unknown option name '%s'", papszArgv[iArg]); - GDALBuildVRTOptionsFree(psOptions); return nullptr; } else { if (psOptionsForBinary) { - if (psOptionsForBinary->pszDstFilename == nullptr) - psOptionsForBinary->pszDstFilename = - CPLStrdup(papszArgv[iArg]); + if (psOptionsForBinary->osDstFilename.empty()) + psOptionsForBinary->osDstFilename = papszArgv[iArg]; else { if (!add_file_to_list(papszArgv[iArg], tile_index, - &psOptionsForBinary->nSrcFiles, - &psOptionsForBinary->papszSrcFiles)) + psOptionsForBinary->aosSrcFiles)) { - GDALBuildVRTOptionsFree(psOptions); return nullptr; } } @@ -2243,7 +2161,7 @@ GDALBuildVRTOptionsNew(char **papszArgv, } } - return psOptions; + return psOptions.release(); } /************************************************************************/ @@ -2260,18 +2178,7 @@ GDALBuildVRTOptionsNew(char **papszArgv, void GDALBuildVRTOptionsFree(GDALBuildVRTOptions *psOptions) { - if (psOptions) - { - CPLFree(psOptions->pszResolution); - CPLFree(psOptions->pszSrcNoData); - CPLFree(psOptions->pszVRTNoData); - CPLFree(psOptions->pszOutputSRS); - CPLFree(psOptions->panSelectedBandList); - CPLFree(psOptions->pszResampling); - CSLDestroy(psOptions->papszOpenOptions); - } - - CPLFree(psOptions); + delete psOptions; } /************************************************************************/ @@ -2295,5 +2202,5 @@ void GDALBuildVRTOptionsSetProgress(GDALBuildVRTOptions *psOptions, psOptions->pfnProgress = pfnProgress ? pfnProgress : GDALDummyProgress; psOptions->pProgressData = pProgressData; if (pfnProgress == GDALTermProgress) - psOptions->bQuiet = FALSE; + psOptions->bQuiet = false; } From 7dcead87671937d6dcdbb35a56ce5f7461a2e8cb Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 16 Apr 2024 18:33:23 +0200 Subject: [PATCH 027/230] gdalbuildvrt: use GDALArgumentParser which makes sure that -sd value is an integer (fixes #9672) --- apps/gdal_utils_priv.h | 2 + apps/gdalbuildvrt_bin.cpp | 94 +---- apps/gdalbuildvrt_lib.cpp | 518 +++++++++++++++++---------- doc/source/programs/gdalbuildvrt.rst | 13 +- 4 files changed, 344 insertions(+), 283 deletions(-) diff --git a/apps/gdal_utils_priv.h b/apps/gdal_utils_priv.h index 6f27644231a0..28784130975c 100644 --- a/apps/gdal_utils_priv.h +++ b/apps/gdal_utils_priv.h @@ -241,6 +241,8 @@ std::string CPL_DLL GDALInfoAppGetParserUsage(); std::string CPL_DLL GDALGridGetParserUsage(); +std::string CPL_DLL GDALBuildVRTGetParserUsage(); + #endif /* #ifndef DOXYGEN_SKIP */ #endif /* GDAL_UTILS_PRIV_H_INCLUDED */ diff --git a/apps/gdalbuildvrt_bin.cpp b/apps/gdalbuildvrt_bin.cpp index 6f6b2622dd07..d11ddf0cdec5 100644 --- a/apps/gdalbuildvrt_bin.cpp +++ b/apps/gdalbuildvrt_bin.cpp @@ -37,72 +37,13 @@ /* Usage() */ /************************************************************************/ -static void Usage(bool bIsError, - const char *pszErrorMsg = nullptr) CPL_NO_RETURN; +static void Usage() CPL_NO_RETURN; -static void Usage(bool bIsError, const char *pszErrorMsg) +static void Usage() { - fprintf( - bIsError ? 
stderr : stdout, "%s", - "Usage: gdalbuildvrt [--help] [--help-general]\n" - " [-tileindex <field_name>]\n" - " [-resolution {highest|lowest|average|user}]\n" - " [-te <xmin> <ymin> <xmax> <ymax>] [-tr <xres> " - "<yres>] [-tap]\n" - " [-separate] [-b <band>]... [-sd <subdataset>]\n" - " [-allow_projection_difference] [-q]\n" - " [-addalpha] [-hidenodata]\n" - " [-srcnodata \"<value>[ <value>]...\"] [-vrtnodata " - "\"<value>[ <value>]...\"\n" - " [-ignore_srcmaskband]\n" - " [-nodata_max_mask_threshold <threshold>]\n" - " [-a_srs <srs_def>]\n" - " [-r " - "{nearest|bilinear|cubic|cubicspline|lanczos|average|mode}]\n" - " [-oo <NAME>=<VALUE>]...\n" - " [-input_file_list <filename>] [-overwrite]\n" - " [-strict | -non_strict]\n" - " <output_filename.vrt> <input_raster> " - "[<input_raster>]...\n" - "\n" - "e.g.\n" - " % gdalbuildvrt doq_index.vrt doq/*.tif\n" - " % gdalbuildvrt -input_file_list my_list.txt doq_index.vrt\n" - "\n" - "NOTES:\n" - " o With -separate, each files goes into a separate band in the VRT " - "band.\n" - " Otherwise, the files are considered as tiles of a larger mosaic.\n" - " o -b option selects a band to add into vrt. Multiple bands can be " - "listed.\n" - " By default all bands are queried.\n" - " o The default tile index field is 'location' unless otherwise " - "specified by\n" - " -tileindex.\n" - " o In case the resolution of all input files is not the same, the " - "-resolution\n" - " flag enable the user to control the way the output resolution is " - "computed.\n" - " Average is the default.\n" - " o Input files may be any valid GDAL dataset or a GDAL raster tile " - "index.\n" - " o For a GDAL raster tile index, all entries will be added to the " - "VRT.\n" - " o If one GDAL dataset is made of several subdatasets and has 0 " - "raster bands,\n" - " its datasets will be added to the VRT rather than the dataset " - "itself.\n" - " Single subdataset could be selected by its number using the -sd " - "option.\n" - " o By default, only datasets of same projection and band " - "characteristics\n" - " may be added to the VRT.\n"); - - if (pszErrorMsg != nullptr) - fprintf(stderr, "\nFAILURE: %s\n", pszErrorMsg); - - exit(bIsError ? 
1 : 0); + fprintf(stderr, "%s\n", GDALBuildVRTGetParserUsage().c_str()); + exit(1); } /************************************************************************/ @@ -123,22 +64,6 @@ MAIN_START(argc, argv) if (argc < 1) exit(-argc); - for (int i = 0; argv != nullptr && argv[i] != nullptr; i++) - { - if (EQUAL(argv[i], "--utility_version")) - { - printf("%s was compiled against GDAL %s and is running against " - "GDAL %s\n", - argv[0], GDAL_RELEASE_NAME, GDALVersionInfo("RELEASE_NAME")); - CSLDestroy(argv); - return 0; - } - else if (EQUAL(argv[i], "--help")) - { - Usage(false, nullptr); - } - } - GDALBuildVRTOptionsForBinary sOptionsForBinary; /* coverity[tainted_data] */ GDALBuildVRTOptions *psOptions = @@ -147,12 +72,7 @@ MAIN_START(argc, argv) if (psOptions == nullptr) { - Usage(true, nullptr); - } - - if (sOptionsForBinary.osDstFilename.c_str() == nullptr) - { - Usage(true, "No target filename specified."); + Usage(); } if (!(sOptionsForBinary.bQuiet)) @@ -188,7 +108,7 @@ MAIN_START(argc, argv) sOptionsForBinary.osDstFilename.c_str(), GDALGetDriverShortName(hDriver), sOptionsForBinary.osDstFilename.c_str()); - Usage(true); + Usage(); } } } @@ -199,7 +119,7 @@ MAIN_START(argc, argv) sOptionsForBinary.aosSrcFiles.size(), nullptr, sOptionsForBinary.aosSrcFiles.List(), psOptions, &bUsageError); if (bUsageError) - Usage(true); + Usage(); int nRetCode = (hOutDS) ? 0 : 1; GDALBuildVRTOptionsFree(psOptions); diff --git a/apps/gdalbuildvrt_lib.cpp b/apps/gdalbuildvrt_lib.cpp index 3eea1cf162bb..ae642786f383 100644 --- a/apps/gdalbuildvrt_lib.cpp +++ b/apps/gdalbuildvrt_lib.cpp @@ -33,6 +33,7 @@ #include "cpl_port.h" #include "gdal_utils.h" #include "gdal_utils_priv.h" +#include "gdalargumentparser.h" #include <cassert> #include <cmath> @@ -1734,6 +1735,7 @@ static bool add_file_to_list(const char *filename, const char *tile_index, */ struct GDALBuildVRTOptions { + std::string osTileIndex = "location"; bool bStrict = false; std::string osResolution{}; bool bSeparate = false; @@ -1936,6 +1938,298 @@ static char *SanitizeSRS(const char *pszUserInput) return pszResult; } +/************************************************************************/ +/* GDALBuildVRTOptionsGetParser() */ +/************************************************************************/ + +static std::unique_ptr<GDALArgumentParser> +GDALBuildVRTOptionsGetParser(GDALBuildVRTOptions *psOptions, + GDALBuildVRTOptionsForBinary *psOptionsForBinary) +{ + auto argParser = std::make_unique<GDALArgumentParser>( + "gdalbuildvrt", /* bForBinary=*/psOptionsForBinary != nullptr); + + argParser->add_description(_("Builds a VRT from a list of datasets.")); + + argParser->add_epilog(_( + "\n" + "e.g.\n" + " % gdalbuildvrt doq_index.vrt doq/*.tif\n" + " % gdalbuildvrt -input_file_list my_list.txt doq_index.vrt\n" + "\n" + "NOTES:\n" + " o With -separate, each files goes into a separate band in the VRT " + "band.\n" + " Otherwise, the files are considered as tiles of a larger mosaic.\n" + " o -b option selects a band to add into vrt. 
Multiple bands can be " + "listed.\n" + " By default all bands are queried.\n" + " o The default tile index field is 'location' unless otherwise " + "specified by\n" + " -tileindex.\n" + " o In case the resolution of all input files is not the same, the " + "-resolution\n" + " flag enable the user to control the way the output resolution is " + "computed.\n" + " Average is the default.\n" + " o Input files may be any valid GDAL dataset or a GDAL raster tile " + "index.\n" + " o For a GDAL raster tile index, all entries will be added to the " + "VRT.\n" + " o If one GDAL dataset is made of several subdatasets and has 0 " + "raster bands,\n" + " its datasets will be added to the VRT rather than the dataset " + "itself.\n" + " Single subdataset could be selected by its number using the -sd " + "option.\n" + " o By default, only datasets of same projection and band " + "characteristics\n" + " may be added to the VRT.\n" + "\n" + "For more details, consult " + "https://gdal.org/programs/gdalbuildvrt.html")); + + argParser->add_quiet_argument( + psOptionsForBinary ? &psOptionsForBinary->bQuiet : nullptr); + + { + auto &group = argParser->add_mutually_exclusive_group(); + + group.add_argument("-strict") + .flag() + .store_into(psOptions->bStrict) + .help(_("Turn warnings as failures.")); + + group.add_argument("-non_strict") + .flag() + .action([psOptions](const std::string &) + { psOptions->bStrict = false; }) + .help(_("Skip source datasets that have issues with warnings, and " + "continue processing.")); + } + + argParser->add_argument("-tile_index") + .metavar("<field_name>") + .store_into(psOptions->osTileIndex) + .help(_("Use the specified value as the tile index field, instead of " + "the default value which is 'location'.")); + + argParser->add_argument("-resolution") + .metavar("user|average|highest|lowest") + .action( + [psOptions](const std::string &s) + { + psOptions->osResolution = s; + if (!EQUAL(psOptions->osResolution.c_str(), "user") && + !EQUAL(psOptions->osResolution.c_str(), "average") && + !EQUAL(psOptions->osResolution.c_str(), "highest") && + !EQUAL(psOptions->osResolution.c_str(), "lowest")) + { + throw std::invalid_argument( + CPLSPrintf("Illegal resolution value (%s).", + psOptions->osResolution.c_str())); + } + }) + .help(_("Control the way the output resolution is computed.")); + + argParser->add_argument("-tr") + .metavar("<xres> <yes>") + .nargs(2) + .scan<'g', double>() + .help(_("Set target resolution.")); + + if (psOptionsForBinary) + { + argParser->add_argument("-input_file_list") + .metavar("<filename>") + .action( + [psOptions, psOptionsForBinary](const std::string &s) + { + const char *input_file_list = s.c_str(); + auto f = VSIVirtualHandleUniquePtr( + VSIFOpenL(input_file_list, "r")); + if (f) + { + while (1) + { + const char *filename = CPLReadLineL(f.get()); + if (filename == nullptr) + break; + if (!add_file_to_list( + filename, psOptions->osTileIndex.c_str(), + psOptionsForBinary->aosSrcFiles)) + { + throw std::invalid_argument( + std::string("Cannot add ") + .append(filename) + .append(" to input file list")); + } + } + } + }) + .help(_("Text file with an input filename on each line")); + } + + argParser->add_argument("-separate") + .flag() + .store_into(psOptions->bSeparate) + .help(_("Place each input file into a separate band.")); + + argParser->add_argument("-allow_projection_difference") + .flag() + .store_into(psOptions->bAllowProjectionDifference) + .help(_("Accept source files not in the same projection (but without " + "reprojecting them!).")); 
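For illustration only (not part of this patch), the user-visible effect of routing these options through GDALArgumentParser can be sketched from the Python bindings; the exact error messages are not guaranteed, and the /vsimem filenames below are placeholders:

from osgeo import gdal

gdal.UseExceptions()

# Create a throw-away source so BuildVRT has something to reference.
ds = gdal.GetDriverByName("GTiff").Create("/vsimem/in.tif", 1, 1)
ds = None

# Accepted: a recognized -resolution keyword.
gdal.BuildVRT("/vsimem/ok.vrt", ["/vsimem/in.tif"], options="-resolution average")

# Rejected at option-parsing time: an unknown -resolution keyword, or a
# non-integer -sd value (previously atoi() silently turned it into 0).
for bad_options in ("-resolution bogus", "-sd not_an_int"):
    try:
        gdal.BuildVRT("/vsimem/bad.vrt", ["/vsimem/in.tif"], options=bad_options)
    except RuntimeError as exc:
        print("rejected:", bad_options, "->", exc)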
+ + argParser->add_argument("-sd") + .metavar("<n>") + .store_into(psOptions->nSubdataset) + .help(_("Use subdataset of specified index (starting at 1), instead of " + "the source dataset itself.")); + + argParser->add_argument("-tap") + .flag() + .store_into(psOptions->bTargetAlignedPixels) + .help(_("Align the coordinates of the extent of the output file to the " + "values of the resolution.")); + + argParser->add_argument("-te") + .metavar("<xmin> <ymin> <xmax> <ymax>") + .nargs(4) + .scan<'g', double>() + .help(_("Set georeferenced extents of output file to be created.")); + + argParser->add_argument("-addalpha") + .flag() + .store_into(psOptions->bAddAlpha) + .help(_("Adds an alpha mask band to the VRT when the source raster " + "have none.")); + + argParser->add_argument("-b") + .metavar("<band>") + .append() + .store_into(psOptions->anSelectedBandList) + .help(_("Specify input band(s) number.")); + + argParser->add_argument("-hidenodata") + .flag() + .store_into(psOptions->bHideNoData) + .help(_("Makes the VRT band not report the NoData.")); + + if (psOptionsForBinary) + { + argParser->add_argument("-overwrite") + .flag() + .store_into(psOptionsForBinary->bOverwrite) + .help(_("Overwrite the VRT if it already exists.")); + } + + argParser->add_argument("-srcnodata") + .metavar("\"<value>[ <value>]...\"") + .store_into(psOptions->osSrcNoData) + .help(_("Set nodata values for input bands.")); + + argParser->add_argument("-vrtnodata") + .metavar("\"<value>[ <value>]...\"") + .store_into(psOptions->osVRTNoData) + .help(_("Set nodata values at the VRT band level.")); + + argParser->add_argument("-a_srs") + .metavar("<srs_def>") + .action( + [psOptions](const std::string &s) + { + char *pszSRS = SanitizeSRS(s.c_str()); + if (pszSRS == nullptr) + { + throw std::invalid_argument("Invalid value for -a_srs"); + } + psOptions->osOutputSRS = pszSRS; + CPLFree(pszSRS); + }) + .help(_("Override the projection for the output file..")); + + argParser->add_argument("-r") + .metavar("nearest|bilinear|cubic|cubicspline|lanczos|average|mode") + .store_into(psOptions->osResampling) + .help(_("Resampling algorithm.")); + + argParser->add_open_options_argument(&psOptions->aosOpenOptions); + + argParser->add_argument("-ignore_srcmaskband") + .flag() + .action([psOptions](const std::string &) + { psOptions->bUseSrcMaskBand = false; }) + .help(_("Cause mask band of sources will not be taken into account.")); + + argParser->add_argument("-nodata_max_mask_threshold") + .metavar("<threshold>") + .scan<'g', double>() + .action( + [psOptions](const std::string &s) + { + psOptions->bNoDataFromMask = true; + psOptions->dfMaskValueThreshold = CPLAtofM(s.c_str()); + }) + .help(_("Replaces the value of the source with the value of -vrtnodata " + "when the value of the mask band of the source is less or " + "equal to the threshold.")); + + if (psOptionsForBinary) + { + if (psOptionsForBinary->osDstFilename.empty()) + { + // We normally go here, unless undocumented -o switch is used + argParser->add_argument("vrt_dataset_name") + .metavar("<vrt_dataset_name>") + .store_into(psOptionsForBinary->osDstFilename) + .help(_("Output VRT.")); + } + + argParser->add_argument("src_dataset_name") + .metavar("<src_dataset_name>") + .nargs(argparse::nargs_pattern::any) + .action( + [psOptions, psOptionsForBinary](const std::string &s) + { + if (!add_file_to_list(s.c_str(), + psOptions->osTileIndex.c_str(), + psOptionsForBinary->aosSrcFiles)) + { + throw std::invalid_argument( + std::string("Cannot add ") + .append(s) + .append(" to 
input file list")); + } + }) + .help(_("Input dataset(s).")); + } + + return argParser; +} + +/************************************************************************/ +/* GDALBuildVRTGetParserUsage() */ +/************************************************************************/ + +std::string GDALBuildVRTGetParserUsage() +{ + try + { + GDALBuildVRTOptions sOptions; + GDALBuildVRTOptionsForBinary sOptionsForBinary; + auto argParser = + GDALBuildVRTOptionsGetParser(&sOptions, &sOptionsForBinary); + return argParser->usage(); + } + catch (const std::exception &err) + { + CPLError(CE_Failure, CPLE_AppDefined, "Unexpected exception: %s", + err.what()); + return std::string(); + } +} + /************************************************************************/ /* GDALBuildVRTOptionsNew() */ /************************************************************************/ @@ -1962,206 +2256,52 @@ GDALBuildVRTOptionsNew(char **papszArgv, { auto psOptions = std::make_unique<GDALBuildVRTOptions>(); - const char *tile_index = "location"; - - /* -------------------------------------------------------------------- */ - /* Parse arguments. */ - /* -------------------------------------------------------------------- */ - int argc = CSLCount(papszArgv); - for (int iArg = 0; papszArgv != nullptr && iArg < argc; iArg++) + CPLStringList aosArgv; + const int nArgc = CSLCount(papszArgv); + for (int i = 0; + i < nArgc && papszArgv != nullptr && papszArgv[i] != nullptr; i++) { - if (strcmp(papszArgv[iArg], "-strict") == 0) - { - psOptions->bStrict = true; - } - else if (strcmp(papszArgv[iArg], "-non_strict") == 0) + if (psOptionsForBinary && EQUAL(papszArgv[i], "-o") && i + 1 < nArgc && + papszArgv[i + 1] != nullptr) { - psOptions->bStrict = false; + // Undocumented alternate way of specifying the destination file + psOptionsForBinary->osDstFilename = papszArgv[i + 1]; + ++i; } - else if (EQUAL(papszArgv[iArg], "-tileindex") && iArg + 1 < argc) - { - tile_index = papszArgv[++iArg]; - } - else if (EQUAL(papszArgv[iArg], "-resolution") && iArg + 1 < argc) - { - psOptions->osResolution = papszArgv[++iArg]; - if (!EQUAL(psOptions->osResolution.c_str(), "user") && - !EQUAL(psOptions->osResolution.c_str(), "average") && - !EQUAL(psOptions->osResolution.c_str(), "highest") && - !EQUAL(psOptions->osResolution.c_str(), "lowest")) - { - CPLError(CE_Failure, CPLE_IllegalArg, - "Illegal resolution value (%s).", - psOptions->osResolution.c_str()); - return nullptr; - } - } - else if (EQUAL(papszArgv[iArg], "-input_file_list") && iArg + 1 < argc) - { - ++iArg; - if (psOptionsForBinary) - { - const char *input_file_list = papszArgv[iArg]; - auto f = - VSIVirtualHandleUniquePtr(VSIFOpenL(input_file_list, "r")); - if (f) - { - while (1) - { - const char *filename = CPLReadLineL(f.get()); - if (filename == nullptr) - break; - if (!add_file_to_list(filename, tile_index, - psOptionsForBinary->aosSrcFiles)) - { - return nullptr; - } - } - } - } - else - { - CPLError(CE_Failure, CPLE_NotSupported, - "-input_file_list not supported in non binary mode"); - } - } - else if (EQUAL(papszArgv[iArg], "-separate")) - { - psOptions->bSeparate = TRUE; - } - else if (EQUAL(papszArgv[iArg], "-allow_projection_difference")) - { - psOptions->bAllowProjectionDifference = TRUE; - } - else if (EQUAL(papszArgv[iArg], "-sd") && iArg + 1 < argc) - { - psOptions->nSubdataset = atoi(papszArgv[++iArg]); - } - /* Alternate syntax for output file */ - else if (EQUAL(papszArgv[iArg], "-o") && iArg + 1 < argc) - { - ++iArg; - if (psOptionsForBinary) - { - 
psOptionsForBinary->osDstFilename = papszArgv[iArg]; - } - else - { - CPLError(CE_Failure, CPLE_NotSupported, - "-o not supported in non binary mode"); - } - } - else if (EQUAL(papszArgv[iArg], "-q") || - EQUAL(papszArgv[iArg], "-quiet")) - { - if (psOptionsForBinary) - { - psOptionsForBinary->bQuiet = TRUE; - } - } - else if (EQUAL(papszArgv[iArg], "-tr") && iArg + 2 < argc) - { - psOptions->we_res = CPLAtofM(papszArgv[++iArg]); - psOptions->ns_res = CPLAtofM(papszArgv[++iArg]); - } - else if (EQUAL(papszArgv[iArg], "-tap")) - { - psOptions->bTargetAlignedPixels = TRUE; - } - else if (EQUAL(papszArgv[iArg], "-te") && iArg + 4 < argc) - { - psOptions->xmin = CPLAtofM(papszArgv[++iArg]); - psOptions->ymin = CPLAtofM(papszArgv[++iArg]); - psOptions->xmax = CPLAtofM(papszArgv[++iArg]); - psOptions->ymax = CPLAtofM(papszArgv[++iArg]); - } - else if (EQUAL(papszArgv[iArg], "-addalpha")) + else { - psOptions->bAddAlpha = TRUE; + aosArgv.AddString(papszArgv[i]); } - else if (EQUAL(papszArgv[iArg], "-b") && iArg + 1 < argc) - { - const char *pszBand = papszArgv[++iArg]; - int nBand = atoi(pszBand); - if (nBand < 1) - { - CPLError(CE_Failure, CPLE_IllegalArg, - "Illegal band number (%s).", papszArgv[iArg]); - return nullptr; - } + } - psOptions->anSelectedBandList.push_back(nBand); - } - else if (EQUAL(papszArgv[iArg], "-hidenodata")) - { - psOptions->bHideNoData = TRUE; - } - else if (EQUAL(papszArgv[iArg], "-overwrite")) - { - if (psOptionsForBinary) - psOptionsForBinary->bOverwrite = TRUE; - } - else if (EQUAL(papszArgv[iArg], "-srcnodata") && iArg + 1 < argc) - { - psOptions->osSrcNoData = papszArgv[++iArg]; - } - else if (EQUAL(papszArgv[iArg], "-vrtnodata") && iArg + 1 < argc) - { - psOptions->osVRTNoData = papszArgv[++iArg]; - } - else if (EQUAL(papszArgv[iArg], "-a_srs") && iArg + 1 < argc) - { - char *pszSRS = SanitizeSRS(papszArgv[++iArg]); - if (pszSRS == nullptr) - { - return nullptr; - } - psOptions->osOutputSRS = pszSRS; - CPLFree(pszSRS); - } - else if (EQUAL(papszArgv[iArg], "-r") && iArg + 1 < argc) - { - psOptions->osResampling = papszArgv[++iArg]; - } - else if (EQUAL(papszArgv[iArg], "-oo") && iArg + 1 < argc) - { - psOptions->aosOpenOptions.AddString(papszArgv[++iArg]); - } - else if (EQUAL(papszArgv[iArg], "-ignore_srcmaskband")) - { - psOptions->bUseSrcMaskBand = false; - } - else if (EQUAL(papszArgv[iArg], "-nodata_max_mask_threshold") && - iArg + 1 < argc) - { - psOptions->bNoDataFromMask = true; - psOptions->dfMaskValueThreshold = CPLAtofM(papszArgv[++iArg]); - } - else if (papszArgv[iArg][0] == '-') + try + { + auto argParser = + GDALBuildVRTOptionsGetParser(psOptions.get(), psOptionsForBinary); + + argParser->parse_args_without_binary_name(aosArgv.List()); + + if (auto adfTargetRes = argParser->present<std::vector<double>>("-tr")) { - CPLError(CE_Failure, CPLE_NotSupported, "Unknown option name '%s'", - papszArgv[iArg]); - return nullptr; + psOptions->we_res = (*adfTargetRes)[0]; + psOptions->ns_res = (*adfTargetRes)[1]; } - else + + if (auto oTE = argParser->present<std::vector<double>>("-te")) { - if (psOptionsForBinary) - { - if (psOptionsForBinary->osDstFilename.empty()) - psOptionsForBinary->osDstFilename = papszArgv[iArg]; - else - { - if (!add_file_to_list(papszArgv[iArg], tile_index, - psOptionsForBinary->aosSrcFiles)) - { - return nullptr; - } - } - } + psOptions->xmin = (*oTE)[0]; + psOptions->ymin = (*oTE)[1]; + psOptions->xmax = (*oTE)[2]; + psOptions->ymax = (*oTE)[3]; } - } - return psOptions.release(); + return psOptions.release(); + } + catch (const 
std::exception &err) + { + CPLError(CE_Failure, CPLE_AppDefined, "%s", err.what()); + return nullptr; + } } /************************************************************************/ diff --git a/doc/source/programs/gdalbuildvrt.rst b/doc/source/programs/gdalbuildvrt.rst index f3bf3eec0a11..0a8a28cf9cab 100644 --- a/doc/source/programs/gdalbuildvrt.rst +++ b/doc/source/programs/gdalbuildvrt.rst @@ -19,7 +19,7 @@ Synopsis [-tileindex <field_name>] [-resolution {highest|lowest|average|user}] [-te <xmin> <ymin> <xmax> <ymax>] [-tr <xres> <yres>] [-tap] - [-separate] [-b <band>]... [-sd <subdataset>] + [-separate] [-b <band>]... [-sd <n>] [-allow_projection_difference] [-q] [-addalpha] [-hidenodata] [-srcnodata "<value>[ <value>]..."] [-vrtnodata "<value>[ <value>]..." @@ -72,7 +72,7 @@ changed in later versions. .. include:: options/help_and_help_general.rst -.. option:: -tileindex +.. option:: -tileindex <field_name> Use the specified value as the tile index field, instead of the default value which is 'location'. @@ -161,12 +161,11 @@ changed in later versions. If input bands not set all bands will be added to vrt. Multiple :option:`-b` switches may be used to select a set of input bands. -.. option:: -sd< <subdataset> +.. option:: -sd <n> - If the input - dataset contains several subdatasets use a subdataset with the specified - number (starting from 1). This is an alternative of giving the full subdataset - name as an input. + If the input dataset contains several subdatasets, use a subdataset with the + specified number (starting from 1). This is an alternative of giving the full subdataset + name as an input to the utility. .. option:: -vrtnodata "<value>[ <value>]..." From 0387bac1dffe2b98cb86a3da5ea8d7162763e606 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 22 Feb 2024 03:20:33 +0100 Subject: [PATCH 028/230] GDALDataset: make GetRasterXSize/YSize/Count/Band() const --- gcore/gdal_priv.h | 11 ++++++----- gcore/gdaldataset.cpp | 45 +++++++++++++++++++++++++++++++++++++++---- 2 files changed, 47 insertions(+), 9 deletions(-) diff --git a/gcore/gdal_priv.h b/gcore/gdal_priv.h index 3ebdd14bb439..541d2ae5cd27 100644 --- a/gcore/gdal_priv.h +++ b/gcore/gdal_priv.h @@ -621,10 +621,11 @@ class CPL_DLL GDALDataset : public GDALMajorObject virtual CPLErr Close(); - int GetRasterXSize(); - int GetRasterYSize(); - int GetRasterCount(); + int GetRasterXSize() const; + int GetRasterYSize() const; + int GetRasterCount() const; GDALRasterBand *GetRasterBand(int); + const GDALRasterBand *GetRasterBand(int) const; /** * @brief SetQueryLoggerFunc @@ -812,8 +813,8 @@ class CPL_DLL GDALDataset : public GDALMajorObject ); #ifndef DOXYGEN_XML - void ReportError(CPLErr eErrClass, CPLErrorNum err_no, const char *fmt, ...) - CPL_PRINT_FUNC_FORMAT(4, 5); + void ReportError(CPLErr eErrClass, CPLErrorNum err_no, const char *fmt, + ...) const CPL_PRINT_FUNC_FORMAT(4, 5); static void ReportError(const char *pszDSName, CPLErr eErrClass, CPLErrorNum err_no, const char *fmt, ...) 
diff --git a/gcore/gdaldataset.cpp b/gcore/gdaldataset.cpp index 49118992ed81..45b333a10d69 100644 --- a/gcore/gdaldataset.cpp +++ b/gcore/gdaldataset.cpp @@ -931,7 +931,7 @@ void GDALDataset::SetBand(int nNewBand, std::unique_ptr<GDALRasterBand> poBand) */ -int GDALDataset::GetRasterXSize() +int GDALDataset::GetRasterXSize() const { return nRasterXSize; } @@ -968,7 +968,7 @@ int CPL_STDCALL GDALGetRasterXSize(GDALDatasetH hDataset) */ -int GDALDataset::GetRasterYSize() +int GDALDataset::GetRasterYSize() const { return nRasterYSize; } @@ -1028,6 +1028,43 @@ GDALRasterBand *GDALDataset::GetRasterBand(int nBandId) return nullptr; } +/************************************************************************/ +/* GetRasterBand() */ +/************************************************************************/ + +/** + + \brief Fetch a band object for a dataset. + + See GetBands() for a C++ iterator version of this method. + + Equivalent of the C function GDALGetRasterBand(). + + @param nBandId the index number of the band to fetch, from 1 to + GetRasterCount(). + + @return the nBandId th band object + +*/ + +const GDALRasterBand *GDALDataset::GetRasterBand(int nBandId) const + +{ + if (papoBands) + { + if (nBandId < 1 || nBandId > nBands) + { + ReportError(CE_Failure, CPLE_IllegalArg, + "GDALDataset::GetRasterBand(%d) - Illegal band #\n", + nBandId); + return nullptr; + } + + return papoBands[nBandId - 1]; + } + return nullptr; +} + /************************************************************************/ /* GDALGetRasterBand() */ /************************************************************************/ @@ -1058,7 +1095,7 @@ GDALRasterBandH CPL_STDCALL GDALGetRasterBand(GDALDatasetH hDS, int nBandId) * @return the number of raster bands. */ -int GDALDataset::GetRasterCount() +int GDALDataset::GetRasterCount() const { return papoBands ? nBands : 0; } @@ -4500,7 +4537,7 @@ int GDALDataset::CloseDependentDatasets() */ void GDALDataset::ReportError(CPLErr eErrClass, CPLErrorNum err_no, - const char *fmt, ...) + const char *fmt, ...) const { va_list args; va_start(args, fmt); From d5fd5086a56514b54734a58de4b781f41ceeec48 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 5 Mar 2024 23:31:40 +0100 Subject: [PATCH 029/230] VRT: add a new mode to apply chained processing steps that apply to several bands at the same time The following built-in algorithms are introduced, and typically applied in the following order: - Dehazing: remove haze effects by applying (subsampled) gain and offset auxiliary datasets. - BandAffineCombination: to perform an affine transformation combination of bands. 
- Trimming: local thresholding of saturation - LUT: apply a look-up table (band per band) --- autotest/gdrivers/vrtprocesseddataset.py | 1258 +++++++++++++ doc/source/drivers/raster/vrt.rst | 16 + .../drivers/raster/vrt_processed_dataset.rst | 261 +++ frmts/vrt/CMakeLists.txt | 2 + frmts/vrt/data/gdalvrt.xsd | 136 +- frmts/vrt/vrtdataset.cpp | 170 +- frmts/vrt/vrtdataset.h | 182 +- frmts/vrt/vrtdriver.cpp | 14 +- frmts/vrt/vrtprocesseddataset.cpp | 1342 ++++++++++++++ frmts/vrt/vrtprocesseddatasetfunctions.cpp | 1579 +++++++++++++++++ frmts/vrt/vrtsources.cpp | 92 +- gcore/gdal.h | 109 ++ 12 files changed, 5008 insertions(+), 153 deletions(-) create mode 100755 autotest/gdrivers/vrtprocesseddataset.py create mode 100644 doc/source/drivers/raster/vrt_processed_dataset.rst create mode 100644 frmts/vrt/vrtprocesseddataset.cpp create mode 100644 frmts/vrt/vrtprocesseddatasetfunctions.cpp diff --git a/autotest/gdrivers/vrtprocesseddataset.py b/autotest/gdrivers/vrtprocesseddataset.py new file mode 100755 index 000000000000..7ebec7613d0d --- /dev/null +++ b/autotest/gdrivers/vrtprocesseddataset.py @@ -0,0 +1,1258 @@ +#!/usr/bin/env pytest +############################################################################### +# $Id$ +# +# Project: GDAL/OGR Test Suite +# Purpose: Test VRTProcessedDataset support. +# Author: Even Rouault <even.rouault at spatialys.com> +# +############################################################################### +# Copyright (c) 2024, Even Rouault <even.rouault at spatialys.com> +# +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the "Software"), +# to deal in the Software without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, +# and/or sell copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +# DEALINGS IN THE SOFTWARE. 
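As an illustrative reading (not the driver code itself), the BandAffineCombination cases below are consistent with the convention that output band i is an affine combination of the input bands, clamped to [min, max] when those arguments are given, with "coefficients_i" holding c0..cN:

# Sketch of the assumed convention:
#   out_band_i = clamp(c0 + sum_j(c_j * in_band_j), min, max)
def affine_combination(coefficients, in_bands, vmin=None, vmax=None):
    out = coefficients[0] + sum(c * v for c, v in zip(coefficients[1:], in_bands))
    if vmin is not None:
        out = max(out, vmin)
    if vmax is not None:
        out = min(out, vmax)
    return out

# First pixel of test_vrtprocesseddataset_affine_combination_nominal below:
# input bands are (1, 2, 3), with min=15 and max=32.
assert affine_combination([10, 0, 1, 0], (1, 2, 3), 15, 32) == 15  # 12 clamped up
assert affine_combination([20, 0, 0, 1], (1, 2, 3), 15, 32) == 23
assert affine_combination([30, 1, 0, 0], (1, 2, 3), 15, 32) == 31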
+############################################################################### + +import struct + +import gdaltest +import pytest + +from osgeo import gdal + +############################################################################### +# Test error cases in general VRTProcessedDataset XML structure + + +def test_vrtprocesseddataset_errors(tmp_vsimem): + + with pytest.raises(Exception, match="Input element missing"): + gdal.Open( + """<VRTDataset subclass='VRTProcessedDataset'> + </VRTDataset> + """ + ) + + with pytest.raises( + Exception, + match="Input element should have a SourceFilename or VRTDataset element", + ): + gdal.Open( + """<VRTDataset subclass='VRTProcessedDataset'> + <Input/> + </VRTDataset> + """ + ) + + with pytest.raises(Exception): # "No such file or directory'", but O/S dependent + gdal.Open( + """<VRTDataset subclass='VRTProcessedDataset'> + <Input><SourceFilename/></Input> + </VRTDataset> + """ + ) + + with pytest.raises( + Exception, + match="Missing one of rasterXSize, rasterYSize or bands on VRTDataset", + ): + gdal.Open( + """<VRTDataset subclass='VRTProcessedDataset'> + <Input><VRTDataset/></Input> + </VRTDataset> + """ + ) + + src_filename = str(tmp_vsimem / "src.tif") + src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 10, 5, 3) + src_ds.GetRasterBand(1).Fill(1) + src_ds.GetRasterBand(2).Fill(2) + src_ds.GetRasterBand(3).Fill(3) + src_ds.Close() + + with pytest.raises(Exception, match="ProcessingSteps element missing"): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + </VRTDataset> + """ + ) + + with pytest.raises( + Exception, match="Inconsistent declared VRT dimensions with input dataset" + ): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset' rasterXSize='1'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + </VRTDataset> + """ + ) + + with pytest.raises( + Exception, match="Inconsistent declared VRT dimensions with input dataset" + ): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset' rasterYSize='1'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + </VRTDataset> + """ + ) + + with pytest.raises(Exception, match="At least one step should be defined"): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps/> + </VRTDataset> + """ + ) + + +############################################################################### +# Test nominal cases of BandAffineCombination algorithm + + +def test_vrtprocesseddataset_affine_combination_nominal(tmp_vsimem): + + src_filename = str(tmp_vsimem / "src.tif") + src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 2, 1, 3) + src_ds.GetRasterBand(1).WriteRaster(0, 0, 2, 1, b"\x01\x03") + src_ds.GetRasterBand(2).WriteRaster(0, 0, 2, 1, b"\x02\x06") + src_ds.GetRasterBand(3).WriteRaster(0, 0, 2, 1, b"\x03\x03") + src_ds.Close() + + ds = gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step name="Affine combination of band values"> + <Algorithm>BandAffineCombination</Algorithm> + <Argument name="coefficients_1">10,0,1,0</Argument> + <Argument name="coefficients_2">20,0,0,1</Argument> + <Argument name="coefficients_3">30,1,0,0</Argument> + <Argument name="min">15</Argument> + <Argument name="max">32</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> 
+ """ + ) + assert ds.RasterXSize == 2 + assert ds.RasterYSize == 1 + assert ds.RasterCount == 3 + assert ds.GetSpatialRef() is None + assert ds.GetGeoTransform(can_return_null=True) is None + assert ds.GetRasterBand(1).DataType == gdal.GDT_Byte + assert struct.unpack("B" * 2, ds.GetRasterBand(1).ReadRaster()) == (15, 10 + 6) + assert struct.unpack("B" * 2, ds.GetRasterBand(2).ReadRaster()) == (20 + 3, 20 + 3) + assert struct.unpack("B" * 2, ds.GetRasterBand(3).ReadRaster()) == (30 + 1, 32) + + +############################################################################### +# Test several steps in a VRTProcessedDataset + + +def test_vrtprocesseddataset_several_steps(tmp_vsimem): + + src_filename = str(tmp_vsimem / "src.tif") + src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 10, 5, 3) + src_ds.GetRasterBand(1).Fill(1) + src_ds.GetRasterBand(2).Fill(2) + src_ds.GetRasterBand(3).Fill(3) + src_ds.Close() + + ds = gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>BandAffineCombination</Algorithm> + <Argument name="coefficients_1">0,0,1,0</Argument> + <Argument name="coefficients_2">0,0,0,1</Argument> + <Argument name="coefficients_3">0,1,0,0</Argument> + </Step> + <Step> + <Algorithm>BandAffineCombination</Algorithm> + <Argument name="coefficients_1">0,0,1,0</Argument> + <Argument name="coefficients_2">0,0,0,1</Argument> + <Argument name="coefficients_3">0,1,0,0</Argument> + </Step> + <Step> + <Algorithm>BandAffineCombination</Algorithm> + <Argument name="coefficients_1">0,0,1,0</Argument> + <Argument name="coefficients_2">0,0,0,1</Argument> + <Argument name="coefficients_3">0,1,0,0</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + assert ds.RasterXSize == 10 + assert ds.RasterYSize == 5 + assert ds.RasterCount == 3 + assert ds.GetSpatialRef() is None + assert ds.GetGeoTransform(can_return_null=True) is None + assert ds.GetRasterBand(1).DataType == gdal.GDT_Byte + assert ds.GetRasterBand(1).ComputeRasterMinMax(False) == (1, 1) + assert ds.GetRasterBand(2).ComputeRasterMinMax(False) == (2, 2) + assert ds.GetRasterBand(3).ComputeRasterMinMax(False) == (3, 3) + + +############################################################################### +# Test nominal cases of BandAffineCombination algorithm with nodata + + +def test_vrtprocesseddataset_affine_combination_nodata(tmp_vsimem): + + src_filename = str(tmp_vsimem / "src.tif") + src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 2, 1, 2) + src_ds.GetRasterBand(1).WriteRaster(0, 0, 2, 1, b"\x01\x02") + src_ds.GetRasterBand(1).SetNoDataValue(1) + src_ds.GetRasterBand(2).WriteRaster(0, 0, 2, 1, b"\x03\x03") + src_ds.GetRasterBand(2).SetNoDataValue(1) + src_ds.Close() + + ds = gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step name="Affine combination of band values"> + <Algorithm>BandAffineCombination</Algorithm> + <Argument name="coefficients_1">0,1,1</Argument> + <Argument name="coefficients_2">0,1,-1</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + assert ds.GetRasterBand(1).DataType == gdal.GDT_Byte + assert struct.unpack("B" * 2, ds.GetRasterBand(1).ReadRaster()) == (1, 5) + # 0 should actually be 3-2=1, but this is the nodata value hence the replacement value + assert struct.unpack("B" * 2, ds.GetRasterBand(2).ReadRaster()) == (1, 0) + + +def 
test_vrtprocesseddataset_affine_combination_nodata_as_parameter(tmp_vsimem): + + src_filename = str(tmp_vsimem / "src.tif") + src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 2, 1, 2) + src_ds.GetRasterBand(1).WriteRaster(0, 0, 2, 1, b"\x01\x02") + src_ds.GetRasterBand(2).WriteRaster(0, 0, 2, 1, b"\x03\x03") + src_ds.Close() + + ds = gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step name="Affine combination of band values"> + <Algorithm>BandAffineCombination</Algorithm> + <Argument name="coefficients_1">0,1,1</Argument> + <Argument name="coefficients_2">256,1,-1</Argument> + <Argument name="src_nodata">1</Argument> + <Argument name="dst_nodata">255</Argument> + <Argument name="dst_intended_datatype">Byte</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + assert ds.GetRasterBand(1).DataType == gdal.GDT_Byte + assert struct.unpack("B" * 2, ds.GetRasterBand(1).ReadRaster()) == (255, 5) + # 254 should actually be 256+1*2+(-1)*3=255, but this is the nodata value hence the replacement value + assert struct.unpack("B" * 2, ds.GetRasterBand(2).ReadRaster()) == (255, 254) + + +############################################################################### +# Test replacement_nodata logic of BandAffineCombination + + +def test_vrtprocesseddataset_affine_combination_replacement_nodata(tmp_vsimem): + + src_filename = str(tmp_vsimem / "src.tif") + src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 2, 1, 2) + src_ds.GetRasterBand(1).WriteRaster(0, 0, 2, 1, b"\x01\x02") + src_ds.GetRasterBand(2).WriteRaster(0, 0, 2, 1, b"\x03\x03") + src_ds.Close() + + ds = gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step name="Affine combination of band values"> + <Algorithm>BandAffineCombination</Algorithm> + <Argument name="coefficients_1">0,1,1</Argument> + <Argument name="coefficients_2">256,1,-1</Argument> + <Argument name="src_nodata">1</Argument> + <Argument name="dst_nodata">255</Argument> + <Argument name="replacement_nodata">128</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + assert ds.GetRasterBand(1).DataType == gdal.GDT_Byte + assert struct.unpack("B" * 2, ds.GetRasterBand(1).ReadRaster()) == (255, 5) + # 254 should actually be 256+1*2+(-1)*3=255, but this is the nodata value hence the replacement value + assert struct.unpack("B" * 2, ds.GetRasterBand(2).ReadRaster()) == (255, 128) + + +############################################################################### +# Test error cases of BandAffineCombination algorithm + + +def test_vrtprocesseddataset_affine_combination_errors(tmp_vsimem): + + src_filename = str(tmp_vsimem / "src.tif") + src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 10, 5, 3) + src_ds.GetRasterBand(1).Fill(1) + src_ds.GetRasterBand(2).Fill(2) + src_ds.GetRasterBand(3).Fill(3) + src_ds.Close() + + with pytest.raises( + Exception, + match="Step 'Affine combination of band values' lacks required Argument 'coefficients_{band}'", + ): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step name="Affine combination of band values"> + <Algorithm>BandAffineCombination</Algorithm> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + with pytest.raises( + Exception, match="Argument coefficients_1 
has 3 values, whereas 4 are expected" + ): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step name="Affine combination of band values"> + <Algorithm>BandAffineCombination</Algorithm> + <Argument name="coefficients_1">10,0,1</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + with pytest.raises(Exception, match="Argument coefficients_3 is missing"): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step name="Affine combination of band values"> + <Algorithm>BandAffineCombination</Algorithm> + <Argument name="coefficients_1">10,0,1,0</Argument> + <Argument name="coefficients_2">10,0,1,0</Argument> + <Argument name="coefficients_4">10,0,1,0</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + with pytest.raises( + Exception, + match="Final step expect 3 bands, but only 1 coefficient_XX are provided", + ): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step name="Affine combination of band values"> + <Algorithm>BandAffineCombination</Algorithm> + <Argument name="coefficients_1">10,0,1,0</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + +############################################################################### +# Test nominal cases of LUT algorithm + + +def test_vrtprocesseddataset_lut_nominal(tmp_vsimem): + + src_filename = str(tmp_vsimem / "src.tif") + src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 3, 1, 2) + src_ds.GetRasterBand(1).WriteRaster(0, 0, 3, 1, b"\x01\x02\x03") + src_ds.GetRasterBand(2).WriteRaster(0, 0, 3, 1, b"\x01\x02\x03") + src_ds.Close() + + ds = gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>LUT</Algorithm> + <Argument name="lut_1">1.5:10,2.5:20</Argument> + <Argument name="lut_2">1.5:100,2.5:200</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + assert struct.unpack("B" * 3, ds.GetRasterBand(1).ReadRaster()) == (10, 15, 20) + assert struct.unpack("B" * 3, ds.GetRasterBand(2).ReadRaster()) == (100, 150, 200) + + +############################################################################### +# Test nominal cases of LUT algorithm with nodata coming from input dataset + + +def test_vrtprocesseddataset_lut_nodata(tmp_vsimem): + + src_filename = str(tmp_vsimem / "src.tif") + src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 4, 1, 2) + src_ds.GetRasterBand(1).WriteRaster(0, 0, 4, 1, b"\x00\x01\x02\x03") + src_ds.GetRasterBand(1).SetNoDataValue(0) + src_ds.GetRasterBand(2).WriteRaster(0, 0, 4, 1, b"\x00\x01\x02\x03") + src_ds.GetRasterBand(2).SetNoDataValue(0) + src_ds.Close() + + ds = gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>LUT</Algorithm> + <Argument name="lut_1">1.5:10,2.5:20</Argument> + <Argument name="lut_2">1.5:100,2.5:200</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + assert struct.unpack("B" * 4, ds.GetRasterBand(1).ReadRaster()) == (0, 10, 15, 20) + assert struct.unpack("B" * 4, ds.GetRasterBand(2).ReadRaster()) == ( + 0, + 100, + 150, + 200, + ) + + 
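The expected values in the LUT cases above are consistent with a piecewise-linear mapping between the "src:dst" breakpoints, clamped at both ends; a small sketch of that reading (an assumption for illustration, not the driver implementation):

def apply_lut(lut, x):
    # lut is a string such as "1.5:10,2.5:20" listing src:dst breakpoints.
    pts = [tuple(map(float, p.split(":"))) for p in lut.split(",")]
    if x <= pts[0][0]:
        return pts[0][1]
    if x >= pts[-1][0]:
        return pts[-1][1]
    for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
        if x0 <= x <= x1:
            return y0 + (y1 - y0) * (x - x0) / (x1 - x0)

assert [apply_lut("1.5:10,2.5:20", v) for v in (1, 2, 3)] == [10, 15, 20]
assert [apply_lut("1.5:100,2.5:200", v) for v in (1, 2, 3)] == [100, 150, 200]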
+############################################################################### +# Test nominal cases of LUT algorithm with nodata set as a parameter + + +def test_vrtprocesseddataset_lut_nodata_as_parameter(tmp_vsimem): + + src_filename = str(tmp_vsimem / "src.tif") + src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 4, 1, 2) + src_ds.GetRasterBand(1).WriteRaster(0, 0, 4, 1, b"\x00\x01\x02\x03") + src_ds.GetRasterBand(2).WriteRaster(0, 0, 4, 1, b"\x00\x01\x02\x03") + src_ds.Close() + + ds = gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>LUT</Algorithm> + <Argument name="lut_1">1.5:10,2.5:20</Argument> + <Argument name="lut_2">1.5:100,2.5:200</Argument> + <Argument name="src_nodata">0</Argument> + <Argument name="dst_nodata">1</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + assert struct.unpack("B" * 4, ds.GetRasterBand(1).ReadRaster()) == (1, 10, 15, 20) + assert struct.unpack("B" * 4, ds.GetRasterBand(2).ReadRaster()) == ( + 1, + 100, + 150, + 200, + ) + + +############################################################################### +# Test error cases of LUT algorithm + + +def test_vrtprocesseddataset_lut_errors(tmp_vsimem): + + src_filename = str(tmp_vsimem / "src.tif") + src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 3, 1, 2) + src_ds.GetRasterBand(1).WriteRaster(0, 0, 3, 1, b"\x01\x02\x03") + src_ds.GetRasterBand(2).WriteRaster(0, 0, 3, 1, b"\x01\x02\x03") + src_ds.Close() + + with pytest.raises(Exception, match="Step 'nr 1' lacks required Argument"): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>LUT</Algorithm> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + with pytest.raises(Exception, match="Invalid value for argument 'lut_1'"): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>LUT</Algorithm> + <Argument name="lut_1">1.5:10,2.5</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + with pytest.raises(Exception, match="Invalid band in argument 'lut_3'"): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>LUT</Algorithm> + <Argument name="lut_1">1.5:10,2.5:20</Argument> + <Argument name="lut_3">1.5:10,2.5:20</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + with pytest.raises(Exception, match="Missing lut_XX element"): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>LUT</Algorithm> + <Argument name="lut_1">1.5:10,2.5:20</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + +############################################################################### +# Test nominal case of Dehazing algorithm + + +def test_vrtprocesseddataset_dehazing_nominal(tmp_vsimem): + + src_filename = str(tmp_vsimem / "src.tif") + src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 6, 1, 2) + src_ds.GetRasterBand(1).WriteRaster(0, 0, 6, 1, b"\x01\x02\x03\xff\x01\x01") + src_ds.GetRasterBand(2).WriteRaster(0, 0, 6, 1, b"\x01\x02\x03\xff\x01\x01") + 
src_ds.GetRasterBand(1).SetNoDataValue(255) + src_ds.GetRasterBand(2).SetNoDataValue(255) + src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + src_ds.Close() + + gain_filename = str(tmp_vsimem / "gain.tif") + gain_ds = gdal.GetDriverByName("GTiff").Create(gain_filename, 6, 1, 2) + gain_ds.GetRasterBand(1).WriteRaster(0, 0, 6, 1, b"\x02\x04\x06\x01\xfe\x01") + gain_ds.GetRasterBand(2).WriteRaster(0, 0, 6, 1, b"\x03\x05\x07\x01\xfe\x01") + gain_ds.GetRasterBand(1).SetNoDataValue(254) + gain_ds.GetRasterBand(2).SetNoDataValue(254) + gain_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + gain_ds.Close() + + offset_filename = str(tmp_vsimem / "offset.tif") + offset_ds = gdal.GetDriverByName("GTiff").Create(offset_filename, 6, 1, 2) + offset_ds.GetRasterBand(1).WriteRaster(0, 0, 6, 1, b"\x01\x02\x03\x01\x01\xfd") + offset_ds.GetRasterBand(2).WriteRaster(0, 0, 6, 1, b"\x02\x03\x04\x01\x01\xfd") + offset_ds.GetRasterBand(1).SetNoDataValue(253) + offset_ds.GetRasterBand(2).SetNoDataValue(253) + offset_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + offset_ds.Close() + + ds = gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>Dehazing</Algorithm> + <Argument name="gain_dataset_filename_1">{gain_filename}</Argument> + <Argument name="gain_dataset_band_1">1</Argument> + <Argument name="gain_dataset_filename_2">{gain_filename}</Argument> + <Argument name="gain_dataset_band_2">2</Argument> + <Argument name="offset_dataset_filename_1">{offset_filename}</Argument> + <Argument name="offset_dataset_band_1">1</Argument> + <Argument name="offset_dataset_filename_2">{offset_filename}</Argument> + <Argument name="offset_dataset_band_2">2</Argument> + <Argument name="min">2</Argument> + <Argument name="max">16</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + assert struct.unpack("B" * 6, ds.GetRasterBand(1).ReadRaster()) == ( + 2, + 6, + 15, + 255, + 255, + 255, + ) + assert struct.unpack("B" * 6, ds.GetRasterBand(2).ReadRaster()) == ( + 2, + 7, + 16, + 255, + 255, + 255, + ) + + +############################################################################### +# Test nominal case of Dehazing algorithm where gain and offset have a lower +# resolution than the input dataset + + +def test_vrtprocesseddataset_dehazing_different_resolution(tmp_vsimem): + + src_filename = str(tmp_vsimem / "src.tif") + src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 6, 2, 1) + src_ds.GetRasterBand(1).WriteRaster(0, 0, 6, 2, b"\x01\x01\x02\x02\x03\x03" * 2) + src_ds.SetGeoTransform([0, 0.5, 0, 0, 0, 0.5]) + src_ds.Close() + + gain_filename = str(tmp_vsimem / "gain.tif") + gain_ds = gdal.GetDriverByName("GTiff").Create(gain_filename, 3, 1, 1) + gain_ds.GetRasterBand(1).WriteRaster(0, 0, 3, 1, b"\x02\x04\x06") + gain_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + gain_ds.Close() + + offset_filename = str(tmp_vsimem / "offset.tif") + offset_ds = gdal.GetDriverByName("GTiff").Create(offset_filename, 3, 1, 1) + offset_ds.GetRasterBand(1).WriteRaster(0, 0, 3, 1, b"\x01\x02\x03") + offset_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + offset_ds.Close() + + ds = gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>Dehazing</Algorithm> + <Argument name="gain_dataset_filename_1">{gain_filename}</Argument> + <Argument name="gain_dataset_band_1">1</Argument> + <Argument 
name="offset_dataset_filename_1">{offset_filename}</Argument> + <Argument name="offset_dataset_band_1">1</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + assert struct.unpack("B" * 12, ds.GetRasterBand(1).ReadRaster()) == ( + 1, + 2, + 6, + 8, + 15, + 15, + 1, + 2, + 6, + 8, + 15, + 15, + ) + + +############################################################################### +# Test error cases of Dehazing algorithm + + +def test_vrtprocesseddataset_dehazing_error(tmp_vsimem): + + src_filename = str(tmp_vsimem / "src.tif") + src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 3, 1, 1) + src_ds.GetRasterBand(1).WriteRaster(0, 0, 3, 1, b"\x01\x02\x03") + src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + src_ds.Close() + + with pytest.raises( + Exception, + match="Step 'nr 1' lacks required Argument 'offset_dataset_band_{band}'", + ): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>Dehazing</Algorithm> + <Argument name="gain_dataset_filename_1">{src_filename}</Argument> + <Argument name="gain_dataset_band_1">1</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + with pytest.raises( + Exception, + match="Invalid band in argument 'gain_dataset_filename_2'", + ): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>Dehazing</Algorithm> + <Argument name="gain_dataset_filename_2">{src_filename}</Argument> + <Argument name="gain_dataset_band_1">1</Argument> + <Argument name="offset_dataset_filename_1">{src_filename}</Argument> + <Argument name="offset_dataset_band_1">1</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + with pytest.raises( + Exception, + match="Invalid band in argument 'gain_dataset_band_2'", + ): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>Dehazing</Algorithm> + <Argument name="gain_dataset_filename_1">{src_filename}</Argument> + <Argument name="gain_dataset_band_2">1</Argument> + <Argument name="offset_dataset_filename_1">{src_filename}</Argument> + <Argument name="offset_dataset_band_1">1</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + with pytest.raises( + Exception, + match="Invalid band in argument 'offset_dataset_filename_2'", + ): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>Dehazing</Algorithm> + <Argument name="gain_dataset_filename_1">{src_filename}</Argument> + <Argument name="gain_dataset_band_1">1</Argument> + <Argument name="offset_dataset_filename_2">{src_filename}</Argument> + <Argument name="offset_dataset_band_1">1</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + with pytest.raises( + Exception, + match="Invalid band in argument 'offset_dataset_band_2'", + ): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>Dehazing</Algorithm> + <Argument name="gain_dataset_filename_1">{src_filename}</Argument> + <Argument name="gain_dataset_band_1">1</Argument> + <Argument name="offset_dataset_filename_1">{src_filename}</Argument> + <Argument 
name="offset_dataset_band_2">1</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + with pytest.raises( + Exception, + match=r"Invalid band number \(2\) for a gain dataset", + ): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>Dehazing</Algorithm> + <Argument name="gain_dataset_filename_1">{src_filename}</Argument> + <Argument name="gain_dataset_band_1">2</Argument> + <Argument name="offset_dataset_filename_1">{src_filename}</Argument> + <Argument name="offset_dataset_band_1">1</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + with pytest.raises(Exception): # "No such file or directory'", but O/S dependent + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>Dehazing</Algorithm> + <Argument name="gain_dataset_filename_1">invalid</Argument> + <Argument name="gain_dataset_band_1">1</Argument> + <Argument name="offset_dataset_filename_1">{src_filename}</Argument> + <Argument name="offset_dataset_band_1">1</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + nogt_filename = str(tmp_vsimem / "nogt.tif") + ds = gdal.GetDriverByName("GTiff").Create(nogt_filename, 1, 1, 1) + ds.Close() + + with pytest.raises(Exception, match="lacks a geotransform"): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>Dehazing</Algorithm> + <Argument name="gain_dataset_filename_1">{nogt_filename}</Argument> + <Argument name="gain_dataset_band_1">1</Argument> + <Argument name="offset_dataset_filename_1">{nogt_filename}</Argument> + <Argument name="offset_dataset_band_1">1</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + +############################################################################### +# Test nominal cases of Trimming algorithm + + +def test_vrtprocesseddataset_trimming_nominal(tmp_vsimem): + + src_filename = str(tmp_vsimem / "src.tif") + src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 6, 1, 4) + + R = 100.0 + G = 150.0 + B = 200.0 + NIR = 100.0 + + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 6, 1, struct.pack("B" * 6, int(R), 150, 200, 0, 0, 0) + ) + src_ds.GetRasterBand(2).WriteRaster( + 0, 0, 6, 1, struct.pack("B" * 6, int(G), 200, 100, 0, 0, 0) + ) + src_ds.GetRasterBand(3).WriteRaster( + 0, 0, 6, 1, struct.pack("B" * 6, int(B), 100, 150, 0, 0, 0) + ) + src_ds.GetRasterBand(4).WriteRaster( + 0, 0, 6, 1, struct.pack("B" * 6, int(NIR), 150, 200, 0, 0, 0) + ) + src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + src_ds.Close() + + trimming_filename = str(tmp_vsimem / "trimming.tif") + trimming_ds = gdal.GetDriverByName("GTiff").Create(trimming_filename, 6, 1, 1) + + localMaxRGB = 205.0 + + trimming_ds.GetRasterBand(1).WriteRaster( + 0, 0, 6, 1, struct.pack("B" * 6, int(localMaxRGB), 210, 220, 0, 0, 0) + ) + trimming_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + trimming_ds.Close() + + top_rgb = 200.0 + tone_ceil = 190.0 + top_margin = 0.1 + + ds = gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>Trimming</Algorithm> + <Argument name="trimming_dataset_filename">{trimming_filename}</Argument> + <Argument 
name="top_rgb">{top_rgb}</Argument> + <Argument name="tone_ceil">{tone_ceil}</Argument> + <Argument name="top_margin">{top_margin}</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + # Do algorithm at hand + + # Extract local saturation value from trimming image + reducedRGB = min((1.0 - top_margin) * top_rgb / localMaxRGB, 1) + + # RGB bands specific process + maxRGB = max(R, G, B) + toneMaxRGB = min(tone_ceil / maxRGB, 1) + toneR = min(tone_ceil / R, 1) + toneG = min(tone_ceil / G, 1) + toneB = min(tone_ceil / B, 1) + outputR = min(reducedRGB * R * toneR / toneMaxRGB, top_rgb) + outputG = min(reducedRGB * G * toneG / toneMaxRGB, top_rgb) + outputB = min(reducedRGB * B * toneB / toneMaxRGB, top_rgb) + + # Other bands processing (NIR, ...): only apply RGB reduction factor + outputNIR = reducedRGB * NIR + + # print(outputR, outputG, outputB, outputNIR) + + assert ( + round(outputR) + == struct.unpack("B", ds.GetRasterBand(1).ReadRaster(0, 0, 1, 1))[0] + ) + assert ( + round(outputG) + == struct.unpack("B", ds.GetRasterBand(2).ReadRaster(0, 0, 1, 1))[0] + ) + assert ( + round(outputB) + == struct.unpack("B", ds.GetRasterBand(3).ReadRaster(0, 0, 1, 1))[0] + ) + assert ( + round(outputNIR) + == struct.unpack("B", ds.GetRasterBand(4).ReadRaster(0, 0, 1, 1))[0] + ) + + assert struct.unpack("B" * 6, ds.GetRasterBand(1).ReadRaster()) == ( + 92, # round(outputR) + 135, + 164, + 0, + 0, + 0, + ) + assert struct.unpack("B" * 6, ds.GetRasterBand(2).ReadRaster()) == ( + 139, # round(outputG) + 171, + 86, + 0, + 0, + 0, + ) + assert struct.unpack("B" * 6, ds.GetRasterBand(3).ReadRaster()) == ( + 176, # round(outputB) + 90, + 129, + 0, + 0, + 0, + ) + assert struct.unpack("B" * 6, ds.GetRasterBand(4).ReadRaster()) == ( + 88, # round(outputNIR) + 129, + 164, + 0, + 0, + 0, + ) + + +############################################################################### +# Test error cases of Trimming algorithm + + +def test_vrtprocesseddataset_trimming_errors(tmp_vsimem): + + src_filename = str(tmp_vsimem / "src.tif") + src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 6, 1, 4) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 6, 1, struct.pack("B" * 6, 100, 150, 200, 0, 0, 0) + ) + src_ds.GetRasterBand(2).WriteRaster( + 0, 0, 6, 1, struct.pack("B" * 6, 150, 200, 100, 0, 0, 0) + ) + src_ds.GetRasterBand(3).WriteRaster( + 0, 0, 6, 1, struct.pack("B" * 6, 200, 100, 150, 0, 0, 0) + ) + src_ds.GetRasterBand(4).WriteRaster( + 0, 0, 6, 1, struct.pack("B" * 6, 100, 150, 200, 0, 0, 0) + ) + src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + src_ds.Close() + + trimming_filename = str(tmp_vsimem / "trimming.tif") + trimming_ds = gdal.GetDriverByName("GTiff").Create(trimming_filename, 6, 1, 1) + trimming_ds.GetRasterBand(1).WriteRaster( + 0, 0, 6, 1, struct.pack("B" * 6, 200, 210, 220, 0, 0, 0) + ) + trimming_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + trimming_ds.Close() + + trimming_two_bands_filename = str(tmp_vsimem / "trimming_two_bands.tif") + trimming_ds = gdal.GetDriverByName("GTiff").Create( + trimming_two_bands_filename, 6, 1, 2 + ) + trimming_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + trimming_ds.Close() + + with pytest.raises(Exception): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>Trimming</Algorithm> + <Argument name="trimming_dataset_filename">invalid</Argument> + <Argument name="top_rgb">200</Argument> + <Argument name="tone_ceil">190</Argument> + <Argument 
name="top_margin">0.1</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + for val in (0, 5): + with pytest.raises(Exception, match="Invalid band in argument 'red_band'"): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>Trimming</Algorithm> + <Argument name="trimming_dataset_filename">{trimming_filename}</Argument> + <Argument name="red_band">{val}</Argument> + <Argument name="top_rgb">200</Argument> + <Argument name="tone_ceil">190</Argument> + <Argument name="top_margin">0.1</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + for val in (0, 5): + with pytest.raises(Exception, match="Invalid band in argument 'green_band'"): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>Trimming</Algorithm> + <Argument name="trimming_dataset_filename">{trimming_filename}</Argument> + <Argument name="green_band">{val}</Argument> + <Argument name="top_rgb">200</Argument> + <Argument name="tone_ceil">190</Argument> + <Argument name="top_margin">0.1</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + for val in (0, 5): + with pytest.raises(Exception, match="Invalid band in argument 'blue_band'"): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>Trimming</Algorithm> + <Argument name="trimming_dataset_filename">{trimming_filename}</Argument> + <Argument name="blue_band">{val}</Argument> + <Argument name="top_rgb">200</Argument> + <Argument name="tone_ceil">190</Argument> + <Argument name="top_margin">0.1</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + for (red_band, green_band, blue_band) in [(1, 1, 3), (3, 2, 3), (1, 3, 3)]: + with pytest.raises( + Exception, + match="red_band, green_band and blue_band must have distinct values", + ): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>Trimming</Algorithm> + <Argument name="trimming_dataset_filename">{trimming_filename}</Argument> + <Argument name="red_band">{red_band}</Argument> + <Argument name="green_band">{green_band}</Argument> + <Argument name="blue_band">{blue_band}</Argument> + <Argument name="top_rgb">200</Argument> + <Argument name="tone_ceil">190</Argument> + <Argument name="top_margin">0.1</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + with pytest.raises(Exception, match="Trimming dataset should have a single band"): + gdal.Open( + f"""<VRTDataset subclass='VRTProcessedDataset'> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step> + <Algorithm>Trimming</Algorithm> + <Argument name="trimming_dataset_filename">{trimming_two_bands_filename}</Argument> + <Argument name="top_rgb">200</Argument> + <Argument name="tone_ceil">190</Argument> + <Argument name="top_margin">0.1</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + ) + + +############################################################################### +# Test that serialization (for example due to statistics computation) properly +# works + + +def test_vrtprocesseddataset_serialize(tmp_vsimem): + + src_filename = str(tmp_vsimem / 
"src.tif") + src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 2, 1, 1) + src_ds.GetRasterBand(1).WriteRaster(0, 0, 2, 1, b"\x01\x02") + src_ds.Close() + + vrt_filename = str(tmp_vsimem / "the.vrt") + content = f"""<VRTDataset subclass='VRTProcessedDataset'> + <VRTRasterBand subClass='VRTProcessedRasterBand' dataType='Byte'/> + <Input> + <SourceFilename>{src_filename}</SourceFilename> + </Input> + <ProcessingSteps> + <Step name="Affine combination of band values"> + <Algorithm>BandAffineCombination</Algorithm> + <Argument name="coefficients_1">10,1</Argument> + </Step> + </ProcessingSteps> + </VRTDataset> + """ + with gdaltest.tempfile(vrt_filename, content): + ds = gdal.Open(vrt_filename) + assert struct.unpack("B" * 2, ds.GetRasterBand(1).ReadRaster()) == (11, 12) + assert ds.GetRasterBand(1).GetStatistics(False, False) == [0.0, 0.0, 0.0, -1.0] + ds.GetRasterBand(1).ComputeStatistics(False) + ds.Close() + + ds = gdal.Open(vrt_filename) + assert struct.unpack("B" * 2, ds.GetRasterBand(1).ReadRaster()) == (11, 12) + assert ds.GetRasterBand(1).GetStatistics(False, False) == [ + 11.0, + 12.0, + 11.5, + 0.5, + ] diff --git a/doc/source/drivers/raster/vrt.rst b/doc/source/drivers/raster/vrt.rst index 1c860c439ef2..6d4f26f25a31 100644 --- a/doc/source/drivers/raster/vrt.rst +++ b/doc/source/drivers/raster/vrt.rst @@ -1907,6 +1907,22 @@ See the dedicated :ref:`vrt_multidimensional` page. vrt_multidimensional +Processed dataset VRT +--------------------- + +.. versionadded:: 3.9 + +A VRT processed dataset is a specific variant of the :ref:`raster.vrt` format, +to apply chained processing steps that may apply to several bands at the same time. + +See the dedicated :ref:`vrt_processed_dataset` page. + +.. toctree:: + :maxdepth: 1 + :hidden: + + vrt_processed_dataset + vrt:// connection string ------------------------ diff --git a/doc/source/drivers/raster/vrt_processed_dataset.rst b/doc/source/drivers/raster/vrt_processed_dataset.rst new file mode 100644 index 000000000000..76e84ecfbbaa --- /dev/null +++ b/doc/source/drivers/raster/vrt_processed_dataset.rst @@ -0,0 +1,261 @@ +.. _vrt_processed_dataset: + +================================================================================ +VRT processed dataset +================================================================================ + +.. versionadded:: 3.9 + +A VRT processed dataset is a specific variant of the :ref:`raster.vrt` format, +to apply chained processing steps that may apply to several bands at the same time. + +The following built-in algorithms are introduced, and may typically be applied +in the following order: + +- Dehazing: remove haze effects by applying (subsampled) gain and offset + auxiliary datasets. + +- BandAffineCombination: perform an affine transformation combination of bands. + +- Trimming: apply local thresholding of saturation + +- LUT: apply a look-up table (band per band) + +More algorithms can be registered at run-time with the :cpp:func:`GDALVRTRegisterProcessedDatasetFunc` +function` + +Here's an example of such a file to apply various correction to a R,G,B,NIR dataset: + +.. 
code-block:: xml + + <VRTDataset subClass="VRTProcessedDataset"> + <Input> + <SourceFilename relativeToVRT="1">source.tif</SourceFilename> + </Input> + + <ProcessingSteps> + <Step name="Dehazing"> + <Algorithm>Dehazing</Algorithm> + + <Argument name="relativeToVRT">true</Argument> + + <Argument name="gain_dataset_filename_1">gains.tif</Argument> + <Argument name="gain_dataset_filename_2">gains.tif</Argument> + <Argument name="gain_dataset_filename_3">gains.tif</Argument> + <Argument name="gain_dataset_filename_4">gains.tif</Argument> + <Argument name="gain_dataset_band_1">1</Argument> + <Argument name="gain_dataset_band_2">2</Argument> + <Argument name="gain_dataset_band_3">3</Argument> + <Argument name="gain_dataset_band_4">4</Argument> + + <Argument name="offset_dataset_filename_1">offsets.tif</Argument> + <Argument name="offset_dataset_filename_2">offsets.tif</Argument> + <Argument name="offset_dataset_filename_3">offsets.tif</Argument> + <Argument name="offset_dataset_filename_4">offsets.tif</Argument> + <Argument name="offset_dataset_band_1">1</Argument> + <Argument name="offset_dataset_band_2">2</Argument> + <Argument name="offset_dataset_band_3">3</Argument> + <Argument name="offset_dataset_band_4">4</Argument> + + <Argument name="nodata">0</Argument> + <Argument name="min">1</Argument> + <Argument name="max">10000</Argument> + </Step> + + <Step name="Linear combination"> + <Algorithm>BandAffineCombination</Algorithm> + <Argument name="coefficients_1">0,1.2,-0.2,0.0,0.0</Argument> + <Argument name="coefficients_2">0,-0.03,1.03,0.0,0.0</Argument> + <Argument name="coefficients_3">0,0.0,0.0,1.0,0.0</Argument> + <Argument name="coefficients_4">0,0.0,0.0,0.0,1.0</Argument> + + <Argument name="min">1</Argument> + <Argument name="max">10000</Argument> + </Step> + + <Step name="Trimming"> + <Algorithm>Trimming</Algorithm> + <Argument name="relativeToVRT">true</Argument> + <Argument name="trimming_dataset_filename">trimming.tif</Argument> + <Argument name="tone_ceil">10000</Argument> + <Argument name="top_margin">0</Argument> + <Argument name="top_rgb">10000</Argument> + </Step> + + <Step name="LUT"> + <Algorithm>LUT</Algorithm> + <Argument name="lut_1"> + 0:0,10000.0:255 + </Argument> + <Argument name="lut_2"> + 0:0,10000.0:255 + </Argument> + <Argument name="lut_3"> + 0:0,10000.0:255 + </Argument> + <Argument name="lut_4"> + 0:0,10000.0:255 + </Argument> + </Step> + </ProcessingSteps> + + <VRTRasterBand dataType="Byte" band="1" subClass="VRTProcessedRasterBand"> + <ColorInterp>Red</ColorInterp> + </VRTRasterBand> + <VRTRasterBand dataType="Byte" band="2" subClass="VRTProcessedRasterBand"> + <ColorInterp>Green</ColorInterp> + </VRTRasterBand> + <VRTRasterBand dataType="Byte" band="3" subClass="VRTProcessedRasterBand"> + <ColorInterp>Blue</ColorInterp> + </VRTRasterBand> + <VRTRasterBand dataType="Byte" band="4" subClass="VRTProcessedRasterBand"> + </VRTRasterBand> + </VRTDataset> + +.vrt format +----------- + +The ``VRTDataset`` root element must have a ``subClass="VRTProcessedDataset"`` attribute. + +The following child elements of ``VRTDataset`` may be defined: ``SRS``, ``GeoTransform``, ``Metadata``. If they are not explicitly set, they are inferred from the input dataset. + +``VRTRasterBand`` elements may be explicitly defined, in particular if the data type of the virtual dataset after all processing steps is different from the input one, or if the number of output bands is different from the number of input bands. 
If there is no explicit ``VRTRasterBand`` element, the number and data types of input bands are used implicitly. When explicitly defined, ``VRTRasterBand`` elements must have a ``subClass="VRTProcessedRasterBand"`` attribute. + +The ``VRTDataset`` root element must also have the two following child elements: + +- ``Input``, which must have one and only one of ``SourceFilename`` or ``VRTDataset`` as its child element, to define the input dataset to which to apply the processing steps. + +- ``ProcessingSteps``, with at least one child ``Step`` element. + +Each ``Step`` must have an ``Algorithm`` child element and an optional ``name`` attribute. +The value of ``Algorithm`` must be a registered VRTProcessedDataset function. At the time of writing, the following 4 algorithms are defined: ``Dehazing``, ``BandAffineCombination``, ``Trimming`` and ``LUT``. + +A ``Step`` will generally have one or several ``Argument`` child elements, some of them required, others optional. Consult the documentation of each algorithm. + +Dehazing algorithm +------------------ + +Remove haze effects by applying (subsampled) gain and offset auxiliary datasets. + +The gain and offset auxiliary datasets must have a georeferencing consistent with that of +the input dataset, but may have a different resolution. + +The formula applied by that algorithm is: ``output_value = clamp(input_value * gain - offset, min, max)`` + +The following required arguments must be specified: + +- ``gain_dataset_filename_{band}``: Filename of the gain dataset, where {band} must be replaced by 1 to the number of input bands. + +- ``gain_dataset_band_{band}``: Band number corresponding to ``gain_dataset_filename_{band}``, where {band} must be replaced by 1 to the number of input bands. + +- ``offset_dataset_filename_{band}``: Filename of the offset dataset, where {band} must be replaced by 1 to the number of input bands. + +- ``offset_dataset_band_{band}``: Band number corresponding to ``offset_dataset_filename_{band}``, where {band} must be replaced by 1 to the number of input bands. + + +The following optional arguments may be specified: + +- ``relativeToVRT``: Whether gain and offset filenames are relative to the VRT. Allowed values are ``true`` and ``false``. Defaults to ``false``. + +- ``min``: Clamp minimum value, applied before writing the output value. + +- ``max``: Clamp maximum value, applied before writing the output value. + +- ``nodata``: Override the input nodata value coming from the previous step (or the input dataset for the first step). + +- ``gain_nodata``: Override the nodata value coming from the gain dataset(s). + +- ``offset_nodata``: Override the nodata value coming from the offset dataset(s). + + +BandAffineCombination algorithm +------------------------------- + +Perform an affine transformation combination of bands. + +The following required argument must be specified: + +- ``coefficients_{band}``: Comma-separated coefficients for combining bands, where {band} must be replaced by 1 to the number of output bands. The number of coefficients in each argument must be 1 + number_of_input_bands, where the first coefficient is a constant, the second coefficient is the weight of the first input band, the third coefficient is the weight of the second input band, etc. + + +The following optional arguments may be specified: + +- ``src_nodata``: Override the input nodata value coming from the previous step (or the input dataset for the first step). + +- ``dst_nodata``: Set the output nodata value.
+ +- ``replacement_nodata``: Value to substitute for a valid computed value that would be equal to dst_nodata. + +- ``dst_intended_datatype``: Intended datatype of the output (which might be different from the working data type). Used to infer an appropriate value for replacement_nodata when it is not specified. + +- ``min``: Clamp minimum value, applied before writing the output value. + +- ``max``: Clamp maximum value, applied before writing the output value. + + +Trimming algorithm +------------------ + +Apply local thresholding of saturation, with special processing of the R,G,B bands compared to the other bands. + +The pseudo-algorithm used for each pixel is: + +.. code-block:: + + // Extract local saturation value from trimming image + localMaxRGB = value from TrimmingImage + reducedRGB = min ( (1-top_margin)*top_rgb/localMaxRGB ; 1) + + // RGB bands specific process + RGB[] = get red, green, blue components of input buffer + maxRGB = max(RGB[]) + toneMaxRGB = min ( toneCeil/maxRGB ; 1) + toneBand[] = min ( toneCeil/RGB[] ; 1) + + output_value_RGB[] = min ( reducedRGB*RGB[]*toneBand[] / toneMaxRGB ; topRGB) + + // Other bands processing (NIR, ...): only apply RGB reduction factor + Trimmed(OtherBands[]) = reducedRGB * OtherBands[] + + +The following required arguments must be specified: + +- ``trimming_dataset_filename``: Filename of the trimming dataset. It must have a single band. It must have a georeferencing consistent with that of the input dataset, but may have a different resolution. + +- ``top_rgb``: Maximum saturating RGB output value. + +- ``tone_ceil``: Maximum threshold beyond which we give up saturation. + +- ``top_margin``: Margin to allow for dynamics in brightest areas (between 0 and 1, should be close to 0). + + +The following optional arguments may be specified: + +- ``relativeToVRT``: Whether the trimming dataset filename is relative to the VRT. Allowed values are ``true`` and ``false``. Defaults to ``false``. + +- ``red_band``: Index (one-based) of the red band. Defaults to 1. + +- ``green_band``: Index (one-based) of the green band. Defaults to 2. + +- ``blue_band``: Index (one-based) of the blue band. Defaults to 3. + +- ``nodata``: Override the input nodata value coming from the previous step (or the input dataset for the first step). + +- ``trimming_nodata``: Override the nodata value coming from the trimming dataset. + + +LUT +--- + +Apply a look-up table (band per band), typically to get from UInt16 to Byte data types. A usage example is given below. + +The following required argument must be specified: + +- ``lut_{band}``: List of the form ``[src value 1]:[dest value 1],[src value 2]:[dest value 2],....``. {band} must be replaced by 1 to the number of bands. + + +The following optional arguments may be specified: + +- ``src_nodata``: Override the input nodata value coming from the previous step (or the input dataset for the first step). + +- ``dst_nodata``: Set the output nodata value.
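+
+As a minimal illustration (a sketch, not a normative reference), the following Python
+snippet builds a VRT processed dataset in memory, applies a single ``LUT`` step to a
+hypothetical single-band UInt16 dataset named ``uint16_source.tif``, and reads back the
+rescaled Byte values:
+
+.. code-block:: python
+
+    from osgeo import gdal
+
+    # Hypothetical UInt16 single-band source; the filename is illustrative only.
+    vrt_xml = """<VRTDataset subClass="VRTProcessedDataset">
+        <Input>
+            <SourceFilename relativeToVRT="0">uint16_source.tif</SourceFilename>
+        </Input>
+        <ProcessingSteps>
+            <Step name="LUT">
+                <Algorithm>LUT</Algorithm>
+                <Argument name="lut_1">0:0,10000.0:255</Argument>
+            </Step>
+        </ProcessingSteps>
+        <VRTRasterBand dataType="Byte" band="1" subClass="VRTProcessedRasterBand"/>
+    </VRTDataset>"""
+
+    # The inline XML can be opened directly, like any other VRT.
+    ds = gdal.Open(vrt_xml)
+    rescaled_bytes = ds.GetRasterBand(1).ReadRaster()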
diff --git a/frmts/vrt/CMakeLists.txt b/frmts/vrt/CMakeLists.txt index ba770561f569..447d48eaedbe 100644 --- a/frmts/vrt/CMakeLists.txt +++ b/frmts/vrt/CMakeLists.txt @@ -14,6 +14,8 @@ add_gdal_driver( vrtdataset.cpp pixelfunctions.cpp vrtpansharpened.cpp + vrtprocesseddataset.cpp + vrtprocesseddatasetfunctions.cpp vrtmultidim.cpp gdaltileindexdataset.cpp STRONG_CXX_WFLAGS) diff --git a/frmts/vrt/data/gdalvrt.xsd b/frmts/vrt/data/gdalvrt.xsd index 5e204df9ab98..89ce1496a4b7 100644 --- a/frmts/vrt/data/gdalvrt.xsd +++ b/frmts/vrt/data/gdalvrt.xsd @@ -30,30 +30,76 @@ ****************************************************************************/ --> <xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" version="1.0"> - <xs:element name="VRTDataset"> - <xs:complexType> - <xs:sequence> - <xs:choice minOccurs="0" maxOccurs="unbounded"> - <xs:element name="SRS" type="SRSType"/> - <xs:element name="GeoTransform" type="xs:string"/> - <xs:element name="GCPList" type="GCPListType"/> - <xs:element name="BlockXSize" type="nonNegativeInteger32"/> - <xs:element name="BlockYSize" type="nonNegativeInteger32"/> - <xs:element name="Metadata" type="MetadataType"/> <!-- may be repeated --> - <xs:element name="VRTRasterBand" type="VRTRasterBandType"/> <!-- may be repeated --> - <xs:element name="MaskBand" type="MaskBandType"/> - <xs:element name="GDALWarpOptions" type="GDALWarpOptionsType"/> <!-- only if subClass="VRTWarpedDataset" --> - <xs:element name="PansharpeningOptions" type="PansharpeningOptionsType"/> <!-- only if subClass="VRTPansharpenedDataset" --> - <xs:element name="Group" type="GroupType"/> <!-- only for multidimensional dataset --> - <xs:element name="OverviewList" type="OverviewListType"/> - </xs:choice> - </xs:sequence> - <xs:attribute name="subClass" type="xs:string"/> - <xs:attribute name="rasterXSize" type="nonNegativeInteger32"/> - <xs:attribute name="rasterYSize" type="nonNegativeInteger32"/> - </xs:complexType> + <xs:element name="VRTDataset" type="VRTDatasetType"> + <xs:annotation> + <xs:documentation>Root element</xs:documentation> + </xs:annotation> </xs:element> + <xs:complexType name="VRTDatasetType"> + <xs:sequence> + <xs:choice minOccurs="0" maxOccurs="unbounded"> + <xs:element name="SRS" type="SRSType"/> + <xs:element name="GeoTransform" type="xs:string"/> + <xs:element name="GCPList" type="GCPListType"/> + <xs:element name="BlockXSize" type="nonNegativeInteger32"/> + <xs:element name="BlockYSize" type="nonNegativeInteger32"/> + <xs:element name="Metadata" type="MetadataType"> + <xs:annotation> + <xs:documentation>May be repeated</xs:documentation> + </xs:annotation> + </xs:element> + <xs:element name="VRTRasterBand" type="VRTRasterBandType"> + <xs:annotation> + <xs:documentation>May be repeated</xs:documentation> + </xs:annotation> + </xs:element> + <xs:element name="MaskBand" type="MaskBandType"/> + <xs:element name="GDALWarpOptions" type="GDALWarpOptionsType"> + <xs:annotation> + <xs:documentation>Allowed only if subClass="VRTWarpedDataset"</xs:documentation> + </xs:annotation> + </xs:element> + <xs:element name="PansharpeningOptions" type="PansharpeningOptionsType"> + <xs:annotation> + <xs:documentation>Allowed only if subClass="VRTPansharpenedDataset"</xs:documentation> + </xs:annotation> + </xs:element> + <xs:element name="Input" type="InputType"> + <xs:annotation> + <xs:documentation>Allowed only if subClass="VRTProcessedDataset"</xs:documentation> + </xs:annotation> + </xs:element> + <xs:element name="ProcessingSteps" 
type="ProcessingStepsType"> + <xs:annotation> + <xs:documentation>Allowed only if subClass="VRTProcessedDataset"</xs:documentation> + </xs:annotation> + </xs:element> + <xs:element name="Group" type="GroupType"> + <xs:annotation> + <xs:documentation>only for multidimensional dataset</xs:documentation> + </xs:annotation> + </xs:element> + <xs:element name="OverviewList" type="OverviewListType"/> + </xs:choice> + </xs:sequence> + <xs:attribute name="subClass" type="DatasetSubclassType"/> + <xs:attribute name="rasterXSize" type="nonNegativeInteger32"/> + <xs:attribute name="rasterYSize" type="nonNegativeInteger32"/> + </xs:complexType> + + <xs:simpleType name="DatasetSubclassType"> + <xs:restriction base="xs:string"> + <xs:enumeration value="VRTWarpedDataset"/> + <xs:enumeration value="VRTPansharpenedDataset"/> + <xs:enumeration value="VRTProcessedDataset"> + <xs:annotation> + <xs:documentation>Added in GDAL 3.9</xs:documentation> + </xs:annotation> + </xs:enumeration> + </xs:restriction> + </xs:simpleType> + <xs:complexType name="OverviewListType"> <xs:simpleContent> <xs:extension base="integerList"> @@ -158,6 +204,51 @@ </xs:sequence> </xs:complexType> + <xs:complexType name="InputType"> + <xs:sequence> + <xs:choice minOccurs="0" maxOccurs="1"> + <xs:element name="SourceFilename" type="SourceFilenameType"/> + <xs:element name="VRTDataset" type="VRTDatasetType"/> + </xs:choice> + </xs:sequence> + </xs:complexType> + + <xs:complexType name="ProcessingStepsType"> + <xs:sequence minOccurs="1" maxOccurs="unbounded"> + <xs:element name="Step" type="ProcessingStepType"/> + </xs:sequence> + </xs:complexType> + + <xs:complexType name="ProcessingStepType"> + <xs:annotation> + <xs:documentation>Processing step of a VRTProcessedDataset</xs:documentation> + </xs:annotation> + <xs:sequence> + <xs:element name="Algorithm" type="xs:string" minOccurs="1"> + <xs:annotation> + <xs:documentation>Builtin allowed names are BandAffineCombination, LUT, Dehazing, Trimming.
More algorithms can be registered at run-time.</xs:documentation> + </xs:annotation> + </xs:element> + <xs:element name="Argument" type="ArgumentType" maxOccurs="unbounded"/> + </xs:sequence> + <xs:attribute name="name" type="xs:string"/> + </xs:complexType> + + <xs:complexType name="ArgumentType"> + <xs:annotation> + <xs:documentation>Argument of a processing function</xs:documentation> + </xs:annotation> + <xs:simpleContent> + <xs:extension base="xs:string"> + <xs:attribute name="name" type="xs:string" use="required"> + <xs:annotation> + <xs:documentation>Allowed names are specific of each processing function</xs:documentation> + </xs:annotation> + </xs:attribute> + </xs:extension> + </xs:simpleContent> + </xs:complexType> + <xs:complexType name="MDIType"> <xs:simpleContent> <xs:extension base="xs:string"> @@ -234,6 +325,7 @@ <xs:enumeration value="VRTDerivedRasterBand"/> <xs:enumeration value="VRTRawRasterBand"/> <xs:enumeration value="VRTPansharpenedRasterBand"/> + <xs:enumeration value="VRTProcessedRasterBand"/> </xs:restriction> </xs:simpleType> diff --git a/frmts/vrt/vrtdataset.cpp b/frmts/vrt/vrtdataset.cpp index 88451a04470c..6c0c5bfed0dc 100644 --- a/frmts/vrt/vrtdataset.cpp +++ b/frmts/vrt/vrtdataset.cpp @@ -97,10 +97,6 @@ VRTDataset::~VRTDataset() { VRTDataset::FlushCache(true); - if (m_poSRS) - m_poSRS->Release(); - if (m_poGCP_SRS) - m_poGCP_SRS->Release(); CPLFree(m_pszVRTPath); delete m_poMaskBand; @@ -150,6 +146,17 @@ CPLErr VRTPansharpenedDataset::FlushCache(bool bAtClosing) /* FlushCache() */ /************************************************************************/ +CPLErr VRTProcessedDataset::FlushCache(bool bAtClosing) + +{ + return VRTFlushCacheStruct<VRTProcessedDataset>::FlushCache(*this, + bAtClosing); +} + +/************************************************************************/ +/* FlushCache() */ +/************************************************************************/ + template <class T> CPLErr VRTFlushCacheStruct<T>::FlushCache(T &obj, bool bAtClosing) { @@ -312,7 +319,7 @@ CPLXMLNode *VRTDataset::SerializeToXML(const char *pszVRTPathIn) /* -------------------------------------------------------------------- */ if (!m_asGCPs.empty()) { - GDALSerializeGCPListToXML(psDSTree, m_asGCPs, m_poGCP_SRS); + GDALSerializeGCPListToXML(psDSTree, m_asGCPs, m_poGCP_SRS.get()); } /* -------------------------------------------------------------------- */ @@ -405,10 +412,18 @@ CPLXMLNode *CPL_STDCALL VRTSerializeToXML(VRTDatasetH hDataset, /************************************************************************/ VRTRasterBand *VRTDataset::InitBand(const char *pszSubclass, int nBand, - bool bAllowPansharpened) + bool bAllowPansharpenedOrProcessed) { VRTRasterBand *poBand = nullptr; - if (EQUAL(pszSubclass, "VRTSourcedRasterBand")) + if (auto poProcessedDS = dynamic_cast<VRTProcessedDataset *>(this)) + { + if (bAllowPansharpenedOrProcessed && + EQUAL(pszSubclass, "VRTProcessedRasterBand")) + { + poBand = new VRTProcessedRasterBand(poProcessedDS, nBand); + } + } + else if (EQUAL(pszSubclass, "VRTSourcedRasterBand")) poBand = new VRTSourcedRasterBand(this, nBand); else if (EQUAL(pszSubclass, "VRTDerivedRasterBand")) poBand = new VRTDerivedRasterBand(this, nBand); @@ -417,13 +432,17 @@ VRTRasterBand *VRTDataset::InitBand(const char *pszSubclass, int nBand, else if (EQUAL(pszSubclass, "VRTWarpedRasterBand") && dynamic_cast<VRTWarpedDataset *>(this) != nullptr) poBand = new VRTWarpedRasterBand(this, nBand); - else if (bAllowPansharpened && + else if 
(bAllowPansharpenedOrProcessed && EQUAL(pszSubclass, "VRTPansharpenedRasterBand") && dynamic_cast<VRTPansharpenedDataset *>(this) != nullptr) poBand = new VRTPansharpenedRasterBand(this, nBand); - else + + if (!poBand) + { CPLError(CE_Failure, CPLE_AppDefined, "VRTRasterBand of unrecognized subclass '%s'.", pszSubclass); + } + return poBand; } @@ -443,9 +462,7 @@ CPLErr VRTDataset::XMLInit(const CPLXMLNode *psTree, const char *pszVRTPathIn) const CPLXMLNode *psSRSNode = CPLGetXMLNode(psTree, "SRS"); if (psSRSNode) { - if (m_poSRS) - m_poSRS->Release(); - m_poSRS = new OGRSpatialReference(); + m_poSRS.reset(new OGRSpatialReference()); m_poSRS->SetFromUserInput( CPLGetXMLValue(psSRSNode, nullptr, ""), OGRSpatialReference::SET_FROM_USER_INPUT_LIMITATIONS_get()); @@ -500,7 +517,9 @@ CPLErr VRTDataset::XMLInit(const CPLXMLNode *psTree, const char *pszVRTPathIn) /* -------------------------------------------------------------------- */ if (const CPLXMLNode *psGCPList = CPLGetXMLNode(psTree, "GCPList")) { - GDALDeserializeGCPListFromXML(psGCPList, m_asGCPs, &m_poGCP_SRS); + OGRSpatialReference *poSRS = nullptr; + GDALDeserializeGCPListFromXML(psGCPList, m_asGCPs, &poSRS); + m_poGCP_SRS.reset(poSRS); } /* -------------------------------------------------------------------- */ @@ -557,6 +576,13 @@ CPLErr VRTDataset::XMLInit(const CPLXMLNode *psTree, const char *pszVRTPathIn) { const char *pszSubclass = CPLGetXMLValue(psChild, "subclass", "VRTSourcedRasterBand"); + if (dynamic_cast<VRTProcessedDataset *>(this) && + !EQUAL(pszSubclass, "VRTProcessedRasterBand")) + { + CPLError(CE_Failure, CPLE_NotSupported, + "Only subClass=VRTProcessedRasterBand supported"); + return CE_Failure; + } VRTRasterBand *poBand = InitBand(pszSubclass, l_nBands + 1, true); if (poBand != nullptr && @@ -636,10 +662,7 @@ CPLErr VRTDataset::SetGCPs(int nGCPCountIn, const GDAL_GCP *pasGCPListIn, const OGRSpatialReference *poGCP_SRS) { - if (m_poGCP_SRS) - m_poGCP_SRS->Release(); - - m_poGCP_SRS = poGCP_SRS ? poGCP_SRS->Clone() : nullptr; + m_poGCP_SRS.reset(poGCP_SRS ? poGCP_SRS->Clone() : nullptr); m_asGCPs = gdal::GCP::fromC(pasGCPListIn, nGCPCountIn); SetNeedsFlush(); @@ -654,12 +677,7 @@ CPLErr VRTDataset::SetGCPs(int nGCPCountIn, const GDAL_GCP *pasGCPListIn, CPLErr VRTDataset::SetSpatialRef(const OGRSpatialReference *poSRS) { - if (m_poSRS) - m_poSRS->Release(); - if (poSRS) - m_poSRS = poSRS->Clone(); - else - m_poSRS = nullptr; + m_poSRS.reset(poSRS ? poSRS->Clone() : nullptr); SetNeedsFlush(); @@ -860,8 +878,7 @@ GDALDataset *VRTDataset::Open(GDALOpenInfo *poOpenInfo) /* -------------------------------------------------------------------- */ /* Turn the XML representation into a VRTDataset. */ /* -------------------------------------------------------------------- */ - VRTDataset *poDS = static_cast<VRTDataset *>( - OpenXML(pszXML, pszVRTPath, poOpenInfo->eAccess)); + VRTDataset *poDS = OpenXML(pszXML, pszVRTPath, poOpenInfo->eAccess); if (poDS != nullptr) poDS->m_bNeedsFlush = false; @@ -1497,8 +1514,8 @@ GDALDataset *VRTDataset::OpenVRTProtocol(const char *pszSpec) /* of the dataset. 
*/ /************************************************************************/ -GDALDataset *VRTDataset::OpenXML(const char *pszXML, const char *pszVRTPath, - GDALAccess eAccessIn) +VRTDataset *VRTDataset::OpenXML(const char *pszXML, const char *pszVRTPath, + GDALAccess eAccessIn) { /* -------------------------------------------------------------------- */ @@ -1519,8 +1536,10 @@ GDALDataset *VRTDataset::OpenXML(const char *pszXML, const char *pszVRTPath, const bool bIsPansharpened = strcmp(pszSubClass, "VRTPansharpenedDataset") == 0; + const bool bIsProcessed = strcmp(pszSubClass, "VRTProcessedDataset") == 0; - if (!bIsPansharpened && CPLGetXMLNode(psRoot, "Group") == nullptr && + if (!bIsPansharpened && !bIsProcessed && + CPLGetXMLNode(psRoot, "Group") == nullptr && (CPLGetXMLNode(psRoot, "rasterXSize") == nullptr || CPLGetXMLNode(psRoot, "rasterYSize") == nullptr || CPLGetXMLNode(psRoot, "VRTRasterBand") == nullptr)) @@ -1537,7 +1556,8 @@ GDALDataset *VRTDataset::OpenXML(const char *pszXML, const char *pszVRTPath, const int nXSize = atoi(CPLGetXMLValue(psRoot, "rasterXSize", "0")); const int nYSize = atoi(CPLGetXMLValue(psRoot, "rasterYSize", "0")); - if (!bIsPansharpened && CPLGetXMLNode(psRoot, "VRTRasterBand") != nullptr && + if (!bIsPansharpened && !bIsProcessed && + CPLGetXMLNode(psRoot, "VRTRasterBand") != nullptr && !GDALCheckDatasetDimensions(nXSize, nYSize)) { return nullptr; @@ -1548,6 +1568,8 @@ GDALDataset *VRTDataset::OpenXML(const char *pszXML, const char *pszVRTPath, poDS = new VRTWarpedDataset(nXSize, nYSize); else if (bIsPansharpened) poDS = new VRTPansharpenedDataset(nXSize, nYSize); + else if (bIsProcessed) + poDS = new VRTProcessedDataset(nXSize, nYSize); else { poDS = new VRTDataset(nXSize, nYSize); @@ -2783,4 +2805,94 @@ void VRTDataset::ClearStatistics() GDALDataset::ClearStatistics(); } +/************************************************************************/ +/* BuildSourceFilename() */ +/************************************************************************/ + +/* static */ +std::string VRTDataset::BuildSourceFilename(const char *pszFilename, + const char *pszVRTPath, + bool bRelativeToVRT) +{ + std::string osSrcDSName; + if (pszVRTPath != nullptr && bRelativeToVRT) + { + // Try subdatasetinfo API first + // Note: this will become the only branch when subdatasetinfo will become + // available for NITF_IM, RASTERLITE and TILEDB + const auto oSubDSInfo{GDALGetSubdatasetInfo(pszFilename)}; + if (oSubDSInfo && !oSubDSInfo->GetPathComponent().empty()) + { + auto path{oSubDSInfo->GetPathComponent()}; + osSrcDSName = oSubDSInfo->ModifyPathComponent( + CPLProjectRelativeFilename(pszVRTPath, path.c_str())); + GDALDestroySubdatasetInfo(oSubDSInfo); + } + else + { + bool bDone = false; + for (const char *pszSyntax : VRTDataset::apszSpecialSyntax) + { + CPLString osPrefix(pszSyntax); + osPrefix.resize(strchr(pszSyntax, ':') - pszSyntax + 1); + if (pszSyntax[osPrefix.size()] == '"') + osPrefix += '"'; + if (EQUALN(pszFilename, osPrefix, osPrefix.size())) + { + if (STARTS_WITH_CI(pszSyntax + osPrefix.size(), "{ANY}")) + { + const char *pszLastPart = strrchr(pszFilename, ':') + 1; + // CSV:z:/foo.xyz + if ((pszLastPart[0] == '/' || pszLastPart[0] == '\\') && + pszLastPart - pszFilename >= 3 && + pszLastPart[-3] == ':') + { + pszLastPart -= 2; + } + CPLString osPrefixFilename = pszFilename; + osPrefixFilename.resize(pszLastPart - pszFilename); + osSrcDSName = + osPrefixFilename + + CPLProjectRelativeFilename(pszVRTPath, pszLastPart); + bDone = true; + } + else if 
(STARTS_WITH_CI(pszSyntax + osPrefix.size(), + "{FILENAME}")) + { + CPLString osFilename(pszFilename + osPrefix.size()); + size_t nPos = 0; + if (osFilename.size() >= 3 && osFilename[1] == ':' && + (osFilename[2] == '\\' || osFilename[2] == '/')) + nPos = 2; + nPos = osFilename.find( + pszSyntax[osPrefix.size() + strlen("{FILENAME}")], + nPos); + if (nPos != std::string::npos) + { + const CPLString osSuffix = osFilename.substr(nPos); + osFilename.resize(nPos); + osSrcDSName = osPrefix + + CPLProjectRelativeFilename( + pszVRTPath, osFilename) + + osSuffix; + bDone = true; + } + } + break; + } + } + if (!bDone) + { + osSrcDSName = + CPLProjectRelativeFilename(pszVRTPath, pszFilename); + } + } + } + else + { + osSrcDSName = pszFilename; + } + return osSrcDSName; +} + /*! @endcond */ diff --git a/frmts/vrt/vrtdataset.h b/frmts/vrt/vrtdataset.h index 7ef902a63b47..f9df8ac59f47 100644 --- a/frmts/vrt/vrtdataset.h +++ b/frmts/vrt/vrtdataset.h @@ -47,6 +47,7 @@ #include <vector> CPLErr GDALRegisterDefaultPixelFunc(); +void GDALVRTRegisterDefaultProcessedDatasetFuncs(); CPLString VRTSerializeNoData(double dfVal, GDALDataType eDataType, int nPrecision); @@ -202,6 +203,7 @@ template <class T> struct VRTFlushCacheStruct class VRTWarpedDataset; class VRTPansharpenedDataset; +class VRTProcessedDataset; class VRTGroup; class CPL_DLL VRTDataset CPL_NON_FINAL : public GDALDataset @@ -210,16 +212,14 @@ class CPL_DLL VRTDataset CPL_NON_FINAL : public GDALDataset friend struct VRTFlushCacheStruct<VRTDataset>; friend struct VRTFlushCacheStruct<VRTWarpedDataset>; friend struct VRTFlushCacheStruct<VRTPansharpenedDataset>; + friend struct VRTFlushCacheStruct<VRTProcessedDataset>; friend class VRTSourcedRasterBand; + friend class VRTSimpleSource; friend VRTDatasetH CPL_STDCALL VRTCreate(int nXSize, int nYSize); - OGRSpatialReference *m_poSRS = nullptr; - - int m_bGeoTransformSet = false; - double m_adfGeoTransform[6]; - std::vector<gdal::GCP> m_asGCPs{}; - OGRSpatialReference *m_poGCP_SRS = nullptr; + std::unique_ptr<OGRSpatialReference, OGRSpatialReferenceReleaser> + m_poGCP_SRS{}; bool m_bNeedsFlush = false; bool m_bWritable = true; @@ -247,8 +247,13 @@ class CPL_DLL VRTDataset CPL_NON_FINAL : public GDALDataset VRTSource::WorkingState m_oWorkingState{}; + static constexpr const char *const apszSpecialSyntax[] = { + "NITF_IM:{ANY}:{FILENAME}", "PDF:{ANY}:{FILENAME}", + "RASTERLITE:{FILENAME},{ANY}", "TILEDB:\"{FILENAME}\":{ANY}", + "TILEDB:{FILENAME}:{ANY}"}; + VRTRasterBand *InitBand(const char *pszSubclass, int nBand, - bool bAllowPansharpened); + bool bAllowPansharpenedOrProcessed); static GDALDataset *OpenVRTProtocol(const char *pszSpec); bool AddVirtualOverview(int nOvFactor, const char *pszResampling); @@ -263,6 +268,11 @@ class CPL_DLL VRTDataset CPL_NON_FINAL : public GDALDataset int m_nBlockXSize = 0; int m_nBlockYSize = 0; + std::unique_ptr<OGRSpatialReference, OGRSpatialReferenceReleaser> m_poSRS{}; + + int m_bGeoTransformSet = false; + double m_adfGeoTransform[6]; + virtual int CloseDependentDatasets() override; public: @@ -287,7 +297,7 @@ class CPL_DLL VRTDataset CPL_NON_FINAL : public GDALDataset const OGRSpatialReference *GetSpatialRef() const override { - return m_poSRS; + return m_poSRS.get(); } CPLErr SetSpatialRef(const OGRSpatialReference *poSRS) override; @@ -306,7 +316,7 @@ class CPL_DLL VRTDataset CPL_NON_FINAL : public GDALDataset const OGRSpatialReference *GetGCPSpatialRef() const override { - return m_poGCP_SRS; + return m_poGCP_SRS.get(); } virtual const GDAL_GCP *GetGCPs() override; 
@@ -375,8 +385,8 @@ class CPL_DLL VRTDataset CPL_NON_FINAL : public GDALDataset static int Identify(GDALOpenInfo *); static GDALDataset *Open(GDALOpenInfo *); - static GDALDataset *OpenXML(const char *, const char * = nullptr, - GDALAccess eAccess = GA_ReadOnly); + static VRTDataset *OpenXML(const char *, const char * = nullptr, + GDALAccess eAccess = GA_ReadOnly); static GDALDataset *Create(const char *pszName, int nXSize, int nYSize, int nBands, GDALDataType eType, char **papszOptions); @@ -385,6 +395,10 @@ class CPL_DLL VRTDataset CPL_NON_FINAL : public GDALDataset CSLConstList papszRootGroupOptions, CSLConstList papszOptions); static CPLErr Delete(const char *pszFilename); + + static std::string BuildSourceFilename(const char *pszFilename, + const char *pszVRTPath, + bool bRelativeToVRT); }; /************************************************************************/ @@ -526,6 +540,132 @@ class VRTPansharpenedDataset final : public VRTDataset } }; +/************************************************************************/ +/* VRTPansharpenedDataset */ +/************************************************************************/ + +/** Specialized implementation of VRTDataset that chains several processing + * steps applied on all bands at a time. + * + * @since 3.9 + */ +class VRTProcessedDataset final : public VRTDataset +{ + public: + VRTProcessedDataset(int nXSize, int nYSize); + ~VRTProcessedDataset() override; + + virtual CPLErr FlushCache(bool bAtClosing) override; + + virtual CPLErr XMLInit(const CPLXMLNode *, const char *) override; + virtual CPLXMLNode *SerializeToXML(const char *pszVRTPath) override; + + void GetBlockSize(int *, int *) const; + + // GByte whose initialization constructor does nothing +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Weffc++" +#endif + struct NoInitByte + { + GByte value; + + // cppcheck-suppress uninitMemberVar + NoInitByte() + { + // do nothing + /* coverity[uninit_member] */ + } + + inline operator GByte() const + { + return value; + } + }; +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif + + private: + friend class VRTProcessedRasterBand; + + //! Data for a processing step. + struct Step + { + //! Algorithm name + std::string osAlgorithm{}; + + //! Arguments to pass to the processing function. + CPLStringList aosArguments{}; + + //! Data type of the input buffer. + GDALDataType eInDT = GDT_Unknown; + + //! Data type of the output buffer. + GDALDataType eOutDT = GDT_Unknown; + + //! Number of input bands. + int nInBands = 0; + + //! Number of output bands. + int nOutBands = 0; + + //! Nodata values (nInBands) of the input bands. + std::vector<double> adfInNoData{}; + + //! Nodata values (nOutBands) of the output bands. + std::vector<double> adfOutNoData{}; + + //! Working data structure (private data of the implementation of the function) + VRTPDWorkingDataPtr pWorkingData = nullptr; + + // NOTE: if adding a new member, edit the move constructor and + // assignment operators! + + Step() = default; + ~Step(); + Step(Step &&); + Step &operator=(Step &&); + + private: + Step(const Step &) = delete; + Step &operator=(const Step &) = delete; + void deinit(); + }; + + //! Directory of the VRT + std::string m_osVRTPath{}; + + //! Source dataset + std::unique_ptr<GDALDataset> m_poSrcDS{}; + + //! Processing steps. + std::vector<Step> m_aoSteps{}; + + //! Backup XML tree passed to XMLInit() + CPLXMLTreeCloser m_oXMLTree{nullptr}; + + //! 
Overview datasets (dynamically generated from the ones of m_poSrcDS) + std::vector<std::unique_ptr<GDALDataset>> m_apoOverviewDatasets{}; + + //! Input buffer of a processing step + std::vector<NoInitByte> m_abyInput{}; + + //! Output buffer of a processing step + std::vector<NoInitByte> m_abyOutput{}; + + CPLErr Init(const CPLXMLNode *, const char *, + const VRTProcessedDataset *poParentDS, + GDALDataset *poParentSrcDS, int iOvrLevel); + + bool ParseStep(const CPLXMLNode *psStep, bool bIsFinalStep, + GDALDataType &eCurrentDT, int &nCurrentBandCount, + std::vector<double> &adfInNoData, + std::vector<double> &adfOutNoData); + bool ProcessRegion(int nXOff, int nYOff, int nBufXSize, int nBufYSize); +}; + /************************************************************************/ /* VRTRasterBand */ /* */ @@ -895,6 +1035,26 @@ class VRTPansharpenedRasterBand final : public VRTRasterBand } }; +/************************************************************************/ +/* VRTProcessedRasterBand */ +/************************************************************************/ + +class VRTProcessedRasterBand final : public VRTRasterBand +{ + public: + VRTProcessedRasterBand(VRTProcessedDataset *poDS, int nBand, + GDALDataType eDataType = GDT_Unknown); + + virtual CPLErr IReadBlock(int, int, void *) override; + + virtual int GetOverviewCount() override; + virtual GDALRasterBand *GetOverview(int) override; + + virtual CPLXMLNode *SerializeToXML(const char *pszVRTPath, + bool &bHasWarnedAboutRAMUsage, + size_t &nAccRAMUsage) override; +}; + /************************************************************************/ /* VRTDerivedRasterBand */ /************************************************************************/ diff --git a/frmts/vrt/vrtdriver.cpp b/frmts/vrt/vrtdriver.cpp index 2883944b4e13..5e69096de56a 100644 --- a/frmts/vrt/vrtdriver.cpp +++ b/frmts/vrt/vrtdriver.cpp @@ -34,6 +34,8 @@ #include "gdal_alg_priv.h" #include "gdal_frmts.h" +#include <mutex> + /*! @cond Doxygen_Suppress */ /************************************************************************/ @@ -504,8 +506,16 @@ void GDALRegister_VRT() if (GDALGetDriverByName("VRT") != nullptr) return; - // First register the pixel functions - GDALRegisterDefaultPixelFunc(); + static std::once_flag flag; + std::call_once(flag, + []() + { + // First register the pixel functions + GDALRegisterDefaultPixelFunc(); + + // Register functions for VRTProcessedDataset + GDALVRTRegisterDefaultProcessedDatasetFuncs(); + }); VRTDriver *poDriver = new VRTDriver(); diff --git a/frmts/vrt/vrtprocesseddataset.cpp b/frmts/vrt/vrtprocesseddataset.cpp new file mode 100644 index 000000000000..8a3fa27a0258 --- /dev/null +++ b/frmts/vrt/vrtprocesseddataset.cpp @@ -0,0 +1,1342 @@ +/****************************************************************************** + * + * Project: Virtual GDAL Datasets + * Purpose: Implementation of VRTProcessedDataset. 
+ * Author: Even Rouault <even.rouault at spatialys.com> + * + ****************************************************************************** + * Copyright (c) 2024, Even Rouault <even.rouault at spatialys.com> + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + ****************************************************************************/ + +#include "cpl_minixml.h" +#include "cpl_string.h" +#include "vrtdataset.h" + +#include <algorithm> +#include <limits> +#include <map> +#include <vector> + +/************************************************************************/ +/* VRTProcessedDatasetFunc */ +/************************************************************************/ + +//! Structure holding information for a VRTProcessedDataset function. +struct VRTProcessedDatasetFunc +{ + //! Processing function name + std::string osFuncName{}; + + //! User data to provide to pfnInit, pfnFree, pfnProcess callbacks. + void *pUserData = nullptr; + + //! Whether XML metadata has been specified + bool bMetadataSpecified = false; + + //! Map of (constant argument name, constant value) + std::map<std::string, std::string> oMapConstantArguments{}; + + //! Set of builtin argument names (e.g "offset", "scale", "nodata") + std::set<std::string> oSetBuiltinArguments{}; + + //! Arguments defined in the VRT + struct OtherArgument + { + std::string osType{}; + bool bRequired = false; + }; + + std::map<std::string, OtherArgument> oOtherArguments{}; + + //! Requested input data type. + GDALDataType eRequestedInputDT = GDT_Unknown; + + //! List of supported input datatypes. Empty if no restriction. + std::vector<GDALDataType> aeSupportedInputDT{}; + + //! List of supported input band counts. Empty if no restriction. + std::vector<int> anSupportedInputBandCount{}; + + //! Optional initialization function + GDALVRTProcessedDatasetFuncInit pfnInit = nullptr; + + //! Optional free function + GDALVRTProcessedDatasetFuncFree pfnFree = nullptr; + + //! 
Required processing function + GDALVRTProcessedDatasetFuncProcess pfnProcess = nullptr; +}; + +/************************************************************************/ +/* GetGlobalMapProcessedDatasetFunc() */ +/************************************************************************/ + +/** Return the registry of VRTProcessedDatasetFunc functions */ +static std::map<std::string, VRTProcessedDatasetFunc> & +GetGlobalMapProcessedDatasetFunc() +{ + static std::map<std::string, VRTProcessedDatasetFunc> goMap; + return goMap; +} + +/************************************************************************/ +/* Step::~Step() */ +/************************************************************************/ + +/*! @cond Doxygen_Suppress */ + +/** Step destructor */ +VRTProcessedDataset::Step::~Step() +{ + deinit(); +} + +/************************************************************************/ +/* Step::deinit() */ +/************************************************************************/ + +/** Free pWorkingData */ +void VRTProcessedDataset::Step::deinit() +{ + if (pWorkingData) + { + const auto &oMapFunctions = GetGlobalMapProcessedDatasetFunc(); + const auto oIterFunc = oMapFunctions.find(osAlgorithm); + if (oIterFunc != oMapFunctions.end()) + { + if (oIterFunc->second.pfnFree) + { + oIterFunc->second.pfnFree(osAlgorithm.c_str(), + oIterFunc->second.pUserData, + pWorkingData); + } + } + else + { + CPLAssert(false); + } + pWorkingData = nullptr; + } +} + +/************************************************************************/ +/* Step::Step(Step&& other) */ +/************************************************************************/ + +/** Move constructor */ +VRTProcessedDataset::Step::Step(Step &&other) + : osAlgorithm(std::move(other.osAlgorithm)), + aosArguments(std::move(other.aosArguments)), eInDT(other.eInDT), + eOutDT(other.eOutDT), nInBands(other.nInBands), + nOutBands(other.nOutBands), adfInNoData(other.adfInNoData), + adfOutNoData(other.adfOutNoData), pWorkingData(other.pWorkingData) +{ + other.pWorkingData = nullptr; +} + +/************************************************************************/ +/* Step operator=(Step&& other) */ +/************************************************************************/ + +/** Move assignment operator */ +VRTProcessedDataset::Step &VRTProcessedDataset::Step::operator=(Step &&other) +{ + if (&other != this) + { + deinit(); + osAlgorithm = std::move(other.osAlgorithm); + aosArguments = std::move(other.aosArguments); + eInDT = other.eInDT; + eOutDT = other.eOutDT; + nInBands = other.nInBands; + nOutBands = other.nOutBands; + adfInNoData = std::move(other.adfInNoData); + adfOutNoData = std::move(other.adfOutNoData); + std::swap(pWorkingData, other.pWorkingData); + } + return *this; +} + +/************************************************************************/ +/* VRTProcessedDataset() */ +/************************************************************************/ + +/** Constructor */ +VRTProcessedDataset::VRTProcessedDataset(int nXSize, int nYSize) + : VRTDataset(nXSize, nYSize) +{ +} + +/************************************************************************/ +/* ~VRTProcessedDataset() */ +/************************************************************************/ + +VRTProcessedDataset::~VRTProcessedDataset() + +{ + VRTProcessedDataset::FlushCache(true); + VRTProcessedDataset::CloseDependentDatasets(); +} + +/************************************************************************/ +/* XMLInit() */ 
+/************************************************************************/ + +/** Instantiate object from XML tree */ +CPLErr VRTProcessedDataset::XMLInit(const CPLXMLNode *psTree, + const char *pszVRTPathIn) + +{ + if (Init(psTree, pszVRTPathIn, nullptr, nullptr, -1) != CE_None) + return CE_Failure; + + const auto poSrcFirstBand = m_poSrcDS->GetRasterBand(1); + const int nOvrCount = poSrcFirstBand->GetOverviewCount(); + for (int i = 0; i < nOvrCount; ++i) + { + auto poOvrDS = std::make_unique<VRTProcessedDataset>(0, 0); + if (poOvrDS->Init(psTree, pszVRTPathIn, this, m_poSrcDS.get(), i) != + CE_None) + break; + m_apoOverviewDatasets.emplace_back(std::move(poOvrDS)); + } + + return CE_None; +} + +/** Instantiate object from XML tree */ +CPLErr VRTProcessedDataset::Init(const CPLXMLNode *psTree, + const char *pszVRTPathIn, + const VRTProcessedDataset *poParentDS, + GDALDataset *poParentSrcDS, int iOvrLevel) + +{ + const CPLXMLNode *psInput = CPLGetXMLNode(psTree, "Input"); + if (!psInput) + { + CPLError(CE_Failure, CPLE_AppDefined, "Input element missing"); + return CE_Failure; + } + + if (pszVRTPathIn) + m_osVRTPath = pszVRTPathIn; + + if (poParentSrcDS) + { + m_poSrcDS.reset( + GDALCreateOverviewDataset(poParentSrcDS, iOvrLevel, true)); + } + else if (const CPLXMLNode *psSourceFileNameNode = + CPLGetXMLNode(psInput, "SourceFilename")) + { + const bool bRelativeToVRT = CPL_TO_BOOL( + atoi(CPLGetXMLValue(psSourceFileNameNode, "relativetoVRT", "0"))); + const std::string osFilename = VRTDataset::BuildSourceFilename( + CPLGetXMLValue(psInput, "SourceFilename", ""), pszVRTPathIn, + bRelativeToVRT); + m_poSrcDS.reset(GDALDataset::Open( + osFilename.c_str(), GDAL_OF_RASTER | GDAL_OF_VERBOSE_ERROR, nullptr, + nullptr, nullptr)); + } + else if (const CPLXMLNode *psVRTDataset = + CPLGetXMLNode(psInput, "VRTDataset")) + { + CPLXMLNode sVRTDatasetTmp = *psVRTDataset; + sVRTDatasetTmp.psNext = nullptr; + char *pszXML = CPLSerializeXMLTree(&sVRTDatasetTmp); + m_poSrcDS.reset(VRTDataset::OpenXML(pszXML, pszVRTPathIn, GA_ReadOnly)); + CPLFree(pszXML); + } + else + { + CPLError( + CE_Failure, CPLE_AppDefined, + "Input element should have a SourceFilename or VRTDataset element"); + return CE_Failure; + } + + if (!m_poSrcDS) + return CE_Failure; + + if (nRasterXSize == 0 && nRasterYSize == 0) + { + nRasterXSize = m_poSrcDS->GetRasterXSize(); + nRasterYSize = m_poSrcDS->GetRasterYSize(); + } + else if (nRasterXSize != m_poSrcDS->GetRasterXSize() || + nRasterYSize != m_poSrcDS->GetRasterYSize()) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Inconsistent declared VRT dimensions with input dataset"); + return CE_Failure; + } + + if (m_poSrcDS->GetRasterCount() == 0) + return CE_Failure; + + // Inherit SRS from source if not explicitly defined in VRT + if (!CPLGetXMLNode(psTree, "SRS")) + { + const OGRSpatialReference *poSRS = m_poSrcDS->GetSpatialRef(); + if (poSRS) + { + m_poSRS.reset(poSRS->Clone()); + } + } + + // Inherit GeoTransform from source if not explicitly defined in VRT + if (iOvrLevel < 0 && !CPLGetXMLNode(psTree, "GeoTransform")) + { + if (m_poSrcDS->GetGeoTransform(m_adfGeoTransform) == CE_None) + m_bGeoTransformSet = true; + } + + /* -------------------------------------------------------------------- */ + /* Initialize blocksize before calling sub-init so that the */ + /* band initializers can get it from the dataset object when */ + /* they are created. 
*/ + /* -------------------------------------------------------------------- */ + + const auto poSrcFirstBand = m_poSrcDS->GetRasterBand(1); + poSrcFirstBand->GetBlockSize(&m_nBlockXSize, &m_nBlockYSize); + if (const char *pszBlockXSize = + CPLGetXMLValue(psTree, "BlockXSize", nullptr)) + m_nBlockXSize = atoi(pszBlockXSize); + if (const char *pszBlockYSize = + CPLGetXMLValue(psTree, "BlockYSize", nullptr)) + m_nBlockYSize = atoi(pszBlockYSize); + + // Initialize all the general VRT stuff. + if (VRTDataset::XMLInit(psTree, pszVRTPathIn) != CE_None) + { + return CE_Failure; + } + + // Use geotransform from parent for overviews + if (iOvrLevel >= 0 && poParentDS->m_bGeoTransformSet) + { + m_bGeoTransformSet = true; + m_adfGeoTransform[0] = poParentDS->m_adfGeoTransform[0]; + m_adfGeoTransform[1] = poParentDS->m_adfGeoTransform[1]; + m_adfGeoTransform[2] = poParentDS->m_adfGeoTransform[2]; + m_adfGeoTransform[3] = poParentDS->m_adfGeoTransform[3]; + m_adfGeoTransform[4] = poParentDS->m_adfGeoTransform[4]; + m_adfGeoTransform[5] = poParentDS->m_adfGeoTransform[5]; + + m_adfGeoTransform[1] *= + static_cast<double>(poParentDS->GetRasterXSize()) / nRasterXSize; + m_adfGeoTransform[2] *= + static_cast<double>(poParentDS->GetRasterYSize()) / nRasterYSize; + m_adfGeoTransform[4] *= + static_cast<double>(poParentDS->GetRasterXSize()) / nRasterXSize; + m_adfGeoTransform[5] *= + static_cast<double>(poParentDS->GetRasterYSize()) / nRasterYSize; + } + + // Create bands automatically from source dataset if not explicitly defined + // in VRT. + if (!CPLGetXMLNode(psTree, "VRTRasterBand")) + { + for (int i = 0; i < m_poSrcDS->GetRasterCount(); ++i) + { + const auto poSrcBand = m_poSrcDS->GetRasterBand(i + 1); + auto poBand = new VRTProcessedRasterBand( + this, i + 1, poSrcBand->GetRasterDataType()); + poBand->CopyCommonInfoFrom(poSrcBand); + SetBand(i + 1, poBand); + } + } + + const CPLXMLNode *psProcessingSteps = + CPLGetXMLNode(psTree, "ProcessingSteps"); + if (!psProcessingSteps) + { + CPLError(CE_Failure, CPLE_AppDefined, + "ProcessingSteps element missing"); + return CE_Failure; + } + + const auto eInDT = poSrcFirstBand->GetRasterDataType(); + for (int i = 1; i < m_poSrcDS->GetRasterCount(); ++i) + { + const auto eDT = m_poSrcDS->GetRasterBand(i + 1)->GetRasterDataType(); + if (eDT != eInDT) + { + CPLError(CE_Warning, CPLE_AppDefined, + "Not all bands of the input dataset have the same data " + "type. The data type of the first band will be used as " + "the reference one."); + break; + } + } + + GDALDataType eCurrentDT = eInDT; + int nCurrentBandCount = m_poSrcDS->GetRasterCount(); + + std::vector<double> adfNoData; + for (int i = 1; i <= nCurrentBandCount; ++i) + { + int bHasVal = FALSE; + const double dfVal = + m_poSrcDS->GetRasterBand(i)->GetNoDataValue(&bHasVal); + adfNoData.emplace_back( + bHasVal ? 
dfVal : std::numeric_limits<double>::quiet_NaN()); + } + + int nStepCount = 0; + for (const CPLXMLNode *psStep = psProcessingSteps->psChild; psStep; + psStep = psStep->psNext) + { + if (psStep->eType == CXT_Element && + strcmp(psStep->pszValue, "Step") == 0) + { + ++nStepCount; + } + } + + int iStep = 0; + for (const CPLXMLNode *psStep = psProcessingSteps->psChild; psStep; + psStep = psStep->psNext) + { + if (psStep->eType == CXT_Element && + strcmp(psStep->pszValue, "Step") == 0) + { + ++iStep; + const bool bIsFinalStep = (iStep == nStepCount); + std::vector<double> adfOutNoData; + if (bIsFinalStep) + { + // Initialize adfOutNoData with nodata value of *output* bands + // for final step + for (int i = 1; i <= nBands; ++i) + { + int bHasVal = FALSE; + const double dfVal = + GetRasterBand(i)->GetNoDataValue(&bHasVal); + adfOutNoData.emplace_back( + bHasVal ? dfVal + : std::numeric_limits<double>::quiet_NaN()); + } + } + if (!ParseStep(psStep, bIsFinalStep, eCurrentDT, nCurrentBandCount, + adfNoData, adfOutNoData)) + return CE_Failure; + adfNoData = std::move(adfOutNoData); + } + } + + if (m_aoSteps.empty()) + { + CPLError(CE_Failure, CPLE_AppDefined, + "At least one step should be defined"); + return CE_Failure; + } + + if (nCurrentBandCount != nBands) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Number of output bands of last step is not consistent with " + "number of VRTProcessedRasterBand's"); + return CE_Failure; + } + + if (nBands > 1) + SetMetadataItem("INTERLEAVE", "PIXEL", "IMAGE_STRUCTURE"); + + m_oXMLTree.reset(CPLCloneXMLTree(psTree)); + + return CE_None; +} + +/************************************************************************/ +/* ParseStep() */ +/************************************************************************/ + +/** Parse the current Step node and create a corresponding entry in m_aoSteps. + * + * @param psStep Step node + * @param bIsFinalStep Whether this is the final step. + * @param[in,out] eCurrentDT Input data type for this step. + * Updated to output data type at end of method. + * @param[in,out] nCurrentBandCount Input band count for this step. + * Updated to output band cout at end of + * method. + * @param adfInNoData Input nodata values + * @param[in,out] adfOutNoData Output nodata values, to be filled by this + * method. When bIsFinalStep, this is also an + * input parameter. + * @return true on success. 
+ */ +bool VRTProcessedDataset::ParseStep(const CPLXMLNode *psStep, bool bIsFinalStep, + GDALDataType &eCurrentDT, + int &nCurrentBandCount, + std::vector<double> &adfInNoData, + std::vector<double> &adfOutNoData) +{ + const char *pszStepName = CPLGetXMLValue( + psStep, "name", CPLSPrintf("nr %d", 1 + int(m_aoSteps.size()))); + const char *pszAlgorithm = CPLGetXMLValue(psStep, "Algorithm", nullptr); + if (!pszAlgorithm) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Step '%s' lacks a Algorithm element", pszStepName); + return false; + } + + const auto &oMapFunctions = GetGlobalMapProcessedDatasetFunc(); + const auto oIterFunc = oMapFunctions.find(pszAlgorithm); + if (oIterFunc == oMapFunctions.end()) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Step '%s' uses unregistered algorithm '%s'", pszStepName, + pszAlgorithm); + return false; + } + + const auto &oFunc = oIterFunc->second; + + if (!oFunc.aeSupportedInputDT.empty()) + { + if (std::find(oFunc.aeSupportedInputDT.begin(), + oFunc.aeSupportedInputDT.end(), + eCurrentDT) == oFunc.aeSupportedInputDT.end()) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Step '%s' (using algorithm '%s') does not " + "support input data type = '%s'", + pszStepName, pszAlgorithm, + GDALGetDataTypeName(eCurrentDT)); + return false; + } + } + + if (!oFunc.anSupportedInputBandCount.empty()) + { + if (std::find(oFunc.anSupportedInputBandCount.begin(), + oFunc.anSupportedInputBandCount.end(), + nCurrentBandCount) == + oFunc.anSupportedInputBandCount.end()) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Step '%s' (using algorithm '%s') does not " + "support input band count = %d", + pszStepName, pszAlgorithm, nCurrentBandCount); + return false; + } + } + + Step oStep; + oStep.osAlgorithm = pszAlgorithm; + oStep.eInDT = oFunc.eRequestedInputDT != GDT_Unknown + ? oFunc.eRequestedInputDT + : eCurrentDT; + oStep.nInBands = nCurrentBandCount; + + // Unless modified by pfnInit... + oStep.eOutDT = oStep.eInDT; + + oStep.adfInNoData = adfInNoData; + oStep.adfOutNoData = bIsFinalStep ? adfOutNoData : adfInNoData; + + // Deal with constant arguments + for (const auto &nameValuePair : oFunc.oMapConstantArguments) + { + oStep.aosArguments.AddNameValue(nameValuePair.first.c_str(), + nameValuePair.second.c_str()); + } + + // Deal with built-in arguments + if (oFunc.oSetBuiltinArguments.find("nodata") != + oFunc.oSetBuiltinArguments.end()) + { + int bHasVal = false; + const auto poSrcFirstBand = m_poSrcDS->GetRasterBand(1); + const double dfVal = poSrcFirstBand->GetNoDataValue(&bHasVal); + if (bHasVal) + { + oStep.aosArguments.AddNameValue("nodata", + CPLSPrintf("%.18g", dfVal)); + } + } + + if (oFunc.oSetBuiltinArguments.find("offset_{band}") != + oFunc.oSetBuiltinArguments.end()) + { + for (int i = 1; i <= m_poSrcDS->GetRasterCount(); ++i) + { + int bHasVal = false; + const double dfVal = GetRasterBand(i)->GetOffset(&bHasVal); + oStep.aosArguments.AddNameValue( + CPLSPrintf("offset_%d", i), + CPLSPrintf("%.18g", bHasVal ? dfVal : 0.0)); + } + } + + if (oFunc.oSetBuiltinArguments.find("scale_{band}") != + oFunc.oSetBuiltinArguments.end()) + { + for (int i = 1; i <= m_poSrcDS->GetRasterCount(); ++i) + { + int bHasVal = false; + const double dfVal = GetRasterBand(i)->GetScale(&bHasVal); + oStep.aosArguments.AddNameValue( + CPLSPrintf("scale_%d", i), + CPLSPrintf("%.18g", bHasVal ? 
dfVal : 1.0)); + } + } + + // Parse arguments specified in VRT + std::set<std::string> oFoundArguments; + + for (const CPLXMLNode *psStepChild = psStep->psChild; psStepChild; + psStepChild = psStepChild->psNext) + { + if (psStepChild->eType == CXT_Element && + strcmp(psStepChild->pszValue, "Argument") == 0) + { + const char *pszParamName = + CPLGetXMLValue(psStepChild, "name", nullptr); + if (!pszParamName) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Step '%s' has a Argument without a name attribute", + pszStepName); + return false; + } + const char *pszValue = CPLGetXMLValue(psStepChild, nullptr, ""); + auto oOtherArgIter = + oFunc.oOtherArguments.find(CPLString(pszParamName).tolower()); + if (!oFunc.oOtherArguments.empty() && + oOtherArgIter == oFunc.oOtherArguments.end()) + { + // If we got a parameter name like 'coefficients_1', + // try to fetch the generic 'coefficients_{band}' + std::string osParamName(pszParamName); + const auto nPos = osParamName.rfind('_'); + if (nPos != std::string::npos) + { + osParamName.resize(nPos + 1); + osParamName += "{band}"; + oOtherArgIter = oFunc.oOtherArguments.find( + CPLString(osParamName).tolower()); + } + } + if (oOtherArgIter != oFunc.oOtherArguments.end()) + { + oFoundArguments.insert(oOtherArgIter->first); + + const std::string &osType = oOtherArgIter->second.osType; + if (osType == "boolean") + { + if (!EQUAL(pszValue, "true") && !EQUAL(pszValue, "false")) + { + CPLError(CE_Failure, CPLE_NotSupported, + "Step '%s' has a Argument '%s' whose " + "value '%s' is not a boolean", + pszStepName, pszParamName, pszValue); + return false; + } + } + else if (osType == "integer") + { + if (CPLGetValueType(pszValue) != CPL_VALUE_INTEGER) + { + CPLError(CE_Failure, CPLE_NotSupported, + "Step '%s' has a Argument '%s' whose " + "value '%s' is not a integer", + pszStepName, pszParamName, pszValue); + return false; + } + } + else if (osType == "double") + { + const auto eType = CPLGetValueType(pszValue); + if (eType != CPL_VALUE_INTEGER && eType != CPL_VALUE_REAL) + { + CPLError(CE_Failure, CPLE_NotSupported, + "Step '%s' has a Argument '%s' whose " + "value '%s' is not a double", + pszStepName, pszParamName, pszValue); + return false; + } + } + else if (osType == "double_list") + { + const CPLStringList aosTokens( + CSLTokenizeString2(pszValue, ",", 0)); + for (int i = 0; i < aosTokens.size(); ++i) + { + const auto eType = CPLGetValueType(aosTokens[i]); + if (eType != CPL_VALUE_INTEGER && + eType != CPL_VALUE_REAL) + { + CPLError(CE_Failure, CPLE_NotSupported, + "Step '%s' has a Argument '%s' " + "whose value '%s' is not a " + "comma-separated list of doubles", + pszStepName, pszParamName, pszValue); + return false; + } + } + } + else if (osType != "string") + { + CPLDebug("VRT", "Unhandled argument type '%s'", + osType.c_str()); + CPLAssert(0); + } + } + else if (oFunc.bMetadataSpecified && + oFunc.oSetBuiltinArguments.find( + CPLString(pszParamName).tolower()) == + oFunc.oSetBuiltinArguments.end() && + oFunc.oMapConstantArguments.find( + CPLString(pszParamName).tolower()) == + oFunc.oMapConstantArguments.end()) + { + CPLError(CE_Warning, CPLE_NotSupported, + "Step '%s' has a Argument '%s' which is not " + "supported", + pszStepName, pszParamName); + } + + oStep.aosArguments.AddNameValue(pszParamName, pszValue); + } + } + + // Check that required arguments have been specified + for (const auto &oIter : oFunc.oOtherArguments) + { + if (oIter.second.bRequired && + oFoundArguments.find(oIter.first) == oFoundArguments.end()) + { + CPLError(CE_Failure, 
CPLE_AppDefined, + "Step '%s' lacks required Argument '%s'", pszStepName, + oIter.first.c_str()); + return false; + } + } + + if (oFunc.pfnInit) + { + double *padfOutNoData = nullptr; + if (bIsFinalStep) + { + oStep.nOutBands = nBands; + padfOutNoData = + static_cast<double *>(CPLMalloc(nBands * sizeof(double))); + CPLAssert(adfOutNoData.size() == static_cast<size_t>(nBands)); + memcpy(padfOutNoData, adfOutNoData.data(), nBands * sizeof(double)); + } + else + { + oStep.nOutBands = 0; + } + + if (oFunc.pfnInit(pszAlgorithm, oFunc.pUserData, + oStep.aosArguments.List(), oStep.nInBands, + oStep.eInDT, adfInNoData.data(), &(oStep.nOutBands), + &(oStep.eOutDT), &padfOutNoData, m_osVRTPath.c_str(), + &(oStep.pWorkingData)) != CE_None) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Step '%s' (using algorithm '%s') init() function " + "failed", + pszStepName, pszAlgorithm); + CPLFree(padfOutNoData); + return false; + } + + // Input nodata values may have been modified by pfnInit() + oStep.adfInNoData = adfInNoData; + + if (padfOutNoData) + { + adfOutNoData = + std::vector<double>(padfOutNoData, padfOutNoData + nBands); + } + else + { + adfOutNoData = std::vector<double>( + oStep.nOutBands, std::numeric_limits<double>::quiet_NaN()); + } + CPLFree(padfOutNoData); + + oStep.adfOutNoData = adfOutNoData; + } + else + { + oStep.nOutBands = oStep.nInBands; + adfOutNoData = oStep.adfOutNoData; + } + + eCurrentDT = oStep.eOutDT; + nCurrentBandCount = oStep.nOutBands; + + m_aoSteps.emplace_back(std::move(oStep)); + + return true; +} + +/************************************************************************/ +/* SerializeToXML() */ +/************************************************************************/ + +CPLXMLNode *VRTProcessedDataset::SerializeToXML(const char *pszVRTPathIn) + +{ + CPLXMLNode *psTree = CPLCloneXMLTree(m_oXMLTree.get()); + if (psTree == nullptr) + return psTree; + + /* -------------------------------------------------------------------- */ + /* Remove VRTRasterBand nodes from the original tree and find the */ + /* last child. */ + /* -------------------------------------------------------------------- */ + CPLXMLNode *psLastChild = psTree->psChild; + CPLXMLNode *psPrevChild = nullptr; + while (psLastChild) + { + CPLXMLNode *psNextChild = psLastChild->psNext; + if (psLastChild->eType == CXT_Element && + strcmp(psLastChild->pszValue, "VRTRasterBand") == 0) + { + if (psPrevChild) + psPrevChild->psNext = psNextChild; + else + psTree->psChild = psNextChild; + psLastChild->psNext = nullptr; + CPLDestroyXMLNode(psLastChild); + psLastChild = psPrevChild ? psPrevChild : psTree->psChild; + } + else if (!psNextChild) + { + break; + } + else + { + psPrevChild = psLastChild; + psLastChild = psNextChild; + } + } + CPLAssert(psLastChild); // we have at least Input + + /* -------------------------------------------------------------------- */ + /* Serialize bands. 
*/ + /* -------------------------------------------------------------------- */ + bool bHasWarnedAboutRAMUsage = false; + size_t nAccRAMUsage = 0; + for (int iBand = 0; iBand < nBands; iBand++) + { + CPLXMLNode *psBandTree = + static_cast<VRTRasterBand *>(papoBands[iBand]) + ->SerializeToXML(pszVRTPathIn, bHasWarnedAboutRAMUsage, + nAccRAMUsage); + + if (psBandTree != nullptr) + { + psLastChild->psNext = psBandTree; + psLastChild = psBandTree; + } + } + + return psTree; +} + +/************************************************************************/ +/* SerializeToXML() */ +/************************************************************************/ + +CPLXMLNode * +VRTProcessedRasterBand::SerializeToXML(const char *pszVRTPathIn, + bool &bHasWarnedAboutRAMUsage, + size_t &nAccRAMUsage) + +{ + CPLXMLNode *psTree = VRTRasterBand::SerializeToXML( + pszVRTPathIn, bHasWarnedAboutRAMUsage, nAccRAMUsage); + + /* -------------------------------------------------------------------- */ + /* Set subclass. */ + /* -------------------------------------------------------------------- */ + CPLCreateXMLNode(CPLCreateXMLNode(psTree, CXT_Attribute, "subClass"), + CXT_Text, "VRTProcessedRasterBand"); + + return psTree; +} + +/************************************************************************/ +/* GetBlockSize() */ +/************************************************************************/ + +/** Return block size */ +void VRTProcessedDataset::GetBlockSize(int *pnBlockXSize, + int *pnBlockYSize) const + +{ + *pnBlockXSize = m_nBlockXSize; + *pnBlockYSize = m_nBlockYSize; +} + +/************************************************************************/ +/* ProcessRegion() */ +/************************************************************************/ + +/** Compute pixel values for the specified region. + * + * The output is stored in m_abyInput in a pixel-interleaved way. 
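+ *
+ * The region is first read from the source dataset into the working buffer,
+ * then each entry of m_aoSteps is applied in sequence: the buffer is
+ * converted to the step input data type when it differs from the previous
+ * output type, the step processing callback is invoked, and the input and
+ * output buffers are swapped so that the output of one step becomes the input
+ * of the next one.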
+ */ +bool VRTProcessedDataset::ProcessRegion(int nXOff, int nYOff, int nBufXSize, + int nBufYSize) +{ + + CPLAssert(!m_aoSteps.empty()); + + const int nFirstBandCount = m_aoSteps.front().nInBands; + CPLAssert(nFirstBandCount == m_poSrcDS->GetRasterCount()); + const GDALDataType eFirstDT = m_aoSteps.front().eInDT; + const int nFirstDTSize = GDALGetDataTypeSizeBytes(eFirstDT); + auto &abyInput = m_abyInput; + auto &abyOutput = m_abyOutput; + try + { + abyInput.resize(static_cast<size_t>(nBufXSize) * nBufYSize * + nFirstBandCount * nFirstDTSize); + } + catch (const std::bad_alloc &) + { + CPLError(CE_Failure, CPLE_OutOfMemory, + "Out of memory allocating working buffer"); + return false; + } + + if (m_poSrcDS->RasterIO( + GF_Read, nXOff, nYOff, nBufXSize, nBufYSize, abyInput.data(), + nBufXSize, nBufYSize, eFirstDT, nFirstBandCount, nullptr, + static_cast<GSpacing>(nFirstDTSize) * nFirstBandCount, + static_cast<GSpacing>(nFirstDTSize) * nFirstBandCount * nBufXSize, + nFirstDTSize, nullptr) != CE_None) + { + return false; + } + + const double dfSrcXOff = nXOff; + const double dfSrcYOff = nYOff; + const double dfSrcXSize = nBufXSize; + const double dfSrcYSize = nBufYSize; + + double adfSrcGT[6]; + if (m_poSrcDS->GetGeoTransform(adfSrcGT) != CE_None) + { + adfSrcGT[0] = 0; + adfSrcGT[1] = 1; + adfSrcGT[2] = 0; + adfSrcGT[3] = 0; + adfSrcGT[4] = 0; + adfSrcGT[5] = 1; + } + + GDALDataType eLastDT = eFirstDT; + const auto &oMapFunctions = GetGlobalMapProcessedDatasetFunc(); + for (const auto &oStep : m_aoSteps) + { + const auto oIterFunc = oMapFunctions.find(oStep.osAlgorithm); + CPLAssert(oIterFunc != oMapFunctions.end()); + + // Data type adaptation + if (eLastDT != oStep.eInDT) + { + try + { + abyOutput.resize(static_cast<size_t>(nBufXSize) * nBufYSize * + oStep.nInBands * + GDALGetDataTypeSizeBytes(oStep.eInDT)); + } + catch (const std::bad_alloc &) + { + CPLError(CE_Failure, CPLE_OutOfMemory, + "Out of memory allocating working buffer"); + return false; + } + + GDALCopyWords64(abyInput.data(), eLastDT, + GDALGetDataTypeSizeBytes(eLastDT), abyOutput.data(), + oStep.eInDT, GDALGetDataTypeSizeBytes(oStep.eInDT), + static_cast<size_t>(nBufXSize) * nBufYSize * + oStep.nInBands); + + std::swap(abyInput, abyOutput); + } + + try + { + abyOutput.resize(static_cast<size_t>(nBufXSize) * nBufYSize * + oStep.nOutBands * + GDALGetDataTypeSizeBytes(oStep.eOutDT)); + } + catch (const std::bad_alloc &) + { + CPLError(CE_Failure, CPLE_OutOfMemory, + "Out of memory allocating working buffer"); + return false; + } + + const auto &oFunc = oIterFunc->second; + if (oFunc.pfnProcess( + oStep.osAlgorithm.c_str(), oFunc.pUserData, oStep.pWorkingData, + oStep.aosArguments.List(), nBufXSize, nBufYSize, + abyInput.data(), abyInput.size(), oStep.eInDT, oStep.nInBands, + oStep.adfInNoData.data(), abyOutput.data(), abyOutput.size(), + oStep.eOutDT, oStep.nOutBands, oStep.adfOutNoData.data(), + dfSrcXOff, dfSrcYOff, dfSrcXSize, dfSrcYSize, adfSrcGT, + m_osVRTPath.c_str(), + /*papszExtra=*/nullptr) != CE_None) + { + return false; + } + + std::swap(abyInput, abyOutput); + eLastDT = oStep.eOutDT; + } + + return true; +} + +/************************************************************************/ +/* VRTProcessedRasterBand() */ +/************************************************************************/ + +/** Constructor */ +VRTProcessedRasterBand::VRTProcessedRasterBand(VRTProcessedDataset *poDSIn, + int nBandIn, + GDALDataType eDataTypeIn) +{ + Initialize(poDSIn->GetRasterXSize(), poDSIn->GetRasterYSize()); + + poDS = poDSIn; 
+ nBand = nBandIn; + eAccess = GA_Update; + eDataType = eDataTypeIn; + + poDSIn->GetBlockSize(&nBlockXSize, &nBlockYSize); +} + +/************************************************************************/ +/* GetOverviewCount() */ +/************************************************************************/ + +int VRTProcessedRasterBand::GetOverviewCount() +{ + auto poVRTDS = cpl::down_cast<VRTProcessedDataset *>(poDS); + return static_cast<int>(poVRTDS->m_apoOverviewDatasets.size()); +} + +/************************************************************************/ +/* GetOverview() */ +/************************************************************************/ + +GDALRasterBand *VRTProcessedRasterBand::GetOverview(int iOvr) +{ + auto poVRTDS = cpl::down_cast<VRTProcessedDataset *>(poDS); + if (iOvr < 0 || + iOvr >= static_cast<int>(poVRTDS->m_apoOverviewDatasets.size())) + return nullptr; + return poVRTDS->m_apoOverviewDatasets[iOvr]->GetRasterBand(nBand); +} + +/************************************************************************/ +/* IReadBlock() */ +/************************************************************************/ + +CPLErr VRTProcessedRasterBand::IReadBlock(int nBlockXOff, int nBlockYOff, + void *pImage) + +{ + auto poVRTDS = cpl::down_cast<VRTProcessedDataset *>(poDS); + + int nBufXSize = 0; + int nBufYSize = 0; + GetActualBlockSize(nBlockXOff, nBlockYOff, &nBufXSize, &nBufYSize); + + const int nXPixelOff = nBlockXOff * nBlockXSize; + const int nYPixelOff = nBlockYOff * nBlockYSize; + if (!poVRTDS->ProcessRegion(nXPixelOff, nYPixelOff, nBufXSize, nBufYSize)) + { + return CE_Failure; + } + + const int nOutBands = poVRTDS->m_aoSteps.back().nOutBands; + CPLAssert(nOutBands == poVRTDS->GetRasterCount()); + const auto eLastDT = poVRTDS->m_aoSteps.back().eOutDT; + const int nLastDTSize = GDALGetDataTypeSizeBytes(eLastDT); + const int nDTSize = GDALGetDataTypeSizeBytes(eDataType); + + // Dispatch final output buffer to cached blocks of output bands + for (int iDstBand = 0; iDstBand < nOutBands; ++iDstBand) + { + GDALRasterBlock *poBlock = nullptr; + GByte *pDst; + if (iDstBand + 1 == nBand) + { + pDst = static_cast<GByte *>(pImage); + } + else + { + auto poOtherBand = poVRTDS->papoBands[iDstBand]; + poBlock = poOtherBand->TryGetLockedBlockRef(nBlockXOff, nBlockYOff); + if (poBlock) + { + poBlock->DropLock(); + continue; + } + poBlock = poOtherBand->GetLockedBlockRef( + nBlockXOff, nBlockYOff, /* bJustInitialized = */ true); + if (!poBlock) + continue; + pDst = static_cast<GByte *>(poBlock->GetDataRef()); + } + for (int iY = 0; iY < nBufYSize; ++iY) + { + GDALCopyWords(poVRTDS->m_abyInput.data() + + (iDstBand + static_cast<size_t>(iY) * nBufXSize * + nOutBands) * + nLastDTSize, + eLastDT, nLastDTSize * nOutBands, + pDst + + static_cast<size_t>(iY) * nBlockXSize * nDTSize, + eDataType, nDTSize, nBufXSize); + } + if (poBlock) + poBlock->DropLock(); + } + + return CE_None; +} + +/*! @endcond */ + +/************************************************************************/ +/* GDALVRTRegisterProcessedDatasetFunc() */ +/************************************************************************/ + +/** Register a function to be used by VRTProcessedDataset. 
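+
+ A registered function can then be referenced through the <Algorithm> element
+ of a <Step> of a VRTProcessedDataset (cf ParseStep()).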
+
+ An example of content for pszXMLMetadata is:
+ \verbatim
+ <ProcessedDatasetFunctionArgumentsList>
+    <Argument name='src_nodata' type='double' description='Override input nodata value'/>
+    <Argument name='dst_nodata' type='double' description='Override output nodata value'/>
+    <Argument name='replacement_nodata' description='value to substitute to a valid computed value that would be nodata' type='double'/>
+    <Argument name='dst_intended_datatype' type='string' description='Intended datatype of output (which might be different than the working data type)'/>
+    <Argument name='coefficients_{band}' description='Comma-separated coefficients for combining bands. First one is constant term' type='double_list' required='true'/>
+ </ProcessedDatasetFunctionArgumentsList>
+ \endverbatim
+
+ @param pszFuncName Function name. Must be unique and not null.
+ @param pUserData User data. May be nullptr. Must remain valid during the
+                  lifetime of GDAL.
+ @param pszXMLMetadata XML metadata describing the function arguments. May be
+                       nullptr if there are no arguments.
+ @param eRequestedInputDT If the pfnProcess callback only supports a single
+                          data type, it should be specified in this parameter.
+                          Otherwise set it to GDT_Unknown.
+ @param paeSupportedInputDT List of supported input data types. May be nullptr
+                            if all are supported or if eRequestedInputDT is
+                            set to a non GDT_Unknown value.
+ @param nSupportedInputDTSize Size of paeSupportedInputDT
+ @param panSupportedInputBandCount List of supported band counts. May be
+                                   nullptr if any source band count is
+                                   supported.
+ @param nSupportedInputBandCountSize Size of panSupportedInputBandCount
+ @param pfnInit Initialization function called when a VRTProcessedDataset
+                step uses the registered function. This initialization function
+                will return the output data type, output band count and
+                potentially initialize a working structure, typically parsing
+                arguments. May be nullptr.
+                If not specified, it will be assumed that the input and output
+                data types are the same, and that the input number of bands
+                and output number of bands are the same.
+ @param pfnFree Free function that will free the working structure allocated
+                by pfnInit. May be nullptr.
+ @param pfnProcess Processing function called to compute pixel values. Must
+                   not be nullptr.
+ @param papszOptions Unused currently. Must be nullptr.
+ @return CE_None in case of success, error otherwise.
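+
+ A minimal registration sketch (the callback names MyFuncInit, MyFuncFree and
+ MyFuncProcess, and the 'factor' argument, are hypothetical; only the
+ signature of GDALVRTRegisterProcessedDatasetFunc itself is assumed):
+ \verbatim
+ GDALVRTRegisterProcessedDatasetFunc(
+     "my_algorithm", nullptr,
+     "<ProcessedDatasetFunctionArgumentsList>"
+     "  <Argument name='factor' type='double' required='true'/>"
+     "</ProcessedDatasetFunctionArgumentsList>",
+     GDT_Float64,      // request input as Float64
+     nullptr, 0,       // no explicit list of supported input data types
+     nullptr, 0,       // any input band count
+     MyFuncInit, MyFuncFree, MyFuncProcess, nullptr);
+ \endverbatim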
+ @since 3.9 + */ +CPLErr GDALVRTRegisterProcessedDatasetFunc( + const char *pszFuncName, void *pUserData, const char *pszXMLMetadata, + GDALDataType eRequestedInputDT, const GDALDataType *paeSupportedInputDT, + size_t nSupportedInputDTSize, const int *panSupportedInputBandCount, + size_t nSupportedInputBandCountSize, + GDALVRTProcessedDatasetFuncInit pfnInit, + GDALVRTProcessedDatasetFuncFree pfnFree, + GDALVRTProcessedDatasetFuncProcess pfnProcess, + CPL_UNUSED CSLConstList papszOptions) +{ + if (pszFuncName == nullptr || pszFuncName[0] == '\0') + { + CPLError(CE_Failure, CPLE_AppDefined, + "pszFuncName should be non-empty"); + return CE_Failure; + } + + auto &oMap = GetGlobalMapProcessedDatasetFunc(); + if (oMap.find(pszFuncName) != oMap.end()) + { + CPLError(CE_Failure, CPLE_AppDefined, "%s already registered", + pszFuncName); + return CE_Failure; + } + + if (!pfnProcess) + { + CPLError(CE_Failure, CPLE_AppDefined, "pfnProcess should not be null"); + return CE_Failure; + } + + VRTProcessedDatasetFunc oFunc; + oFunc.osFuncName = pszFuncName; + oFunc.pUserData = pUserData; + if (pszXMLMetadata) + { + oFunc.bMetadataSpecified = true; + auto psTree = CPLXMLTreeCloser(CPLParseXMLString(pszXMLMetadata)); + if (!psTree) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Cannot parse pszXMLMetadata=%s for %s", pszXMLMetadata, + pszFuncName); + return CE_Failure; + } + const CPLXMLNode *psRoot = CPLGetXMLNode( + psTree.get(), "=ProcessedDatasetFunctionArgumentsList"); + if (!psRoot) + { + CPLError(CE_Failure, CPLE_AppDefined, + "No root ProcessedDatasetFunctionArgumentsList element in " + "pszXMLMetadata=%s for %s", + pszXMLMetadata, pszFuncName); + return CE_Failure; + } + for (const CPLXMLNode *psIter = psRoot->psChild; psIter; + psIter = psIter->psNext) + { + if (psIter->eType == CXT_Element && + strcmp(psIter->pszValue, "Argument") == 0) + { + const char *pszName = CPLGetXMLValue(psIter, "name", nullptr); + if (!pszName) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Missing Argument.name attribute in " + "pszXMLMetadata=%s for %s", + pszXMLMetadata, pszFuncName); + return CE_Failure; + } + const char *pszType = CPLGetXMLValue(psIter, "type", nullptr); + if (!pszType) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Missing Argument.type attribute in " + "pszXMLMetadata=%s for %s", + pszXMLMetadata, pszFuncName); + return CE_Failure; + } + if (strcmp(pszType, "constant") == 0) + { + const char *pszValue = + CPLGetXMLValue(psIter, "value", nullptr); + if (!pszValue) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Missing Argument.value attribute in " + "pszXMLMetadata=%s for %s", + pszXMLMetadata, pszFuncName); + return CE_Failure; + } + oFunc.oMapConstantArguments[CPLString(pszName).tolower()] = + pszValue; + } + else if (strcmp(pszType, "builtin") == 0) + { + if (EQUAL(pszName, "nodata") || + EQUAL(pszName, "offset_{band}") || + EQUAL(pszName, "scale_{band}")) + { + oFunc.oSetBuiltinArguments.insert( + CPLString(pszName).tolower()); + } + else + { + CPLError(CE_Failure, CPLE_NotSupported, + "Unsupported builtin parameter name %s in " + "pszXMLMetadata=%s for %s. 
Only nodata, " + "offset_{band} and scale_{band} are supported", + pszName, pszXMLMetadata, pszFuncName); + return CE_Failure; + } + } + else if (strcmp(pszType, "boolean") == 0 || + strcmp(pszType, "string") == 0 || + strcmp(pszType, "integer") == 0 || + strcmp(pszType, "double") == 0 || + strcmp(pszType, "double_list") == 0) + { + VRTProcessedDatasetFunc::OtherArgument otherArgument; + otherArgument.bRequired = CPLTestBool( + CPLGetXMLValue(psIter, "required", "false")); + otherArgument.osType = pszType; + oFunc.oOtherArguments[CPLString(pszName).tolower()] = + std::move(otherArgument); + } + else + { + CPLError(CE_Failure, CPLE_NotSupported, + "Unsupported type for parameter %s in " + "pszXMLMetadata=%s for %s. Only boolean, string, " + "integer, double and double_list are supported", + pszName, pszXMLMetadata, pszFuncName); + return CE_Failure; + } + } + } + } + oFunc.eRequestedInputDT = eRequestedInputDT; + if (nSupportedInputDTSize) + { + oFunc.aeSupportedInputDT.insert( + oFunc.aeSupportedInputDT.end(), paeSupportedInputDT, + paeSupportedInputDT + nSupportedInputDTSize); + } + if (nSupportedInputBandCountSize) + { + oFunc.anSupportedInputBandCount.insert( + oFunc.anSupportedInputBandCount.end(), panSupportedInputBandCount, + panSupportedInputBandCount + nSupportedInputBandCountSize); + } + oFunc.pfnInit = pfnInit; + oFunc.pfnFree = pfnFree; + oFunc.pfnProcess = pfnProcess; + + oMap[pszFuncName] = std::move(oFunc); + + return CE_None; +} diff --git a/frmts/vrt/vrtprocesseddatasetfunctions.cpp b/frmts/vrt/vrtprocesseddatasetfunctions.cpp new file mode 100644 index 000000000000..9a2eefd0748a --- /dev/null +++ b/frmts/vrt/vrtprocesseddatasetfunctions.cpp @@ -0,0 +1,1579 @@ +/****************************************************************************** + * + * Project: Virtual GDAL Datasets + * Purpose: Implementation of VRTProcessedDataset processing functions + * Author: Even Rouault <even.rouault at spatialys.com> + * + ****************************************************************************** + * Copyright (c) 2024, Even Rouault <even.rouault at spatialys.com> + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. 
+ ****************************************************************************/ + +#include "cpl_minixml.h" +#include "cpl_string.h" +#include "vrtdataset.h" + +#include <algorithm> +#include <limits> +#include <map> +#include <set> +#include <vector> + +/************************************************************************/ +/* GetDstValue() */ +/************************************************************************/ + +/** Return a destination value given an initial value, the destination no data + * value and its replacement value + */ +static inline double GetDstValue(double dfVal, double dfDstNoData, + double dfReplacementDstNodata, + GDALDataType eIntendedDstDT, + bool bDstIntendedDTIsInteger) +{ + if (bDstIntendedDTIsInteger && std::round(dfVal) == dfDstNoData) + { + return dfReplacementDstNodata; + } + else if (eIntendedDstDT == GDT_Float32 && + static_cast<float>(dfVal) == static_cast<float>(dfDstNoData)) + { + return dfReplacementDstNodata; + } + else if (eIntendedDstDT == GDT_Float64 && dfVal == dfDstNoData) + { + return dfReplacementDstNodata; + } + else + { + return dfVal; + } +} + +/************************************************************************/ +/* BandAffineCombinationData */ +/************************************************************************/ + +namespace +{ +/** Working structure for 'BandAffineCombination' builtin function. */ +struct BandAffineCombinationData +{ + static constexpr const char *const EXPECTED_SIGNATURE = + "BandAffineCombination"; + //! Signature (to make sure callback functions are called with the right argument) + const std::string m_osSignature = EXPECTED_SIGNATURE; + + /** Replacement nodata value */ + std::vector<double> m_adfReplacementDstNodata{}; + + /** Intended destination data type. */ + GDALDataType m_eIntendedDstDT = GDT_Float64; + + /** Affine transformation coefficients. + * m_aadfCoefficients[i][0] is the constant term for the i(th) dst band + * m_aadfCoefficients[i][j] is the weight of the j(th) src band for the + * i(th) dst vand. + * Said otherwise dst[i] = m_aadfCoefficients[i][0] + + * sum(m_aadfCoefficients[i][j + 1] * src[j] for j in 0...nSrcBands-1) + */ + std::vector<std::vector<double>> m_aadfCoefficients{}; + + //! Minimum clamping value. + double m_dfClampMin = std::numeric_limits<double>::quiet_NaN(); + + //! Maximum clamping value. 
+ double m_dfClampMax = std::numeric_limits<double>::quiet_NaN(); +}; +} // namespace + +/************************************************************************/ +/* SetOutputValuesForInNoDataAndOutNoData() */ +/************************************************************************/ + +static std::vector<double> SetOutputValuesForInNoDataAndOutNoData( + int nInBands, double *padfInNoData, int *pnOutBands, + double **ppadfOutNoData, bool bSrcNodataSpecified, double dfSrcNoData, + bool bDstNodataSpecified, double dfDstNoData, bool bIsFinalStep) +{ + if (bSrcNodataSpecified) + { + std::vector<double> adfNoData(nInBands, dfSrcNoData); + memcpy(padfInNoData, adfNoData.data(), + adfNoData.size() * sizeof(double)); + } + + std::vector<double> adfDstNoData; + if (bDstNodataSpecified) + { + adfDstNoData.resize(*pnOutBands, dfDstNoData); + } + else if (bIsFinalStep) + { + adfDstNoData = + std::vector<double>(*ppadfOutNoData, *ppadfOutNoData + *pnOutBands); + } + else + { + adfDstNoData = + std::vector<double>(padfInNoData, padfInNoData + nInBands); + adfDstNoData.resize(*pnOutBands, *padfInNoData); + } + + if (*ppadfOutNoData == nullptr) + { + *ppadfOutNoData = + static_cast<double *>(CPLMalloc(*pnOutBands * sizeof(double))); + } + memcpy(*ppadfOutNoData, adfDstNoData.data(), *pnOutBands * sizeof(double)); + + return adfDstNoData; +} + +/************************************************************************/ +/* BandAffineCombinationInit() */ +/************************************************************************/ + +/** Init function for 'BandAffineCombination' builtin function. */ +static CPLErr BandAffineCombinationInit( + const char * /*pszFuncName*/, void * /*pUserData*/, + CSLConstList papszFunctionArgs, int nInBands, GDALDataType eInDT, + double *padfInNoData, int *pnOutBands, GDALDataType *peOutDT, + double **ppadfOutNoData, const char * /* pszVRTPath */, + VRTPDWorkingDataPtr *ppWorkingData) +{ + CPLAssert(eInDT == GDT_Float64); + + *peOutDT = eInDT; + *ppWorkingData = nullptr; + + auto data = std::make_unique<BandAffineCombinationData>(); + + std::map<int, std::vector<double>> oMapCoefficients{}; + double dfSrcNoData = std::numeric_limits<double>::quiet_NaN(); + bool bSrcNodataSpecified = false; + double dfDstNoData = std::numeric_limits<double>::quiet_NaN(); + bool bDstNodataSpecified = false; + double dfReplacementDstNodata = std::numeric_limits<double>::quiet_NaN(); + bool bReplacementDstNodataSpecified = false; + + for (const auto &[pszKey, pszValue] : + cpl::IterateNameValue(papszFunctionArgs)) + { + if (EQUAL(pszKey, "src_nodata")) + { + bSrcNodataSpecified = true; + dfSrcNoData = CPLAtof(pszValue); + } + else if (EQUAL(pszKey, "dst_nodata")) + { + bDstNodataSpecified = true; + dfDstNoData = CPLAtof(pszValue); + } + else if (EQUAL(pszKey, "replacement_nodata")) + { + bReplacementDstNodataSpecified = true; + dfReplacementDstNodata = CPLAtof(pszValue); + } + else if (EQUAL(pszKey, "dst_intended_datatype")) + { + for (GDALDataType eDT = GDT_Byte; eDT < GDT_TypeCount; + eDT = static_cast<GDALDataType>(eDT + 1)) + { + if (EQUAL(GDALGetDataTypeName(eDT), pszValue)) + { + data->m_eIntendedDstDT = eDT; + break; + } + } + } + else if (STARTS_WITH_CI(pszKey, "coefficients_")) + { + const int nTargetBand = atoi(pszKey + strlen("coefficients_")); + if (nTargetBand <= 0 || nTargetBand > 65536) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Invalid band in argument '%s'", pszKey); + return CE_Failure; + } + const CPLStringList aosTokens(CSLTokenizeString2(pszValue, ",", 0)); + if 
(aosTokens.size() != 1 + nInBands) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Argument %s has %d values, whereas %d are expected", + pszKey, aosTokens.size(), 1 + nInBands); + return CE_Failure; + } + std::vector<double> adfValues; + for (int i = 0; i < aosTokens.size(); ++i) + { + adfValues.push_back(CPLAtof(aosTokens[i])); + } + oMapCoefficients[nTargetBand - 1] = std::move(adfValues); + } + else if (EQUAL(pszKey, "min")) + { + data->m_dfClampMin = CPLAtof(pszValue); + } + else if (EQUAL(pszKey, "max")) + { + data->m_dfClampMax = CPLAtof(pszValue); + } + else + { + CPLError(CE_Warning, CPLE_AppDefined, + "Unrecognized argument name %s. Ignored", pszKey); + } + } + + const bool bIsFinalStep = *pnOutBands != 0; + if (bIsFinalStep) + { + if (*pnOutBands != static_cast<int>(oMapCoefficients.size())) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Final step expect %d bands, but only %d coefficient_XX " + "are provided", + *pnOutBands, static_cast<int>(oMapCoefficients.size())); + return CE_Failure; + } + } + else + { + *pnOutBands = static_cast<int>(oMapCoefficients.size()); + } + + const std::vector<double> adfDstNoData = + SetOutputValuesForInNoDataAndOutNoData( + nInBands, padfInNoData, pnOutBands, ppadfOutNoData, + bSrcNodataSpecified, dfSrcNoData, bDstNodataSpecified, dfDstNoData, + bIsFinalStep); + + if (bReplacementDstNodataSpecified) + { + data->m_adfReplacementDstNodata.resize(*pnOutBands, + dfReplacementDstNodata); + } + else + { + for (double dfVal : adfDstNoData) + { + data->m_adfReplacementDstNodata.emplace_back( + GDALGetNoDataReplacementValue(data->m_eIntendedDstDT, dfVal)); + } + } + + // Check we have a set of coefficient for all output bands and + // convert the map to a vector + for (auto &oIter : oMapCoefficients) + { + const int iExpected = static_cast<int>(data->m_aadfCoefficients.size()); + if (oIter.first != iExpected) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Argument coefficients_%d is missing", iExpected + 1); + return CE_Failure; + } + data->m_aadfCoefficients.emplace_back(std::move(oIter.second)); + } + *ppWorkingData = data.release(); + return CE_None; +} + +/************************************************************************/ +/* BandAffineCombinationFree() */ +/************************************************************************/ + +/** Free function for 'BandAffineCombination' builtin function. */ +static void BandAffineCombinationFree(const char * /*pszFuncName*/, + void * /*pUserData*/, + VRTPDWorkingDataPtr pWorkingData) +{ + BandAffineCombinationData *data = + static_cast<BandAffineCombinationData *>(pWorkingData); + CPLAssert(data->m_osSignature == + BandAffineCombinationData::EXPECTED_SIGNATURE); + CPL_IGNORE_RET_VAL(data->m_osSignature); + delete data; +} + +/************************************************************************/ +/* BandAffineCombinationProcess() */ +/************************************************************************/ + +/** Processing function for 'BandAffineCombination' builtin function. 
*/ +static CPLErr BandAffineCombinationProcess( + const char * /*pszFuncName*/, void * /*pUserData*/, + VRTPDWorkingDataPtr pWorkingData, CSLConstList /* papszFunctionArgs*/, + int nBufXSize, int nBufYSize, const void *pInBuffer, size_t nInBufferSize, + GDALDataType eInDT, int nInBands, const double *CPL_RESTRICT padfInNoData, + void *pOutBuffer, size_t nOutBufferSize, GDALDataType eOutDT, int nOutBands, + const double *CPL_RESTRICT padfOutNoData, double /*dfSrcXOff*/, + double /*dfSrcYOff*/, double /*dfSrcXSize*/, double /*dfSrcYSize*/, + const double /*adfSrcGT*/[], const char * /* pszVRTPath */, + CSLConstList /*papszExtra*/) +{ + const size_t nElts = static_cast<size_t>(nBufXSize) * nBufYSize; + + CPL_IGNORE_RET_VAL(eInDT); + CPLAssert(eInDT == GDT_Float64); + CPL_IGNORE_RET_VAL(eOutDT); + CPLAssert(eOutDT == GDT_Float64); + CPL_IGNORE_RET_VAL(nInBufferSize); + CPLAssert(nInBufferSize == nElts * nInBands * sizeof(double)); + CPL_IGNORE_RET_VAL(nOutBufferSize); + CPLAssert(nOutBufferSize == nElts * nOutBands * sizeof(double)); + + const BandAffineCombinationData *data = + static_cast<BandAffineCombinationData *>(pWorkingData); + CPLAssert(data->m_osSignature == + BandAffineCombinationData::EXPECTED_SIGNATURE); + const double *CPL_RESTRICT padfSrc = static_cast<const double *>(pInBuffer); + double *CPL_RESTRICT padfDst = static_cast<double *>(pOutBuffer); + const bool bDstIntendedDTIsInteger = + GDALDataTypeIsInteger(data->m_eIntendedDstDT); + const double dfClampMin = data->m_dfClampMin; + const double dfClampMax = data->m_dfClampMax; + for (size_t i = 0; i < nElts; ++i) + { + for (int iDst = 0; iDst < nOutBands; ++iDst) + { + const auto &adfCoefficients = data->m_aadfCoefficients[iDst]; + double dfVal = adfCoefficients[0]; + bool bSetNoData = false; + for (int iSrc = 0; iSrc < nInBands; ++iSrc) + { + // written this way to work with a NaN value + if (!(padfSrc[iSrc] != padfInNoData[iSrc])) + { + bSetNoData = true; + break; + } + dfVal += adfCoefficients[iSrc + 1] * padfSrc[iSrc]; + } + if (bSetNoData) + { + *padfDst = padfOutNoData[iDst]; + } + else + { + double dfDstVal = GetDstValue( + dfVal, padfOutNoData[iDst], + data->m_adfReplacementDstNodata[iDst], + data->m_eIntendedDstDT, bDstIntendedDTIsInteger); + if (dfDstVal < dfClampMin) + dfDstVal = dfClampMin; + if (dfDstVal > dfClampMax) + dfDstVal = dfClampMax; + *padfDst = dfDstVal; + } + ++padfDst; + } + padfSrc += nInBands; + } + + return CE_None; +} + +/************************************************************************/ +/* LUTData */ +/************************************************************************/ + +namespace +{ +/** Working structure for 'LUT' builtin function. */ +struct LUTData +{ + static constexpr const char *const EXPECTED_SIGNATURE = "LUT"; + //! Signature (to make sure callback functions are called with the right argument) + const std::string m_osSignature = EXPECTED_SIGNATURE; + + //! m_aadfLUTInputs[i][j] is the j(th) input value for that LUT of band i. + std::vector<std::vector<double>> m_aadfLUTInputs{}; + + //! m_aadfLUTOutputs[i][j] is the j(th) output value for that LUT of band i. 
+ std::vector<std::vector<double>> m_aadfLUTOutputs{}; + + /************************************************************************/ + /* LookupValue() */ + /************************************************************************/ + + double LookupValue(int iBand, double dfInput) const + { + const auto &adfInput = m_aadfLUTInputs[iBand]; + const auto &afdOutput = m_aadfLUTOutputs[iBand]; + + // Find the index of the first element in the LUT input array that + // is not smaller than the input value. + int i = static_cast<int>( + std::lower_bound(adfInput.data(), adfInput.data() + adfInput.size(), + dfInput) - + adfInput.data()); + + if (i == 0) + return afdOutput[0]; + + // If the index is beyond the end of the LUT input array, the input + // value is larger than all the values in the array. + if (i == static_cast<int>(adfInput.size())) + return afdOutput.back(); + + if (adfInput[i] == dfInput) + return afdOutput[i]; + + // Otherwise, interpolate. + return afdOutput[i - 1] + (dfInput - adfInput[i - 1]) * + ((afdOutput[i] - afdOutput[i - 1]) / + (adfInput[i] - adfInput[i - 1])); + } +}; +} // namespace + +/************************************************************************/ +/* LUTInit() */ +/************************************************************************/ + +/** Init function for 'LUT' builtin function. */ +static CPLErr LUTInit(const char * /*pszFuncName*/, void * /*pUserData*/, + CSLConstList papszFunctionArgs, int nInBands, + GDALDataType eInDT, double *padfInNoData, int *pnOutBands, + GDALDataType *peOutDT, double **ppadfOutNoData, + const char * /* pszVRTPath */, + VRTPDWorkingDataPtr *ppWorkingData) +{ + CPLAssert(eInDT == GDT_Float64); + + const bool bIsFinalStep = *pnOutBands != 0; + *peOutDT = eInDT; + *ppWorkingData = nullptr; + + if (!bIsFinalStep) + { + *pnOutBands = nInBands; + } + + auto data = std::make_unique<LUTData>(); + + double dfSrcNoData = std::numeric_limits<double>::quiet_NaN(); + bool bSrcNodataSpecified = false; + double dfDstNoData = std::numeric_limits<double>::quiet_NaN(); + bool bDstNodataSpecified = false; + + std::map<int, std::pair<std::vector<double>, std::vector<double>>> oMap{}; + + for (const auto &[pszKey, pszValue] : + cpl::IterateNameValue(papszFunctionArgs)) + { + if (EQUAL(pszKey, "src_nodata")) + { + bSrcNodataSpecified = true; + dfSrcNoData = CPLAtof(pszValue); + } + else if (EQUAL(pszKey, "dst_nodata")) + { + bDstNodataSpecified = true; + dfDstNoData = CPLAtof(pszValue); + } + else if (STARTS_WITH_CI(pszKey, "lut_")) + { + const int nBand = atoi(pszKey + strlen("lut_")); + if (nBand <= 0 || nBand > nInBands) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Invalid band in argument '%s'", pszKey); + return CE_Failure; + } + const CPLStringList aosTokens(CSLTokenizeString2(pszValue, ",", 0)); + std::vector<double> adfInputValues; + std::vector<double> adfOutputValues; + for (int i = 0; i < aosTokens.size(); ++i) + { + const CPLStringList aosTokens2( + CSLTokenizeString2(aosTokens[i], ":", 0)); + if (aosTokens2.size() != 2) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Invalid value for argument '%s'", pszKey); + return CE_Failure; + } + adfInputValues.push_back(CPLAtof(aosTokens2[0])); + adfOutputValues.push_back(CPLAtof(aosTokens2[1])); + } + oMap[nBand - 1] = std::pair(std::move(adfInputValues), + std::move(adfOutputValues)); + } + else + { + CPLError(CE_Warning, CPLE_AppDefined, + "Unrecognized argument name %s. 
Ignored", pszKey); + } + } + + SetOutputValuesForInNoDataAndOutNoData( + nInBands, padfInNoData, pnOutBands, ppadfOutNoData, bSrcNodataSpecified, + dfSrcNoData, bDstNodataSpecified, dfDstNoData, bIsFinalStep); + + int iExpected = 0; + // Check we have values for all bands and convert to vector + for (auto &oIter : oMap) + { + if (oIter.first != iExpected) + { + CPLError(CE_Failure, CPLE_AppDefined, "Argument lut_%d is missing", + iExpected + 1); + return CE_Failure; + } + ++iExpected; + data->m_aadfLUTInputs.emplace_back(std::move(oIter.second.first)); + data->m_aadfLUTOutputs.emplace_back(std::move(oIter.second.second)); + } + + if (static_cast<int>(oMap.size()) < *pnOutBands) + { + CPLError(CE_Failure, CPLE_AppDefined, "Missing lut_XX element(s)"); + return CE_Failure; + } + + *ppWorkingData = data.release(); + return CE_None; +} + +/************************************************************************/ +/* LUTFree() */ +/************************************************************************/ + +/** Free function for 'LUT' builtin function. */ +static void LUTFree(const char * /*pszFuncName*/, void * /*pUserData*/, + VRTPDWorkingDataPtr pWorkingData) +{ + LUTData *data = static_cast<LUTData *>(pWorkingData); + CPLAssert(data->m_osSignature == LUTData::EXPECTED_SIGNATURE); + CPL_IGNORE_RET_VAL(data->m_osSignature); + delete data; +} + +/************************************************************************/ +/* LUTProcess() */ +/************************************************************************/ + +/** Processing function for 'LUT' builtin function. */ +static CPLErr +LUTProcess(const char * /*pszFuncName*/, void * /*pUserData*/, + VRTPDWorkingDataPtr pWorkingData, + CSLConstList /* papszFunctionArgs*/, int nBufXSize, int nBufYSize, + const void *pInBuffer, size_t nInBufferSize, GDALDataType eInDT, + int nInBands, const double *CPL_RESTRICT padfInNoData, + void *pOutBuffer, size_t nOutBufferSize, GDALDataType eOutDT, + int nOutBands, const double *CPL_RESTRICT padfOutNoData, + double /*dfSrcXOff*/, double /*dfSrcYOff*/, double /*dfSrcXSize*/, + double /*dfSrcYSize*/, const double /*adfSrcGT*/[], + const char * /* pszVRTPath */, CSLConstList /*papszExtra*/) +{ + const size_t nElts = static_cast<size_t>(nBufXSize) * nBufYSize; + + CPL_IGNORE_RET_VAL(eInDT); + CPLAssert(eInDT == GDT_Float64); + CPL_IGNORE_RET_VAL(eOutDT); + CPLAssert(eOutDT == GDT_Float64); + CPL_IGNORE_RET_VAL(nInBufferSize); + CPLAssert(nInBufferSize == nElts * nInBands * sizeof(double)); + CPL_IGNORE_RET_VAL(nOutBufferSize); + CPLAssert(nOutBufferSize == nElts * nOutBands * sizeof(double)); + CPLAssert(nInBands == nOutBands); + CPL_IGNORE_RET_VAL(nOutBands); + + const LUTData *data = static_cast<LUTData *>(pWorkingData); + CPLAssert(data->m_osSignature == LUTData::EXPECTED_SIGNATURE); + const double *CPL_RESTRICT padfSrc = static_cast<const double *>(pInBuffer); + double *CPL_RESTRICT padfDst = static_cast<double *>(pOutBuffer); + for (size_t i = 0; i < nElts; ++i) + { + for (int iBand = 0; iBand < nInBands; ++iBand) + { + // written this way to work with a NaN value + if (!(*padfSrc != padfInNoData[iBand])) + *padfDst = padfOutNoData[iBand]; + else + *padfDst = data->LookupValue(iBand, *padfSrc); + ++padfSrc; + ++padfDst; + } + } + + return CE_None; +} + +/************************************************************************/ +/* DehazingData */ +/************************************************************************/ + +namespace +{ +/** Working structure for 'Dehazing' builtin function. 
*/
+struct DehazingData
+{
+    static constexpr const char *const EXPECTED_SIGNATURE = "Dehazing";
+    //! Signature (to make sure callback functions are called with the right argument)
+    const std::string m_osSignature = EXPECTED_SIGNATURE;
+
+    //! Nodata value for gain dataset(s)
+    double m_dfGainNodata = std::numeric_limits<double>::quiet_NaN();
+
+    //! Nodata value for offset dataset(s)
+    double m_dfOffsetNodata = std::numeric_limits<double>::quiet_NaN();
+
+    //! Minimum clamping value.
+    double m_dfClampMin = std::numeric_limits<double>::quiet_NaN();
+
+    //! Maximum clamping value.
+    double m_dfClampMax = std::numeric_limits<double>::quiet_NaN();
+
+    //! Map from gain/offset dataset name to datasets
+    std::map<std::string, std::unique_ptr<GDALDataset>> m_oDatasetMap{};
+
+    //! Vector of size nInBands that points to the raster band from which to read gains.
+    std::vector<GDALRasterBand *> m_oGainBands{};
+
+    //! Vector of size nInBands that points to the raster band from which to read offsets.
+    std::vector<GDALRasterBand *> m_oOffsetBands{};
+
+    //! Working buffer that contains gain values.
+    std::vector<VRTProcessedDataset::NoInitByte> m_abyGainBuffer{};
+
+    //! Working buffer that contains offset values.
+    std::vector<VRTProcessedDataset::NoInitByte> m_abyOffsetBuffer{};
+};
+} // namespace
+
+/************************************************************************/
+/*                           CheckAllBands()                            */
+/************************************************************************/
+
+/** Return true if the keys of oMap are the sequence of all integers between
+ * 0 and nExpectedBandCount-1.
+ */
+template <class T>
+static bool CheckAllBands(const std::map<int, T> &oMap, int nExpectedBandCount)
+{
+    int iExpected = 0;
+    for (const auto &kv : oMap)
+    {
+        if (kv.first != iExpected)
+            return false;
+        ++iExpected;
+    }
+    return iExpected == nExpectedBandCount;
+}
+
+/************************************************************************/
+/*                            DehazingInit()                            */
+/************************************************************************/
+
+/** Init function for 'Dehazing' builtin function.
*/ +static CPLErr DehazingInit(const char * /*pszFuncName*/, void * /*pUserData*/, + CSLConstList papszFunctionArgs, int nInBands, + GDALDataType eInDT, double *padfInNoData, + int *pnOutBands, GDALDataType *peOutDT, + double **ppadfOutNoData, const char *pszVRTPath, + VRTPDWorkingDataPtr *ppWorkingData) +{ + CPLAssert(eInDT == GDT_Float64); + + const bool bIsFinalStep = *pnOutBands != 0; + *peOutDT = eInDT; + *ppWorkingData = nullptr; + + if (!bIsFinalStep) + { + *pnOutBands = nInBands; + } + + auto data = std::make_unique<DehazingData>(); + + bool bNodataSpecified = false; + double dfNoData = std::numeric_limits<double>::quiet_NaN(); + + bool bGainNodataSpecified = false; + bool bOffsetNodataSpecified = false; + + std::map<int, std::string> oGainDatasetNameMap; + std::map<int, int> oGainDatasetBandMap; + + std::map<int, std::string> oOffsetDatasetNameMap; + std::map<int, int> oOffsetDatasetBandMap; + + bool bRelativeToVRT = false; + + for (const auto &[pszKey, pszValue] : + cpl::IterateNameValue(papszFunctionArgs)) + { + if (EQUAL(pszKey, "relativeToVRT")) + { + bRelativeToVRT = CPLTestBool(pszValue); + } + else if (EQUAL(pszKey, "nodata")) + { + bNodataSpecified = true; + dfNoData = CPLAtof(pszValue); + } + else if (EQUAL(pszKey, "gain_nodata")) + { + bGainNodataSpecified = true; + data->m_dfGainNodata = CPLAtof(pszValue); + } + else if (EQUAL(pszKey, "offset_nodata")) + { + bOffsetNodataSpecified = true; + data->m_dfOffsetNodata = CPLAtof(pszValue); + } + else if (STARTS_WITH_CI(pszKey, "gain_dataset_filename_")) + { + const int nBand = atoi(pszKey + strlen("gain_dataset_filename_")); + if (nBand <= 0 || nBand > nInBands) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Invalid band in argument '%s'", pszKey); + return CE_Failure; + } + oGainDatasetNameMap[nBand - 1] = pszValue; + } + else if (STARTS_WITH_CI(pszKey, "gain_dataset_band_")) + { + const int nBand = atoi(pszKey + strlen("gain_dataset_band_")); + if (nBand <= 0 || nBand > nInBands) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Invalid band in argument '%s'", pszKey); + return CE_Failure; + } + oGainDatasetBandMap[nBand - 1] = atoi(pszValue); + } + else if (STARTS_WITH_CI(pszKey, "offset_dataset_filename_")) + { + const int nBand = atoi(pszKey + strlen("offset_dataset_filename_")); + if (nBand <= 0 || nBand > nInBands) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Invalid band in argument '%s'", pszKey); + return CE_Failure; + } + oOffsetDatasetNameMap[nBand - 1] = pszValue; + } + else if (STARTS_WITH_CI(pszKey, "offset_dataset_band_")) + { + const int nBand = atoi(pszKey + strlen("offset_dataset_band_")); + if (nBand <= 0 || nBand > nInBands) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Invalid band in argument '%s'", pszKey); + return CE_Failure; + } + oOffsetDatasetBandMap[nBand - 1] = atoi(pszValue); + } + else if (EQUAL(pszKey, "min")) + { + data->m_dfClampMin = CPLAtof(pszValue); + } + else if (EQUAL(pszKey, "max")) + { + data->m_dfClampMax = CPLAtof(pszValue); + } + else + { + CPLError(CE_Warning, CPLE_AppDefined, + "Unrecognized argument name %s. 
Ignored", pszKey); + } + } + + if (!CheckAllBands(oGainDatasetNameMap, nInBands)) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Missing gain_dataset_filename_XX element(s)"); + return CE_Failure; + } + if (!CheckAllBands(oGainDatasetBandMap, nInBands)) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Missing gain_dataset_band_XX element(s)"); + return CE_Failure; + } + if (!CheckAllBands(oOffsetDatasetNameMap, nInBands)) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Missing offset_dataset_filename_XX element(s)"); + return CE_Failure; + } + if (!CheckAllBands(oOffsetDatasetBandMap, nInBands)) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Missing offset_dataset_band_XX element(s)"); + return CE_Failure; + } + + data->m_oGainBands.resize(nInBands); + data->m_oOffsetBands.resize(nInBands); + + constexpr int IDX_GAIN = 0; + constexpr int IDX_OFFSET = 1; + for (int i : {IDX_GAIN, IDX_OFFSET}) + { + const auto &oMapNames = + (i == IDX_GAIN) ? oGainDatasetNameMap : oOffsetDatasetNameMap; + const auto &oMapBands = + (i == IDX_GAIN) ? oGainDatasetBandMap : oOffsetDatasetBandMap; + for (const auto &kv : oMapNames) + { + const int nInBandIdx = kv.first; + const auto osFilename = VRTDataset::BuildSourceFilename( + kv.second.c_str(), pszVRTPath, bRelativeToVRT); + auto oIter = data->m_oDatasetMap.find(osFilename); + if (oIter == data->m_oDatasetMap.end()) + { + auto poDS = std::unique_ptr<GDALDataset>(GDALDataset::Open( + osFilename.c_str(), GDAL_OF_RASTER | GDAL_OF_VERBOSE_ERROR, + nullptr, nullptr, nullptr)); + if (!poDS) + return CE_Failure; + double adfAuxGT[6]; + if (poDS->GetGeoTransform(adfAuxGT) != CE_None) + { + CPLError(CE_Failure, CPLE_AppDefined, + "%s lacks a geotransform", osFilename.c_str()); + return CE_Failure; + } + oIter = data->m_oDatasetMap + .insert(std::pair(osFilename, std::move(poDS))) + .first; + } + auto poDS = oIter->second.get(); + const auto oIterBand = oMapBands.find(nInBandIdx); + CPLAssert(oIterBand != oMapBands.end()); + const int nAuxBand = oIterBand->second; + if (nAuxBand <= 0 || nAuxBand > poDS->GetRasterCount()) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Invalid band number (%d) for a %s dataset", nAuxBand, + (i == IDX_GAIN) ? "gain" : "offset"); + return CE_Failure; + } + auto poAuxBand = poDS->GetRasterBand(nAuxBand); + int bAuxBandHasNoData = false; + const double dfAuxNoData = + poAuxBand->GetNoDataValue(&bAuxBandHasNoData); + if (i == IDX_GAIN) + { + data->m_oGainBands[nInBandIdx] = poAuxBand; + if (!bGainNodataSpecified && bAuxBandHasNoData) + data->m_dfGainNodata = dfAuxNoData; + } + else + { + data->m_oOffsetBands[nInBandIdx] = poAuxBand; + if (!bOffsetNodataSpecified && bAuxBandHasNoData) + data->m_dfOffsetNodata = dfAuxNoData; + } + } + } + + SetOutputValuesForInNoDataAndOutNoData( + nInBands, padfInNoData, pnOutBands, ppadfOutNoData, bNodataSpecified, + dfNoData, bNodataSpecified, dfNoData, bIsFinalStep); + + *ppWorkingData = data.release(); + return CE_None; +} + +/************************************************************************/ +/* DehazingFree() */ +/************************************************************************/ + +/** Free function for 'Dehazing' builtin function. 
*/ +static void DehazingFree(const char * /*pszFuncName*/, void * /*pUserData*/, + VRTPDWorkingDataPtr pWorkingData) +{ + DehazingData *data = static_cast<DehazingData *>(pWorkingData); + CPLAssert(data->m_osSignature == DehazingData::EXPECTED_SIGNATURE); + CPL_IGNORE_RET_VAL(data->m_osSignature); + delete data; +} + +/************************************************************************/ +/* LoadAuxData() */ +/************************************************************************/ + +// Load auxiliary corresponding offset, gain or trimming data. +static bool LoadAuxData(double dfULX, double dfULY, double dfLRX, double dfLRY, + size_t nElts, int nBufXSize, int nBufYSize, + const char *pszAuxType, GDALRasterBand *poAuxBand, + std::vector<VRTProcessedDataset::NoInitByte> &abyBuffer) +{ + double adfAuxGT[6]; + double adfAuxInvGT[6]; + + // Compute pixel/line coordinates from the georeferenced extent + CPL_IGNORE_RET_VAL(poAuxBand->GetDataset()->GetGeoTransform( + adfAuxGT)); // return code already tested + CPL_IGNORE_RET_VAL(GDALInvGeoTransform(adfAuxGT, adfAuxInvGT)); + const double dfULPixel = + adfAuxInvGT[0] + adfAuxInvGT[1] * dfULX + adfAuxInvGT[2] * dfULY; + const double dfULLine = + adfAuxInvGT[3] + adfAuxInvGT[4] * dfULX + adfAuxInvGT[5] * dfULY; + const double dfLRPixel = + adfAuxInvGT[0] + adfAuxInvGT[1] * dfLRX + adfAuxInvGT[2] * dfLRY; + const double dfLRLine = + adfAuxInvGT[3] + adfAuxInvGT[4] * dfLRX + adfAuxInvGT[5] * dfLRY; + if (dfULPixel >= dfLRPixel || dfULLine >= dfLRLine) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Unexpected computed %s pixel/line", pszAuxType); + return false; + } + if (dfULPixel < -1 || dfLRPixel > poAuxBand->GetXSize() || dfULLine < -1 || + dfLRLine > poAuxBand->GetYSize()) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Unexpected computed %s pixel/line", pszAuxType); + return false; + } + + const int nAuxXOff = std::max(0, static_cast<int>(std::round(dfULPixel))); + const int nAuxYOff = std::max(0, static_cast<int>(std::round(dfULLine))); + const int nAuxX2Off = std::min(poAuxBand->GetXSize(), + static_cast<int>(std::round(dfLRPixel))); + const int nAuxY2Off = + std::min(poAuxBand->GetYSize(), static_cast<int>(std::round(dfLRLine))); + + try + { + abyBuffer.resize(nElts * sizeof(float)); + } + catch (const std::bad_alloc &) + { + CPLError(CE_Failure, CPLE_OutOfMemory, + "Out of memory allocating working buffer"); + return false; + } + GDALRasterIOExtraArg sExtraArg; + INIT_RASTERIO_EXTRA_ARG(sExtraArg); + sExtraArg.bFloatingPointWindowValidity = true; + CPL_IGNORE_RET_VAL(sExtraArg.eResampleAlg); + sExtraArg.eResampleAlg = GRIORA_Bilinear; + sExtraArg.dfXOff = std::max(0.0, dfULPixel); + sExtraArg.dfYOff = std::max(0.0, dfULLine); + sExtraArg.dfXSize = std::min<double>(poAuxBand->GetXSize(), dfLRPixel) - + std::max(0.0, dfULPixel); + sExtraArg.dfYSize = std::min<double>(poAuxBand->GetYSize(), dfLRLine) - + std::max(0.0, dfULLine); + return (poAuxBand->RasterIO( + GF_Read, nAuxXOff, nAuxYOff, std::max(1, nAuxX2Off - nAuxXOff), + std::max(1, nAuxY2Off - nAuxYOff), abyBuffer.data(), nBufXSize, + nBufYSize, GDT_Float32, 0, 0, &sExtraArg) == CE_None); +} + +/************************************************************************/ +/* DehazingProcess() */ +/************************************************************************/ + +/** Processing function for 'Dehazing' builtin function. 
*/ +static CPLErr DehazingProcess( + const char * /*pszFuncName*/, void * /*pUserData*/, + VRTPDWorkingDataPtr pWorkingData, CSLConstList /* papszFunctionArgs*/, + int nBufXSize, int nBufYSize, const void *pInBuffer, size_t nInBufferSize, + GDALDataType eInDT, int nInBands, const double *CPL_RESTRICT padfInNoData, + void *pOutBuffer, size_t nOutBufferSize, GDALDataType eOutDT, int nOutBands, + const double *CPL_RESTRICT padfOutNoData, double dfSrcXOff, + double dfSrcYOff, double dfSrcXSize, double dfSrcYSize, + const double adfSrcGT[], const char * /* pszVRTPath */, + CSLConstList /*papszExtra*/) +{ + const size_t nElts = static_cast<size_t>(nBufXSize) * nBufYSize; + + CPL_IGNORE_RET_VAL(eInDT); + CPLAssert(eInDT == GDT_Float64); + CPL_IGNORE_RET_VAL(eOutDT); + CPLAssert(eOutDT == GDT_Float64); + CPL_IGNORE_RET_VAL(nInBufferSize); + CPLAssert(nInBufferSize == nElts * nInBands * sizeof(double)); + CPL_IGNORE_RET_VAL(nOutBufferSize); + CPLAssert(nOutBufferSize == nElts * nOutBands * sizeof(double)); + CPLAssert(nInBands == nOutBands); + CPL_IGNORE_RET_VAL(nOutBands); + + DehazingData *data = static_cast<DehazingData *>(pWorkingData); + CPLAssert(data->m_osSignature == DehazingData::EXPECTED_SIGNATURE); + const double *CPL_RESTRICT padfSrc = static_cast<const double *>(pInBuffer); + double *CPL_RESTRICT padfDst = static_cast<double *>(pOutBuffer); + + // Compute georeferenced extent of input region + const double dfULX = + adfSrcGT[0] + adfSrcGT[1] * dfSrcXOff + adfSrcGT[2] * dfSrcYOff; + const double dfULY = + adfSrcGT[3] + adfSrcGT[4] * dfSrcXOff + adfSrcGT[5] * dfSrcYOff; + const double dfLRX = adfSrcGT[0] + adfSrcGT[1] * (dfSrcXOff + dfSrcXSize) + + adfSrcGT[2] * (dfSrcYOff + dfSrcYSize); + const double dfLRY = adfSrcGT[3] + adfSrcGT[4] * (dfSrcXOff + dfSrcXSize) + + adfSrcGT[5] * (dfSrcYOff + dfSrcYSize); + + auto &abyOffsetBuffer = data->m_abyGainBuffer; + auto &abyGainBuffer = data->m_abyOffsetBuffer; + + for (int iBand = 0; iBand < nInBands; ++iBand) + { + if (!LoadAuxData(dfULX, dfULY, dfLRX, dfLRY, nElts, nBufXSize, + nBufYSize, "gain", data->m_oGainBands[iBand], + abyGainBuffer) || + !LoadAuxData(dfULX, dfULY, dfLRX, dfLRY, nElts, nBufXSize, + nBufYSize, "offset", data->m_oOffsetBands[iBand], + abyOffsetBuffer)) + { + return CE_Failure; + } + + const double *CPL_RESTRICT padfSrcThisBand = padfSrc + iBand; + double *CPL_RESTRICT padfDstThisBand = padfDst + iBand; + const float *pafGain = + reinterpret_cast<const float *>(abyGainBuffer.data()); + const float *pafOffset = + reinterpret_cast<const float *>(abyOffsetBuffer.data()); + const double dfSrcNodata = padfInNoData[iBand]; + const double dfDstNodata = padfOutNoData[iBand]; + const double dfGainNodata = data->m_dfGainNodata; + const double dfOffsetNodata = data->m_dfOffsetNodata; + const double dfClampMin = data->m_dfClampMin; + const double dfClampMax = data->m_dfClampMax; + for (size_t i = 0; i < nElts; ++i) + { + const double dfSrcVal = *padfSrcThisBand; + // written this way to work with a NaN value + if (!(dfSrcVal != dfSrcNodata)) + { + *padfDstThisBand = dfDstNodata; + } + else + { + const double dfGain = pafGain[i]; + const double dfOffset = pafOffset[i]; + if (!(dfGain != dfGainNodata) || !(dfOffset != dfOffsetNodata)) + { + *padfDstThisBand = dfDstNodata; + } + else + { + double dfDehazed = dfSrcVal * dfGain - dfOffset; + if (dfDehazed < dfClampMin) + dfDehazed = dfClampMin; + if (dfDehazed > dfClampMax) + dfDehazed = dfClampMax; + + *padfDstThisBand = dfDehazed; + } + } + padfSrcThisBand += nInBands; + padfDstThisBand 
+= nInBands; + } + } + + return CE_None; +} + +/************************************************************************/ +/* TrimmingData */ +/************************************************************************/ + +namespace +{ +/** Working structure for 'Trimming' builtin function. */ +struct TrimmingData +{ + static constexpr const char *const EXPECTED_SIGNATURE = "Trimming"; + //! Signature (to make sure callback functions are called with the right argument) + const std::string m_osSignature = EXPECTED_SIGNATURE; + + //! Nodata value for trimming dataset + double m_dfTrimmingNodata = std::numeric_limits<double>::quiet_NaN(); + + //! Maximum saturating RGB output value. + double m_dfTopRGB = 0; + + //! Maximum threshold beyond which we give up saturation + double m_dfToneCeil = 0; + + //! Margin to allow for dynamics in brighest areas (in [0,1] range) + double m_dfTopMargin = 0; + + //! Index (zero-based) of input/output red band. + int m_nRedBand = 1 - 1; + + //! Index (zero-based) of input/output green band. + int m_nGreenBand = 2 - 1; + + //! Index (zero-based) of input/output blue band. + int m_nBlueBand = 3 - 1; + + //! Trimming dataset + std::unique_ptr<GDALDataset> m_poTrimmingDS{}; + + //! Trimming raster band. + GDALRasterBand *m_poTrimmingBand = nullptr; + + //! Working buffer that contain trimming values. + std::vector<VRTProcessedDataset::NoInitByte> m_abyTrimmingBuffer{}; +}; +} // namespace + +/************************************************************************/ +/* TrimmingInit() */ +/************************************************************************/ + +/** Init function for 'Trimming' builtin function. */ +static CPLErr TrimmingInit(const char * /*pszFuncName*/, void * /*pUserData*/, + CSLConstList papszFunctionArgs, int nInBands, + GDALDataType eInDT, double *padfInNoData, + int *pnOutBands, GDALDataType *peOutDT, + double **ppadfOutNoData, const char *pszVRTPath, + VRTPDWorkingDataPtr *ppWorkingData) +{ + CPLAssert(eInDT == GDT_Float64); + + const bool bIsFinalStep = *pnOutBands != 0; + *peOutDT = eInDT; + *ppWorkingData = nullptr; + + if (!bIsFinalStep) + { + *pnOutBands = nInBands; + } + + auto data = std::make_unique<TrimmingData>(); + + bool bNodataSpecified = false; + double dfNoData = std::numeric_limits<double>::quiet_NaN(); + std::string osTrimmingFilename; + bool bTrimmingNodataSpecified = false; + bool bRelativeToVRT = false; + + for (const auto &[pszKey, pszValue] : + cpl::IterateNameValue(papszFunctionArgs)) + { + if (EQUAL(pszKey, "relativeToVRT")) + { + bRelativeToVRT = CPLTestBool(pszValue); + } + else if (EQUAL(pszKey, "nodata")) + { + bNodataSpecified = true; + dfNoData = CPLAtof(pszValue); + } + else if (EQUAL(pszKey, "trimming_nodata")) + { + bTrimmingNodataSpecified = true; + data->m_dfTrimmingNodata = CPLAtof(pszValue); + } + else if (EQUAL(pszKey, "trimming_dataset_filename")) + { + osTrimmingFilename = pszValue; + } + else if (EQUAL(pszKey, "red_band")) + { + const int nBand = atoi(pszValue) - 1; + if (nBand < 0 || nBand >= nInBands) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Invalid band in argument '%s'", pszKey); + return CE_Failure; + } + data->m_nRedBand = nBand; + } + else if (EQUAL(pszKey, "green_band")) + { + const int nBand = atoi(pszValue) - 1; + if (nBand < 0 || nBand >= nInBands) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Invalid band in argument '%s'", pszKey); + return CE_Failure; + } + data->m_nGreenBand = nBand; + } + else if (EQUAL(pszKey, "blue_band")) + { + const int nBand = atoi(pszValue) - 1; + if 
(nBand < 0 || nBand >= nInBands) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Invalid band in argument '%s'", pszKey); + return CE_Failure; + } + data->m_nBlueBand = nBand; + } + else if (EQUAL(pszKey, "top_rgb")) + { + data->m_dfTopRGB = CPLAtof(pszValue); + } + else if (EQUAL(pszKey, "tone_ceil")) + { + data->m_dfToneCeil = CPLAtof(pszValue); + } + else if (EQUAL(pszKey, "top_margin")) + { + data->m_dfTopMargin = CPLAtof(pszValue); + } + else + { + CPLError(CE_Warning, CPLE_AppDefined, + "Unrecognized argument name %s. Ignored", pszKey); + } + } + + if (data->m_nRedBand == data->m_nGreenBand || + data->m_nRedBand == data->m_nBlueBand || + data->m_nGreenBand == data->m_nBlueBand) + { + CPLError( + CE_Failure, CPLE_NotSupported, + "red_band, green_band and blue_band must have distinct values"); + return CE_Failure; + } + + const auto osFilename = VRTDataset::BuildSourceFilename( + osTrimmingFilename.c_str(), pszVRTPath, bRelativeToVRT); + data->m_poTrimmingDS.reset(GDALDataset::Open( + osFilename.c_str(), GDAL_OF_RASTER | GDAL_OF_VERBOSE_ERROR, nullptr, + nullptr, nullptr)); + if (!data->m_poTrimmingDS) + return CE_Failure; + if (data->m_poTrimmingDS->GetRasterCount() != 1) + { + CPLError(CE_Failure, CPLE_NotSupported, + "Trimming dataset should have a single band"); + return CE_Failure; + } + data->m_poTrimmingBand = data->m_poTrimmingDS->GetRasterBand(1); + + double adfAuxGT[6]; + if (data->m_poTrimmingDS->GetGeoTransform(adfAuxGT) != CE_None) + { + CPLError(CE_Failure, CPLE_AppDefined, "%s lacks a geotransform", + osFilename.c_str()); + return CE_Failure; + } + int bAuxBandHasNoData = false; + const double dfAuxNoData = + data->m_poTrimmingBand->GetNoDataValue(&bAuxBandHasNoData); + if (!bTrimmingNodataSpecified && bAuxBandHasNoData) + data->m_dfTrimmingNodata = dfAuxNoData; + + SetOutputValuesForInNoDataAndOutNoData( + nInBands, padfInNoData, pnOutBands, ppadfOutNoData, bNodataSpecified, + dfNoData, bNodataSpecified, dfNoData, bIsFinalStep); + + *ppWorkingData = data.release(); + return CE_None; +} + +/************************************************************************/ +/* TrimmingFree() */ +/************************************************************************/ + +/** Free function for 'Trimming' builtin function. */ +static void TrimmingFree(const char * /*pszFuncName*/, void * /*pUserData*/, + VRTPDWorkingDataPtr pWorkingData) +{ + TrimmingData *data = static_cast<TrimmingData *>(pWorkingData); + CPLAssert(data->m_osSignature == TrimmingData::EXPECTED_SIGNATURE); + CPL_IGNORE_RET_VAL(data->m_osSignature); + delete data; +} + +/************************************************************************/ +/* TrimmingProcess() */ +/************************************************************************/ + +/** Processing function for 'Trimming' builtin function. 
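It reads the local saturation values from the trimming dataset, derives a per-pixel reduction factor from top_rgb and top_margin, applies tone-ceiling-aware scaling to the RGB bands capped at top_rgb, and applies the plain reduction factor to the remaining bands.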
*/ +static CPLErr TrimmingProcess( + const char * /*pszFuncName*/, void * /*pUserData*/, + VRTPDWorkingDataPtr pWorkingData, CSLConstList /* papszFunctionArgs*/, + int nBufXSize, int nBufYSize, const void *pInBuffer, size_t nInBufferSize, + GDALDataType eInDT, int nInBands, const double *CPL_RESTRICT padfInNoData, + void *pOutBuffer, size_t nOutBufferSize, GDALDataType eOutDT, int nOutBands, + const double *CPL_RESTRICT padfOutNoData, double dfSrcXOff, + double dfSrcYOff, double dfSrcXSize, double dfSrcYSize, + const double adfSrcGT[], const char * /* pszVRTPath */, + CSLConstList /*papszExtra*/) +{ + const size_t nElts = static_cast<size_t>(nBufXSize) * nBufYSize; + + CPL_IGNORE_RET_VAL(eInDT); + CPLAssert(eInDT == GDT_Float64); + CPL_IGNORE_RET_VAL(eOutDT); + CPLAssert(eOutDT == GDT_Float64); + CPL_IGNORE_RET_VAL(nInBufferSize); + CPLAssert(nInBufferSize == nElts * nInBands * sizeof(double)); + CPL_IGNORE_RET_VAL(nOutBufferSize); + CPLAssert(nOutBufferSize == nElts * nOutBands * sizeof(double)); + CPLAssert(nInBands == nOutBands); + CPL_IGNORE_RET_VAL(nOutBands); + + TrimmingData *data = static_cast<TrimmingData *>(pWorkingData); + CPLAssert(data->m_osSignature == TrimmingData::EXPECTED_SIGNATURE); + const double *CPL_RESTRICT padfSrc = static_cast<const double *>(pInBuffer); + double *CPL_RESTRICT padfDst = static_cast<double *>(pOutBuffer); + + // Compute georeferenced extent of input region + const double dfULX = + adfSrcGT[0] + adfSrcGT[1] * dfSrcXOff + adfSrcGT[2] * dfSrcYOff; + const double dfULY = + adfSrcGT[3] + adfSrcGT[4] * dfSrcXOff + adfSrcGT[5] * dfSrcYOff; + const double dfLRX = adfSrcGT[0] + adfSrcGT[1] * (dfSrcXOff + dfSrcXSize) + + adfSrcGT[2] * (dfSrcYOff + dfSrcYSize); + const double dfLRY = adfSrcGT[3] + adfSrcGT[4] * (dfSrcXOff + dfSrcXSize) + + adfSrcGT[5] * (dfSrcYOff + dfSrcYSize); + + if (!LoadAuxData(dfULX, dfULY, dfLRX, dfLRY, nElts, nBufXSize, nBufYSize, + "trimming", data->m_poTrimmingBand, + data->m_abyTrimmingBuffer)) + { + return CE_Failure; + } + + const float *pafTrimming = + reinterpret_cast<const float *>(data->m_abyTrimmingBuffer.data()); + const int nRedBand = data->m_nRedBand; + const int nGreenBand = data->m_nGreenBand; + const int nBlueBand = data->m_nBlueBand; + const double dfTopMargin = data->m_dfTopMargin; + const double dfTopRGB = data->m_dfTopRGB; + const double dfToneCeil = data->m_dfToneCeil; +#if !defined(trimming_non_optimized_version) + const double dfInvToneCeil = 1.0 / dfToneCeil; +#endif + const bool bRGBBandsAreFirst = + std::max(std::max(nRedBand, nGreenBand), nBlueBand) <= 2; + const double dfNoDataTrimming = data->m_dfTrimmingNodata; + const double dfNoDataRed = padfInNoData[nRedBand]; + const double dfNoDataGreen = padfInNoData[nGreenBand]; + const double dfNoDataBlue = padfInNoData[nBlueBand]; + for (size_t i = 0; i < nElts; ++i) + { + // Extract local saturation value from trimming image + const double dfLocalMaxRGB = pafTrimming[i]; + const double dfReducedRGB = + std::min((1.0 - dfTopMargin) * dfTopRGB / dfLocalMaxRGB, 1.0); + + const double dfRed = padfSrc[nRedBand]; + const double dfGreen = padfSrc[nGreenBand]; + const double dfBlue = padfSrc[nBlueBand]; + bool bNoDataPixel = false; + if ((dfLocalMaxRGB != dfNoDataTrimming) && (dfRed != dfNoDataRed) && + (dfGreen != dfNoDataGreen) && (dfBlue != dfNoDataBlue)) + { + // RGB bands specific process + const double dfMaxRGB = std::max(std::max(dfRed, dfGreen), dfBlue); +#if !defined(trimming_non_optimized_version) + const double dfRedTimesToneRed = std::min(dfRed, dfToneCeil); + 
const double dfGreenTimesToneGreen = std::min(dfGreen, dfToneCeil); + const double dfBlueTimesToneBlue = std::min(dfBlue, dfToneCeil); + const double dfInvToneMaxRGB = + std::max(dfMaxRGB * dfInvToneCeil, 1.0); + const double dfReducedRGBTimesInvToneMaxRGB = + dfReducedRGB * dfInvToneMaxRGB; + padfDst[nRedBand] = std::min( + dfRedTimesToneRed * dfReducedRGBTimesInvToneMaxRGB, dfTopRGB); + padfDst[nGreenBand] = + std::min(dfGreenTimesToneGreen * dfReducedRGBTimesInvToneMaxRGB, + dfTopRGB); + padfDst[nBlueBand] = std::min( + dfBlueTimesToneBlue * dfReducedRGBTimesInvToneMaxRGB, dfTopRGB); +#else + // Original formulas. Slightly less optimized than the above ones. + const double dfToneMaxRGB = std::min(dfToneCeil / dfMaxRGB, 1.0); + const double dfToneRed = std::min(dfToneCeil / dfRed, 1.0); + const double dfToneGreen = std::min(dfToneCeil / dfGreen, 1.0); + const double dfToneBlue = std::min(dfToneCeil / dfBlue, 1.0); + padfDst[nRedBand] = std::min( + dfReducedRGB * dfRed * dfToneRed / dfToneMaxRGB, dfTopRGB); + padfDst[nGreenBand] = std::min( + dfReducedRGB * dfGreen * dfToneGreen / dfToneMaxRGB, dfTopRGB); + padfDst[nBlueBand] = std::min( + dfReducedRGB * dfBlue * dfToneBlue / dfToneMaxRGB, dfTopRGB); +#endif + + // Other bands processing (NIR, ...): only apply RGB reduction factor + if (bRGBBandsAreFirst) + { + // optimization + for (int iBand = 3; iBand < nInBands; ++iBand) + { + if (padfSrc[iBand] != padfInNoData[iBand]) + { + padfDst[iBand] = dfReducedRGB * padfSrc[iBand]; + } + else + { + bNoDataPixel = true; + break; + } + } + } + else + { + for (int iBand = 0; iBand < nInBands; ++iBand) + { + if (iBand != nRedBand && iBand != nGreenBand && + iBand != nBlueBand) + { + if (padfSrc[iBand] != padfInNoData[iBand]) + { + padfDst[iBand] = dfReducedRGB * padfSrc[iBand]; + } + else + { + bNoDataPixel = true; + break; + } + } + } + } + } + else + { + bNoDataPixel = true; + } + if (bNoDataPixel) + { + for (int iBand = 0; iBand < nInBands; ++iBand) + { + padfDst[iBand] = padfOutNoData[iBand]; + } + } + + padfSrc += nInBands; + padfDst += nInBands; + } + + return CE_None; +} + +/************************************************************************/ +/* GDALVRTRegisterDefaultProcessedDatasetFuncs() */ +/************************************************************************/ + +/** Register builtin functions that can be used in a VRTProcessedDataset. + */ +void GDALVRTRegisterDefaultProcessedDatasetFuncs() +{ + GDALVRTRegisterProcessedDatasetFunc( + "BandAffineCombination", nullptr, + "<ProcessedDatasetFunctionArgumentsList>" + " <Argument name='src_nodata' type='double' " + "description='Override input nodata value'/>" + " <Argument name='dst_nodata' type='double' " + "description='Override output nodata value'/>" + " <Argument name='replacement_nodata' " + "description='value to substitute to a valid computed value that " + "would be nodata' type='double'/>" + " <Argument name='dst_intended_datatype' type='string' " + "description='Intented datatype of output (which might be " + "different than the working data type)'/>" + " <Argument name='coefficients_{band}' " + "description='Comma-separated coefficients for combining bands. 
" + "First one is constant term' " + "type='double_list' required='true'/>" + " <Argument name='min' description='clamp min value' type='double'/>" + " <Argument name='max' description='clamp max value' type='double'/>" + "</ProcessedDatasetFunctionArgumentsList>", + GDT_Float64, nullptr, 0, nullptr, 0, BandAffineCombinationInit, + BandAffineCombinationFree, BandAffineCombinationProcess, nullptr); + + GDALVRTRegisterProcessedDatasetFunc( + "LUT", nullptr, + "<ProcessedDatasetFunctionArgumentsList>" + " <Argument name='src_nodata' type='double' " + "description='Override input nodata value'/>" + " <Argument name='dst_nodata' type='double' " + "description='Override output nodata value'/>" + " <Argument name='lut_{band}' " + "description='List of the form [src value 1]:[dest value 1]," + "[src value 2]:[dest value 2],...' " + "type='string' required='true'/>" + "</ProcessedDatasetFunctionArgumentsList>", + GDT_Float64, nullptr, 0, nullptr, 0, LUTInit, LUTFree, LUTProcess, + nullptr); + + GDALVRTRegisterProcessedDatasetFunc( + "Dehazing", nullptr, + "<ProcessedDatasetFunctionArgumentsList>" + " <Argument name='relativeToVRT' " + "description='Whether gain and offset filenames are relative to " + "the VRT' type='boolean' default='false'/>" + " <Argument name='gain_dataset_filename_{band}' " + "description='Filename to the gain dataset' " + "type='string' required='true'/>" + " <Argument name='gain_dataset_band_{band}' " + "description='Band of the gain dataset' " + "type='integer' required='true'/>" + " <Argument name='offset_dataset_filename_{band}' " + "description='Filename to the offset dataset' " + "type='string' required='true'/>" + " <Argument name='offset_dataset_band_{band}' " + "description='Band of the offset dataset' " + "type='integer' required='true'/>" + " <Argument name='min' description='clamp min value' type='double'/>" + " <Argument name='max' description='clamp max value' type='double'/>" + " <Argument name='nodata' type='double' " + "description='Override dataset nodata value'/>" + " <Argument name='gain_nodata' type='double' " + "description='Override gain dataset nodata value'/>" + " <Argument name='offset_nodata' type='double' " + "description='Override offset dataset nodata value'/>" + "</ProcessedDatasetFunctionArgumentsList>", + GDT_Float64, nullptr, 0, nullptr, 0, DehazingInit, DehazingFree, + DehazingProcess, nullptr); + + GDALVRTRegisterProcessedDatasetFunc( + "Trimming", nullptr, + "<ProcessedDatasetFunctionArgumentsList>" + " <Argument name='relativeToVRT' " + "description='Whether trimming_dataset_filename is relative to the VRT'" + " type='boolean' default='false'/>" + " <Argument name='trimming_dataset_filename' " + "description='Filename to the trimming dataset' " + "type='string' required='true'/>" + " <Argument name='red_band' type='integer' default='1'/>" + " <Argument name='green_band' type='integer' default='2'/>" + " <Argument name='blue_band' type='integer' default='3'/>" + " <Argument name='top_rgb' " + "description='Maximum saturating RGB output value' " + "type='double' required='true'/>" + " <Argument name='tone_ceil' " + "description='Maximum threshold beyond which we give up saturation' " + "type='double' required='true'/>" + " <Argument name='top_margin' " + "description='Margin to allow for dynamics in brighest areas " + "(between 0 and 1, should be close to 0)' " + "type='double' required='true'/>" + " <Argument name='nodata' type='double' " + "description='Override dataset nodata value'/>" + " <Argument name='trimming_nodata' type='double' " 
+ "description='Override trimming dataset nodata value'/>" + "</ProcessedDatasetFunctionArgumentsList>", + GDT_Float64, nullptr, 0, nullptr, 0, TrimmingInit, TrimmingFree, + TrimmingProcess, nullptr); +} diff --git a/frmts/vrt/vrtsources.cpp b/frmts/vrt/vrtsources.cpp index b10ce6d44123..562d5e8c77ad 100644 --- a/frmts/vrt/vrtsources.cpp +++ b/frmts/vrt/vrtsources.cpp @@ -285,11 +285,6 @@ void VRTSimpleSource::GetDstWindow(double &dfDstXOff, double &dfDstYOff, /* SerializeToXML() */ /************************************************************************/ -static const char *const apszSpecialSyntax[] = { - "NITF_IM:{ANY}:{FILENAME}", "PDF:{ANY}:{FILENAME}", - "RASTERLITE:{FILENAME},{ANY}", "TILEDB:\"{FILENAME}\":{ANY}", - "TILEDB:{FILENAME}:{ANY}"}; - static bool IsSlowSource(const char *pszSrcName) { return strstr(pszSrcName, "/vsicurl/http") != nullptr || @@ -350,11 +345,8 @@ CPLXMLNode *VRTSimpleSource::SerializeToXML(const char *pszVRTPath) } else { - for (size_t i = 0; - i < sizeof(apszSpecialSyntax) / sizeof(apszSpecialSyntax[0]); - ++i) + for (const char *pszSyntax : VRTDataset::apszSpecialSyntax) { - const char *const pszSyntax = apszSpecialSyntax[i]; CPLString osPrefix(pszSyntax); osPrefix.resize(strchr(pszSyntax, ':') - pszSyntax + 1); if (pszSyntax[osPrefix.size()] == '"') @@ -552,86 +544,8 @@ VRTSimpleSource::XMLInit(const CPLXMLNode *psSrc, const char *pszVRTPath, m_nExplicitSharedStatus = CPLTestBool(pszShared); } - if (pszVRTPath != nullptr && m_bRelativeToVRTOri) - { - // Try subdatasetinfo API first - // Note: this will become the only branch when subdatasetinfo will become - // available for NITF_IM, RASTERLITE and TILEDB - const auto oSubDSInfo{GDALGetSubdatasetInfo(pszFilename)}; - if (oSubDSInfo && !oSubDSInfo->GetPathComponent().empty()) - { - auto path{oSubDSInfo->GetPathComponent()}; - m_osSrcDSName = oSubDSInfo->ModifyPathComponent( - CPLProjectRelativeFilename(pszVRTPath, path.c_str())); - GDALDestroySubdatasetInfo(oSubDSInfo); - } - else - { - bool bDone = false; - for (size_t i = 0; - i < sizeof(apszSpecialSyntax) / sizeof(apszSpecialSyntax[0]); - ++i) - { - const char *pszSyntax = apszSpecialSyntax[i]; - CPLString osPrefix(pszSyntax); - osPrefix.resize(strchr(pszSyntax, ':') - pszSyntax + 1); - if (pszSyntax[osPrefix.size()] == '"') - osPrefix += '"'; - if (EQUALN(pszFilename, osPrefix, osPrefix.size())) - { - if (STARTS_WITH_CI(pszSyntax + osPrefix.size(), "{ANY}")) - { - const char *pszLastPart = strrchr(pszFilename, ':') + 1; - // CSV:z:/foo.xyz - if ((pszLastPart[0] == '/' || pszLastPart[0] == '\\') && - pszLastPart - pszFilename >= 3 && - pszLastPart[-3] == ':') - { - pszLastPart -= 2; - } - CPLString osPrefixFilename = pszFilename; - osPrefixFilename.resize(pszLastPart - pszFilename); - m_osSrcDSName = - osPrefixFilename + - CPLProjectRelativeFilename(pszVRTPath, pszLastPart); - bDone = true; - } - else if (STARTS_WITH_CI(pszSyntax + osPrefix.size(), - "{FILENAME}")) - { - CPLString osFilename(pszFilename + osPrefix.size()); - size_t nPos = 0; - if (osFilename.size() >= 3 && osFilename[1] == ':' && - (osFilename[2] == '\\' || osFilename[2] == '/')) - nPos = 2; - nPos = osFilename.find( - pszSyntax[osPrefix.size() + strlen("{FILENAME}")], - nPos); - if (nPos != std::string::npos) - { - const CPLString osSuffix = osFilename.substr(nPos); - osFilename.resize(nPos); - m_osSrcDSName = osPrefix + - CPLProjectRelativeFilename( - pszVRTPath, osFilename) + - osSuffix; - bDone = true; - } - } - break; - } - } - if (!bDone) - { - m_osSrcDSName = - 
CPLProjectRelativeFilename(pszVRTPath, pszFilename); - } - } - } - else - { - m_osSrcDSName = pszFilename; - } + m_osSrcDSName = VRTDataset::BuildSourceFilename( + pszFilename, pszVRTPath, CPL_TO_BOOL(m_bRelativeToVRTOri)); const char *pszSourceBand = CPLGetXMLValue(psSrc, "SourceBand", "1"); m_bGetMaskBand = false; diff --git a/gcore/gdal.h b/gcore/gdal.h index 313e1362722d..9599f1d2b143 100644 --- a/gcore/gdal.h +++ b/gcore/gdal.h @@ -1640,6 +1640,115 @@ CPLErr CPL_DLL CPL_STDCALL GDALAddDerivedBandPixelFuncWithArgs( const char *pszName, GDALDerivedPixelFuncWithArgs pfnPixelFunc, const char *pszMetadata); +/** Generic pointer for the working structure of VRTProcessedDataset + * function. */ +typedef void *VRTPDWorkingDataPtr; + +/** Initialization function to pass to GDALVRTRegisterProcessedDatasetFunc. + * + * This initialization function is called for each step of a VRTProcessedDataset + * that uses the related algorithm. + * The initialization function returns the output data type, output band count + * and potentially initializes a working structure, typically parsing arguments. + * + * @param pszFuncName Function name. Must be unique and not null. + * @param pUserData User data. May be nullptr. Must remain valid during the + * lifetime of GDAL. + * @param papszFunctionArgs Function arguments as a list of key=value pairs. + * @param nInBands Number of input bands. + * @param eInDT Input data type. + * @param[in,out] padfInNoData Array of nInBands values for the input nodata + * value. The init function may also override them. + * @param[in,out] pnOutBands Pointer whose value must be set to the number of + * output bands. This will be set to 0 by the caller + * when calling the function, unless this is the + * final step, in which case it will be initialized + * with the number of expected output bands. + * @param[out] peOutDT Pointer whose value must be set to the output + * data type. + * @param[in,out] ppadfOutNoData Pointer to an array of *pnOutBands values + * for the output nodata value that the + * function must set. + * For non-final steps, *ppadfOutNoData + * will be nullptr and it is the responsibility + * of the function to CPLMalloc()'ate it. + * If this is the final step, it will be + * already allocated and initialized with the + * expected nodata values from the output + * dataset (if the init function need to + * reallocate it, it must use CPLRealloc()) + * @param pszVRTPath Directory of the VRT + * @param[out] ppWorkingData Pointer whose value must be set to a working + * structure, or nullptr. + * @return CE_None in case of success, error otherwise. + * @since GDAL 3.9 */ +typedef CPLErr (*GDALVRTProcessedDatasetFuncInit)( + const char *pszFuncName, void *pUserData, CSLConstList papszFunctionArgs, + int nInBands, GDALDataType eInDT, double *padfInNoData, int *pnOutBands, + GDALDataType *peOutDT, double **ppadfOutNoData, const char *pszVRTPath, + VRTPDWorkingDataPtr *ppWorkingData); + +/** Free function to pass to GDALVRTRegisterProcessedDatasetFunc. + * + * @param pszFuncName Function name. Must be unique and not null. + * @param pUserData User data. May be nullptr. Must remain valid during the + * lifetime of GDAL. + * @param pWorkingData Value of the *ppWorkingData output parameter of + * GDALVRTProcessedDatasetFuncInit. + * @since GDAL 3.9 + */ +typedef void (*GDALVRTProcessedDatasetFuncFree)( + const char *pszFuncName, void *pUserData, VRTPDWorkingDataPtr pWorkingData); + +/** Processing function to pass to GDALVRTRegisterProcessedDatasetFunc. 
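It is invoked to compute the pixel-interleaved output buffer from the pixel-interleaved input buffer over the requested source region.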
+ * @param pszFuncName Function name. Must be unique and not null. + * @param pUserData User data. May be nullptr. Must remain valid during the + * lifetime of GDAL. + * @param pWorkingData Value of the *ppWorkingData output parameter of + * GDALVRTProcessedDatasetFuncInit. + * @param papszFunctionArgs Function arguments as a list of key=value pairs. + * @param nBufXSize Width in pixels of pInBuffer and pOutBuffer + * @param nBufYSize Height in pixels of pInBuffer and pOutBuffer + * @param pInBuffer Input buffer. It is pixel-interleaved + * (i.e. R00,G00,B00,R01,G01,B01, etc.) + * @param nInBufferSize Size in bytes of pInBuffer + * @param eInDT Data type of pInBuffer + * @param nInBands Number of bands in pInBuffer. + * @param padfInNoData Input nodata values. + * @param pOutBuffer Output buffer. It is pixel-interleaved + * (i.e. R00,G00,B00,R01,G01,B01, etc.) + * @param nOutBufferSize Size in bytes of pOutBuffer + * @param eOutDT Data type of pOutBuffer + * @param nOutBands Number of bands in pOutBuffer. + * @param padfOutNoData Input nodata values. + * @param dfSrcXOff Source X coordinate in pixel of the top-left of the region + * @param dfSrcYOff Source Y coordinate in pixel of the top-left of the region + * @param dfSrcXSize Width in pixels of the region + * @param dfSrcYSize Height in pixels of the region + * @param adfSrcGT Source geotransform + * @param pszVRTPath Directory of the VRT + * @param papszExtra Extra arguments (unused for now) + * @since GDAL 3.9 + */ +typedef CPLErr (*GDALVRTProcessedDatasetFuncProcess)( + const char *pszFuncName, void *pUserData, VRTPDWorkingDataPtr pWorkingData, + CSLConstList papszFunctionArgs, int nBufXSize, int nBufYSize, + const void *pInBuffer, size_t nInBufferSize, GDALDataType eInDT, + int nInBands, const double *padfInNoData, void *pOutBuffer, + size_t nOutBufferSize, GDALDataType eOutDT, int nOutBands, + const double *padfOutNoData, double dfSrcXOff, double dfSrcYOff, + double dfSrcXSize, double dfSrcYSize, const double adfSrcGT[/*6*/], + const char *pszVRTPath, CSLConstList papszExtra); + +CPLErr CPL_DLL GDALVRTRegisterProcessedDatasetFunc( + const char *pszFuncName, void *pUserData, const char *pszXMLMetadata, + GDALDataType eRequestedInputDT, const GDALDataType *paeSupportedInputDT, + size_t nSupportedInputDTSize, const int *panSupportedInputBandCount, + size_t nSupportedInputBandCountSize, + GDALVRTProcessedDatasetFuncInit pfnInit, + GDALVRTProcessedDatasetFuncFree pfnFree, + GDALVRTProcessedDatasetFuncProcess pfnProcess, CSLConstList papszOptions); + GDALRasterBandH CPL_DLL CPL_STDCALL GDALGetMaskBand(GDALRasterBandH hBand); int CPL_DLL CPL_STDCALL GDALGetMaskFlags(GDALRasterBandH hBand); CPLErr CPL_DLL CPL_STDCALL GDALCreateMaskBand(GDALRasterBandH hBand, From 16513c9a3646399f3779be341b59d32bc7caab48 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 2 Apr 2024 18:41:42 +0200 Subject: [PATCH 030/230] VRT: improve documentation of what LUT does --- doc/source/drivers/raster/vrt.rst | 5 +++++ doc/source/drivers/raster/vrt_processed_dataset.rst | 7 +++++++ 2 files changed, 12 insertions(+) diff --git a/doc/source/drivers/raster/vrt.rst b/doc/source/drivers/raster/vrt.rst index 6d4f26f25a31..cbd911e3d090 100644 --- a/doc/source/drivers/raster/vrt.rst +++ b/doc/source/drivers/raster/vrt.rst @@ -423,6 +423,11 @@ the following form: The intermediary values are calculated using a linear interpolation between the bounding destination values of the corresponding range. 
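For example, with a LUT of ``0:10,10:20``, an input pixel value of 5 is interpolated to an output value of 15.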
+Source values should be monotonically non-decreasing. Clamping is performed for +input pixel values outside of the range specified by the LUT. That is, if an +input pixel value is lower than the minimum source value, then the destination +value corresponding to that minimum source value is used as the output pixel value. +And similarly for an input pixel value that is greater than the maximum source value. The ComplexSource supports fetching a color component from a source raster band that has a color table. The ColorTableComponent value is the index of the diff --git a/doc/source/drivers/raster/vrt_processed_dataset.rst b/doc/source/drivers/raster/vrt_processed_dataset.rst index 76e84ecfbbaa..f46991c56644 100644 --- a/doc/source/drivers/raster/vrt_processed_dataset.rst +++ b/doc/source/drivers/raster/vrt_processed_dataset.rst @@ -253,6 +253,13 @@ The following required argument must be specified: - ``lut_{band}``: List of the form ``[src value 1]:[dest value 1],[src value 2]:[dest value 2],....``. {band} must be replaced by 1 to the number of bands. +The intermediary values are calculated using a linear interpolation +between the bounding destination values of the corresponding range. +Source values should be monotonically non-decreasing. Clamping is performed for +input pixel values outside of the range specified by the LUT. That is, if an +input pixel value is lower than the minimum source value, then the destination +value corresponding to that minimum source value is used as the output pixel value. +And similarly for an input pixel value that is greater than the maximum source value. The following optional arguments may be specified: From d29eed89a91b0e2fae0c966a8b5b60742ec465e1 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Wed, 3 Apr 2024 18:52:15 +0200 Subject: [PATCH 031/230] VRTProcessedDataset: rename Dehazing algorithm to LocalScaleOffset --- autotest/gdrivers/vrtprocesseddataset.py | 26 +++---- .../drivers/raster/vrt_processed_dataset.rst | 15 ++-- frmts/vrt/data/gdalvrt.xsd | 2 +- frmts/vrt/vrtprocesseddatasetfunctions.cpp | 69 ++++++++++--------- 4 files changed, 58 insertions(+), 54 deletions(-) diff --git a/autotest/gdrivers/vrtprocesseddataset.py b/autotest/gdrivers/vrtprocesseddataset.py index 7ebec7613d0d..03acbb2a3218 100755 --- a/autotest/gdrivers/vrtprocesseddataset.py +++ b/autotest/gdrivers/vrtprocesseddataset.py @@ -600,7 +600,7 @@ def test_vrtprocesseddataset_lut_errors(tmp_vsimem): ############################################################################### -# Test nominal case of Dehazing algorithm +# Test nominal case of LocalScaleOffset algorithm def test_vrtprocesseddataset_dehazing_nominal(tmp_vsimem): @@ -639,7 +639,7 @@ def test_vrtprocesseddataset_dehazing_nominal(tmp_vsimem): </Input> <ProcessingSteps> <Step> - <Algorithm>Dehazing</Algorithm> + <Algorithm>LocalScaleOffset</Algorithm> <Argument name="gain_dataset_filename_1">{gain_filename}</Argument> <Argument name="gain_dataset_band_1">1</Argument> <Argument name="gain_dataset_filename_2">{gain_filename}</Argument> @@ -674,7 +674,7 @@ def test_vrtprocesseddataset_dehazing_nominal(tmp_vsimem): ############################################################################### -# Test nominal case of Dehazing algorithm where gain and offset have a lower +# Test nominal case of LocalScaleOffset algorithm where gain and offset have a lower # resolution than the input dataset @@ -705,7 +705,7 @@ def test_vrtprocesseddataset_dehazing_different_resolution(tmp_vsimem): </Input> 
<ProcessingSteps> <Step> - <Algorithm>Dehazing</Algorithm> + <Algorithm>LocalScaleOffset</Algorithm> <Argument name="gain_dataset_filename_1">{gain_filename}</Argument> <Argument name="gain_dataset_band_1">1</Argument> <Argument name="offset_dataset_filename_1">{offset_filename}</Argument> @@ -732,7 +732,7 @@ def test_vrtprocesseddataset_dehazing_different_resolution(tmp_vsimem): ############################################################################### -# Test error cases of Dehazing algorithm +# Test error cases of LocalScaleOffset algorithm def test_vrtprocesseddataset_dehazing_error(tmp_vsimem): @@ -754,7 +754,7 @@ def test_vrtprocesseddataset_dehazing_error(tmp_vsimem): </Input> <ProcessingSteps> <Step> - <Algorithm>Dehazing</Algorithm> + <Algorithm>LocalScaleOffset</Algorithm> <Argument name="gain_dataset_filename_1">{src_filename}</Argument> <Argument name="gain_dataset_band_1">1</Argument> </Step> @@ -774,7 +774,7 @@ def test_vrtprocesseddataset_dehazing_error(tmp_vsimem): </Input> <ProcessingSteps> <Step> - <Algorithm>Dehazing</Algorithm> + <Algorithm>LocalScaleOffset</Algorithm> <Argument name="gain_dataset_filename_2">{src_filename}</Argument> <Argument name="gain_dataset_band_1">1</Argument> <Argument name="offset_dataset_filename_1">{src_filename}</Argument> @@ -796,7 +796,7 @@ def test_vrtprocesseddataset_dehazing_error(tmp_vsimem): </Input> <ProcessingSteps> <Step> - <Algorithm>Dehazing</Algorithm> + <Algorithm>LocalScaleOffset</Algorithm> <Argument name="gain_dataset_filename_1">{src_filename}</Argument> <Argument name="gain_dataset_band_2">1</Argument> <Argument name="offset_dataset_filename_1">{src_filename}</Argument> @@ -818,7 +818,7 @@ def test_vrtprocesseddataset_dehazing_error(tmp_vsimem): </Input> <ProcessingSteps> <Step> - <Algorithm>Dehazing</Algorithm> + <Algorithm>LocalScaleOffset</Algorithm> <Argument name="gain_dataset_filename_1">{src_filename}</Argument> <Argument name="gain_dataset_band_1">1</Argument> <Argument name="offset_dataset_filename_2">{src_filename}</Argument> @@ -840,7 +840,7 @@ def test_vrtprocesseddataset_dehazing_error(tmp_vsimem): </Input> <ProcessingSteps> <Step> - <Algorithm>Dehazing</Algorithm> + <Algorithm>LocalScaleOffset</Algorithm> <Argument name="gain_dataset_filename_1">{src_filename}</Argument> <Argument name="gain_dataset_band_1">1</Argument> <Argument name="offset_dataset_filename_1">{src_filename}</Argument> @@ -862,7 +862,7 @@ def test_vrtprocesseddataset_dehazing_error(tmp_vsimem): </Input> <ProcessingSteps> <Step> - <Algorithm>Dehazing</Algorithm> + <Algorithm>LocalScaleOffset</Algorithm> <Argument name="gain_dataset_filename_1">{src_filename}</Argument> <Argument name="gain_dataset_band_1">2</Argument> <Argument name="offset_dataset_filename_1">{src_filename}</Argument> @@ -881,7 +881,7 @@ def test_vrtprocesseddataset_dehazing_error(tmp_vsimem): </Input> <ProcessingSteps> <Step> - <Algorithm>Dehazing</Algorithm> + <Algorithm>LocalScaleOffset</Algorithm> <Argument name="gain_dataset_filename_1">invalid</Argument> <Argument name="gain_dataset_band_1">1</Argument> <Argument name="offset_dataset_filename_1">{src_filename}</Argument> @@ -904,7 +904,7 @@ def test_vrtprocesseddataset_dehazing_error(tmp_vsimem): </Input> <ProcessingSteps> <Step> - <Algorithm>Dehazing</Algorithm> + <Algorithm>LocalScaleOffset</Algorithm> <Argument name="gain_dataset_filename_1">{nogt_filename}</Argument> <Argument name="gain_dataset_band_1">1</Argument> <Argument name="offset_dataset_filename_1">{nogt_filename}</Argument> diff --git 
a/doc/source/drivers/raster/vrt_processed_dataset.rst b/doc/source/drivers/raster/vrt_processed_dataset.rst index f46991c56644..35b4d215ecb7 100644 --- a/doc/source/drivers/raster/vrt_processed_dataset.rst +++ b/doc/source/drivers/raster/vrt_processed_dataset.rst @@ -12,8 +12,8 @@ to apply chained processing steps that may apply to several bands at the same ti The following built-in algorithms are introduced, and may typically be applied in the following order: -- Dehazing: remove haze effects by applying (subsampled) gain and offset - auxiliary datasets. +- LocalScaleOffset: apply per-pixel gain and offset coming (typically subsampled) + from auxiliary datasets. Can be used for dehazing processing. - BandAffineCombination: perform an affine transformation combination of bands. @@ -35,7 +35,7 @@ Here's an example of such a file to apply various correction to a R,G,B,NIR data <ProcessingSteps> <Step name="Dehazing"> - <Algorithm>Dehazing</Algorithm> + <Algorithm>LocalScaleOffset</Algorithm> <Argument name="relativeToVRT">true</Argument> @@ -128,14 +128,15 @@ It must also have the 2 following child elements: - ``ProcessingSteps``, with at least one child ``Step`` element. Each ``Step`` must have a ``Algorithm`` child element, and an optional ``name`` attribute. -The value of ``Algorithm`` must be a registered VRTProcessedDataset function. At time of writing, the following 4 algorithms are defined: ``Dehazing``, ``BandAffineCombination``, ``Trimming`` and ``LUT``. +The value of ``Algorithm`` must be a registered VRTProcessedDataset function. At time of writing, the following 4 algorithms are defined: ``LocalScaleOffset``, ``BandAffineCombination``, ``Trimming`` and ``LUT``. A ``Step`` will generally have one or several ``Argument`` child elements, some of them being required, others optional. Consult the documentation of each algorithm. -Dehazing algorithm ------------------- +LocalScaleOffset algorithm +-------------------------- -Remove haze effects by applying (subsampled) gain and offset auxiliary datasets. +Apply per-pixel gain and offset coming (typically subsampled) from auxiliary +datasets. Can be used for dehazing processing. The gain and offset auxiliary datasets must have a georeferencing consistent of the input dataset, but may have a different resolution. diff --git a/frmts/vrt/data/gdalvrt.xsd b/frmts/vrt/data/gdalvrt.xsd index 89ce1496a4b7..9f9a91d7be63 100644 --- a/frmts/vrt/data/gdalvrt.xsd +++ b/frmts/vrt/data/gdalvrt.xsd @@ -226,7 +226,7 @@ <xs:sequence> <xs:element name="Algorithm" type="xs:string" minOccurs="1"> <xs:annotation> - <xs:documentation>Builtin allowed names are BandAffineCombination, LUT, Dehazing, Trimming. More algorithms can be registered at run-time.</xs:documentation> + <xs:documentation>Builtin allowed names are BandAffineCombination, LUT, LocalScaleOffset, Trimming. 
More algorithms can be registered at run-time.</xs:documentation> </xs:annotation> </xs:element> <xs:element name="Argument" type="ArgumentType" maxOccurs="unbounded"/> diff --git a/frmts/vrt/vrtprocesseddatasetfunctions.cpp b/frmts/vrt/vrtprocesseddatasetfunctions.cpp index 9a2eefd0748a..4b89e582d477 100644 --- a/frmts/vrt/vrtprocesseddatasetfunctions.cpp +++ b/frmts/vrt/vrtprocesseddatasetfunctions.cpp @@ -621,15 +621,15 @@ LUTProcess(const char * /*pszFuncName*/, void * /*pUserData*/, } /************************************************************************/ -/* DehazingData */ +/* LocalScaleOffsetData */ /************************************************************************/ namespace { -/** Working structure for 'Dehazing' builtin function. */ -struct DehazingData +/** Working structure for 'LocalScaleOffset' builtin function. */ +struct LocalScaleOffsetData { - static constexpr const char *const EXPECTED_SIGNATURE = "Dehazing"; + static constexpr const char *const EXPECTED_SIGNATURE = "LocalScaleOffset"; //! Signature (to make sure callback functions are called with the right argument) const std::string m_osSignature = EXPECTED_SIGNATURE; @@ -683,16 +683,16 @@ static bool CheckAllBands(const std::map<int, T> &oMap, int nExpectedBandCount) } /************************************************************************/ -/* DehazingInit() */ +/* LocalScaleOffsetInit() */ /************************************************************************/ -/** Init function for 'Dehazing' builtin function. */ -static CPLErr DehazingInit(const char * /*pszFuncName*/, void * /*pUserData*/, - CSLConstList papszFunctionArgs, int nInBands, - GDALDataType eInDT, double *padfInNoData, - int *pnOutBands, GDALDataType *peOutDT, - double **ppadfOutNoData, const char *pszVRTPath, - VRTPDWorkingDataPtr *ppWorkingData) +/** Init function for 'LocalScaleOffset' builtin function. */ +static CPLErr +LocalScaleOffsetInit(const char * /*pszFuncName*/, void * /*pUserData*/, + CSLConstList papszFunctionArgs, int nInBands, + GDALDataType eInDT, double *padfInNoData, int *pnOutBands, + GDALDataType *peOutDT, double **ppadfOutNoData, + const char *pszVRTPath, VRTPDWorkingDataPtr *ppWorkingData) { CPLAssert(eInDT == GDT_Float64); @@ -705,7 +705,7 @@ static CPLErr DehazingInit(const char * /*pszFuncName*/, void * /*pUserData*/, *pnOutBands = nInBands; } - auto data = std::make_unique<DehazingData>(); + auto data = std::make_unique<LocalScaleOffsetData>(); bool bNodataSpecified = false; double dfNoData = std::numeric_limits<double>::quiet_NaN(); @@ -901,15 +901,17 @@ static CPLErr DehazingInit(const char * /*pszFuncName*/, void * /*pUserData*/, } /************************************************************************/ -/* DehazingFree() */ +/* LocalScaleOffsetFree() */ /************************************************************************/ -/** Free function for 'Dehazing' builtin function. */ -static void DehazingFree(const char * /*pszFuncName*/, void * /*pUserData*/, - VRTPDWorkingDataPtr pWorkingData) +/** Free function for 'LocalScaleOffset' builtin function. 
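It deletes the LocalScaleOffsetData working structure allocated by LocalScaleOffsetInit().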
*/ +static void LocalScaleOffsetFree(const char * /*pszFuncName*/, + void * /*pUserData*/, + VRTPDWorkingDataPtr pWorkingData) { - DehazingData *data = static_cast<DehazingData *>(pWorkingData); - CPLAssert(data->m_osSignature == DehazingData::EXPECTED_SIGNATURE); + LocalScaleOffsetData *data = + static_cast<LocalScaleOffsetData *>(pWorkingData); + CPLAssert(data->m_osSignature == LocalScaleOffsetData::EXPECTED_SIGNATURE); CPL_IGNORE_RET_VAL(data->m_osSignature); delete data; } @@ -988,11 +990,11 @@ static bool LoadAuxData(double dfULX, double dfULY, double dfLRX, double dfLRY, } /************************************************************************/ -/* DehazingProcess() */ +/* LocalScaleOffsetProcess() */ /************************************************************************/ -/** Processing function for 'Dehazing' builtin function. */ -static CPLErr DehazingProcess( +/** Processing function for 'LocalScaleOffset' builtin function. */ +static CPLErr LocalScaleOffsetProcess( const char * /*pszFuncName*/, void * /*pUserData*/, VRTPDWorkingDataPtr pWorkingData, CSLConstList /* papszFunctionArgs*/, int nBufXSize, int nBufYSize, const void *pInBuffer, size_t nInBufferSize, @@ -1016,8 +1018,9 @@ static CPLErr DehazingProcess( CPLAssert(nInBands == nOutBands); CPL_IGNORE_RET_VAL(nOutBands); - DehazingData *data = static_cast<DehazingData *>(pWorkingData); - CPLAssert(data->m_osSignature == DehazingData::EXPECTED_SIGNATURE); + LocalScaleOffsetData *data = + static_cast<LocalScaleOffsetData *>(pWorkingData); + CPLAssert(data->m_osSignature == LocalScaleOffsetData::EXPECTED_SIGNATURE); const double *CPL_RESTRICT padfSrc = static_cast<const double *>(pInBuffer); double *CPL_RESTRICT padfDst = static_cast<double *>(pOutBuffer); @@ -1076,13 +1079,13 @@ static CPLErr DehazingProcess( } else { - double dfDehazed = dfSrcVal * dfGain - dfOffset; - if (dfDehazed < dfClampMin) - dfDehazed = dfClampMin; - if (dfDehazed > dfClampMax) - dfDehazed = dfClampMax; + double dfUnscaled = dfSrcVal * dfGain - dfOffset; + if (dfUnscaled < dfClampMin) + dfUnscaled = dfClampMin; + if (dfUnscaled > dfClampMax) + dfUnscaled = dfClampMax; - *padfDstThisBand = dfDehazed; + *padfDstThisBand = dfUnscaled; } } padfSrcThisBand += nInBands; @@ -1518,7 +1521,7 @@ void GDALVRTRegisterDefaultProcessedDatasetFuncs() nullptr); GDALVRTRegisterProcessedDatasetFunc( - "Dehazing", nullptr, + "LocalScaleOffset", nullptr, "<ProcessedDatasetFunctionArgumentsList>" " <Argument name='relativeToVRT' " "description='Whether gain and offset filenames are relative to " @@ -1544,8 +1547,8 @@ void GDALVRTRegisterDefaultProcessedDatasetFuncs() " <Argument name='offset_nodata' type='double' " "description='Override offset dataset nodata value'/>" "</ProcessedDatasetFunctionArgumentsList>", - GDT_Float64, nullptr, 0, nullptr, 0, DehazingInit, DehazingFree, - DehazingProcess, nullptr); + GDT_Float64, nullptr, 0, nullptr, 0, LocalScaleOffsetInit, + LocalScaleOffsetFree, LocalScaleOffsetProcess, nullptr); GDALVRTRegisterProcessedDatasetFunc( "Trimming", nullptr, From 0e67eeafc6f26c06b0c56188db95cd8fc77e6c69 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Fri, 5 Apr 2024 15:10:53 +0200 Subject: [PATCH 032/230] vrtprocesseddatasetfunctions.cpp: relax bound checking in LoadAuxData() --- frmts/vrt/vrtprocesseddatasetfunctions.cpp | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/frmts/vrt/vrtprocesseddatasetfunctions.cpp b/frmts/vrt/vrtprocesseddatasetfunctions.cpp index 
4b89e582d477..a34aca2919b9 100644 --- a/frmts/vrt/vrtprocesseddatasetfunctions.cpp +++ b/frmts/vrt/vrtprocesseddatasetfunctions.cpp @@ -947,11 +947,19 @@ static bool LoadAuxData(double dfULX, double dfULY, double dfLRX, double dfLRY, "Unexpected computed %s pixel/line", pszAuxType); return false; } - if (dfULPixel < -1 || dfLRPixel > poAuxBand->GetXSize() || dfULLine < -1 || - dfLRLine > poAuxBand->GetYSize()) + if (dfULPixel < -1 || dfULLine < -1) { CPLError(CE_Failure, CPLE_AppDefined, - "Unexpected computed %s pixel/line", pszAuxType); + "Unexpected computed %s upper left (pixel,line)=(%f,%f)", + pszAuxType, dfULPixel, dfULLine); + return false; + } + if (dfLRPixel > poAuxBand->GetXSize() + 1 || + dfLRLine > poAuxBand->GetYSize() + 1) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Unexpected computed %s lower right (pixel,line)=(%f,%f)", + pszAuxType, dfLRPixel, dfLRLine); return false; } From 174c886a340f7a95a08eb8d8dc6c13515d32dd7f Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Wed, 17 Apr 2024 23:52:28 +0200 Subject: [PATCH 033/230] vrtprocesseddataset.py: use numpy --- autotest/gdrivers/vrtprocesseddataset.py | 251 +++++++++-------------- 1 file changed, 94 insertions(+), 157 deletions(-) diff --git a/autotest/gdrivers/vrtprocesseddataset.py b/autotest/gdrivers/vrtprocesseddataset.py index 03acbb2a3218..1c353d170732 100755 --- a/autotest/gdrivers/vrtprocesseddataset.py +++ b/autotest/gdrivers/vrtprocesseddataset.py @@ -1,7 +1,5 @@ #!/usr/bin/env pytest ############################################################################### -# $Id$ -# # Project: GDAL/OGR Test Suite # Purpose: Test VRTProcessedDataset support. # Author: Even Rouault <even.rouault at spatialys.com> @@ -28,13 +26,14 @@ # DEALINGS IN THE SOFTWARE. 
############################################################################### -import struct - import gdaltest import pytest from osgeo import gdal +np = pytest.importorskip("numpy") +pytest.importorskip("osgeo.gdal_array") + ############################################################################### # Test error cases in general VRTProcessedDataset XML structure @@ -139,9 +138,9 @@ def test_vrtprocesseddataset_affine_combination_nominal(tmp_vsimem): src_filename = str(tmp_vsimem / "src.tif") src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 2, 1, 3) - src_ds.GetRasterBand(1).WriteRaster(0, 0, 2, 1, b"\x01\x03") - src_ds.GetRasterBand(2).WriteRaster(0, 0, 2, 1, b"\x02\x06") - src_ds.GetRasterBand(3).WriteRaster(0, 0, 2, 1, b"\x03\x03") + src_ds.GetRasterBand(1).WriteArray(np.array([[1, 3]])) + src_ds.GetRasterBand(2).WriteArray(np.array([[2, 6]])) + src_ds.GetRasterBand(3).WriteArray(np.array([[3, 3]])) src_ds.Close() ds = gdal.Open( @@ -168,9 +167,11 @@ def test_vrtprocesseddataset_affine_combination_nominal(tmp_vsimem): assert ds.GetSpatialRef() is None assert ds.GetGeoTransform(can_return_null=True) is None assert ds.GetRasterBand(1).DataType == gdal.GDT_Byte - assert struct.unpack("B" * 2, ds.GetRasterBand(1).ReadRaster()) == (15, 10 + 6) - assert struct.unpack("B" * 2, ds.GetRasterBand(2).ReadRaster()) == (20 + 3, 20 + 3) - assert struct.unpack("B" * 2, ds.GetRasterBand(3).ReadRaster()) == (30 + 1, 32) + np.testing.assert_equal(ds.GetRasterBand(1).ReadAsArray(), np.array([[15, 10 + 6]])) + np.testing.assert_equal( + ds.GetRasterBand(2).ReadAsArray(), np.array([[20 + 3, 20 + 3]]) + ) + np.testing.assert_equal(ds.GetRasterBand(3).ReadAsArray(), np.array([[30 + 1, 32]])) ############################################################################### @@ -233,9 +234,9 @@ def test_vrtprocesseddataset_affine_combination_nodata(tmp_vsimem): src_filename = str(tmp_vsimem / "src.tif") src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 2, 1, 2) - src_ds.GetRasterBand(1).WriteRaster(0, 0, 2, 1, b"\x01\x02") + src_ds.GetRasterBand(1).WriteArray(np.array([[1, 2]])) src_ds.GetRasterBand(1).SetNoDataValue(1) - src_ds.GetRasterBand(2).WriteRaster(0, 0, 2, 1, b"\x03\x03") + src_ds.GetRasterBand(2).WriteArray(np.array([[3, 3]])) src_ds.GetRasterBand(2).SetNoDataValue(1) src_ds.Close() @@ -255,17 +256,17 @@ def test_vrtprocesseddataset_affine_combination_nodata(tmp_vsimem): """ ) assert ds.GetRasterBand(1).DataType == gdal.GDT_Byte - assert struct.unpack("B" * 2, ds.GetRasterBand(1).ReadRaster()) == (1, 5) + np.testing.assert_equal(ds.GetRasterBand(1).ReadAsArray(), np.array([[1, 5]])) # 0 should actually be 3-2=1, but this is the nodata value hence the replacement value - assert struct.unpack("B" * 2, ds.GetRasterBand(2).ReadRaster()) == (1, 0) + np.testing.assert_equal(ds.GetRasterBand(2).ReadAsArray(), np.array([[1, 0]])) def test_vrtprocesseddataset_affine_combination_nodata_as_parameter(tmp_vsimem): src_filename = str(tmp_vsimem / "src.tif") src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 2, 1, 2) - src_ds.GetRasterBand(1).WriteRaster(0, 0, 2, 1, b"\x01\x02") - src_ds.GetRasterBand(2).WriteRaster(0, 0, 2, 1, b"\x03\x03") + src_ds.GetRasterBand(1).WriteArray(np.array([[1, 2]])) + src_ds.GetRasterBand(2).WriteArray(np.array([[3, 3]])) src_ds.Close() ds = gdal.Open( @@ -287,9 +288,9 @@ def test_vrtprocesseddataset_affine_combination_nodata_as_parameter(tmp_vsimem): """ ) assert ds.GetRasterBand(1).DataType == gdal.GDT_Byte - assert struct.unpack("B" * 2, 
ds.GetRasterBand(1).ReadRaster()) == (255, 5) + np.testing.assert_equal(ds.GetRasterBand(1).ReadAsArray(), np.array([[255, 5]])) # 254 should actually be 256+1*2+(-1)*3=255, but this is the nodata value hence the replacement value - assert struct.unpack("B" * 2, ds.GetRasterBand(2).ReadRaster()) == (255, 254) + np.testing.assert_equal(ds.GetRasterBand(2).ReadAsArray(), np.array([[255, 254]])) ############################################################################### @@ -300,8 +301,8 @@ def test_vrtprocesseddataset_affine_combination_replacement_nodata(tmp_vsimem): src_filename = str(tmp_vsimem / "src.tif") src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 2, 1, 2) - src_ds.GetRasterBand(1).WriteRaster(0, 0, 2, 1, b"\x01\x02") - src_ds.GetRasterBand(2).WriteRaster(0, 0, 2, 1, b"\x03\x03") + src_ds.GetRasterBand(1).WriteArray(np.array([[1, 2]])) + src_ds.GetRasterBand(2).WriteArray(np.array([[3, 3]])) src_ds.Close() ds = gdal.Open( @@ -323,9 +324,9 @@ def test_vrtprocesseddataset_affine_combination_replacement_nodata(tmp_vsimem): """ ) assert ds.GetRasterBand(1).DataType == gdal.GDT_Byte - assert struct.unpack("B" * 2, ds.GetRasterBand(1).ReadRaster()) == (255, 5) + np.testing.assert_equal(ds.GetRasterBand(1).ReadAsArray(), np.array([[255, 5]])) # 254 should actually be 256+1*2+(-1)*3=255, but this is the nodata value hence the replacement value - assert struct.unpack("B" * 2, ds.GetRasterBand(2).ReadRaster()) == (255, 128) + np.testing.assert_equal(ds.GetRasterBand(2).ReadAsArray(), np.array([[255, 128]])) ############################################################################### @@ -423,8 +424,8 @@ def test_vrtprocesseddataset_lut_nominal(tmp_vsimem): src_filename = str(tmp_vsimem / "src.tif") src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 3, 1, 2) - src_ds.GetRasterBand(1).WriteRaster(0, 0, 3, 1, b"\x01\x02\x03") - src_ds.GetRasterBand(2).WriteRaster(0, 0, 3, 1, b"\x01\x02\x03") + src_ds.GetRasterBand(1).WriteArray(np.array([[1, 2, 3]])) + src_ds.GetRasterBand(2).WriteArray(np.array([[1, 2, 3]])) src_ds.Close() ds = gdal.Open( @@ -442,8 +443,10 @@ def test_vrtprocesseddataset_lut_nominal(tmp_vsimem): </VRTDataset> """ ) - assert struct.unpack("B" * 3, ds.GetRasterBand(1).ReadRaster()) == (10, 15, 20) - assert struct.unpack("B" * 3, ds.GetRasterBand(2).ReadRaster()) == (100, 150, 200) + np.testing.assert_equal(ds.GetRasterBand(1).ReadAsArray(), np.array([[10, 15, 20]])) + np.testing.assert_equal( + ds.GetRasterBand(2).ReadAsArray(), np.array([[100, 150, 200]]) + ) ############################################################################### @@ -454,9 +457,9 @@ def test_vrtprocesseddataset_lut_nodata(tmp_vsimem): src_filename = str(tmp_vsimem / "src.tif") src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 4, 1, 2) - src_ds.GetRasterBand(1).WriteRaster(0, 0, 4, 1, b"\x00\x01\x02\x03") + src_ds.GetRasterBand(1).WriteArray(np.array([[0, 1, 2, 3]])) src_ds.GetRasterBand(1).SetNoDataValue(0) - src_ds.GetRasterBand(2).WriteRaster(0, 0, 4, 1, b"\x00\x01\x02\x03") + src_ds.GetRasterBand(2).WriteArray(np.array([[0, 1, 2, 3]])) src_ds.GetRasterBand(2).SetNoDataValue(0) src_ds.Close() @@ -475,12 +478,11 @@ def test_vrtprocesseddataset_lut_nodata(tmp_vsimem): </VRTDataset> """ ) - assert struct.unpack("B" * 4, ds.GetRasterBand(1).ReadRaster()) == (0, 10, 15, 20) - assert struct.unpack("B" * 4, ds.GetRasterBand(2).ReadRaster()) == ( - 0, - 100, - 150, - 200, + np.testing.assert_equal( + ds.GetRasterBand(1).ReadAsArray(), np.array([[0, 10, 15, 20]]) + ) + 
np.testing.assert_equal( + ds.GetRasterBand(2).ReadAsArray(), np.array([[0, 100, 150, 200]]) ) @@ -492,8 +494,8 @@ def test_vrtprocesseddataset_lut_nodata_as_parameter(tmp_vsimem): src_filename = str(tmp_vsimem / "src.tif") src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 4, 1, 2) - src_ds.GetRasterBand(1).WriteRaster(0, 0, 4, 1, b"\x00\x01\x02\x03") - src_ds.GetRasterBand(2).WriteRaster(0, 0, 4, 1, b"\x00\x01\x02\x03") + src_ds.GetRasterBand(1).WriteArray(np.array([[0, 1, 2, 3]])) + src_ds.GetRasterBand(2).WriteArray(np.array([[0, 1, 2, 3]])) src_ds.Close() ds = gdal.Open( @@ -513,12 +515,11 @@ def test_vrtprocesseddataset_lut_nodata_as_parameter(tmp_vsimem): </VRTDataset> """ ) - assert struct.unpack("B" * 4, ds.GetRasterBand(1).ReadRaster()) == (1, 10, 15, 20) - assert struct.unpack("B" * 4, ds.GetRasterBand(2).ReadRaster()) == ( - 1, - 100, - 150, - 200, + np.testing.assert_equal( + ds.GetRasterBand(1).ReadAsArray(), np.array([[1, 10, 15, 20]]) + ) + np.testing.assert_equal( + ds.GetRasterBand(2).ReadAsArray(), np.array([[1, 100, 150, 200]]) ) @@ -530,8 +531,8 @@ def test_vrtprocesseddataset_lut_errors(tmp_vsimem): src_filename = str(tmp_vsimem / "src.tif") src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 3, 1, 2) - src_ds.GetRasterBand(1).WriteRaster(0, 0, 3, 1, b"\x01\x02\x03") - src_ds.GetRasterBand(2).WriteRaster(0, 0, 3, 1, b"\x01\x02\x03") + src_ds.GetRasterBand(1).WriteArray(np.array([[1, 2, 3]])) + src_ds.GetRasterBand(2).WriteArray(np.array([[1, 2, 3]])) src_ds.Close() with pytest.raises(Exception, match="Step 'nr 1' lacks required Argument"): @@ -607,8 +608,8 @@ def test_vrtprocesseddataset_dehazing_nominal(tmp_vsimem): src_filename = str(tmp_vsimem / "src.tif") src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 6, 1, 2) - src_ds.GetRasterBand(1).WriteRaster(0, 0, 6, 1, b"\x01\x02\x03\xff\x01\x01") - src_ds.GetRasterBand(2).WriteRaster(0, 0, 6, 1, b"\x01\x02\x03\xff\x01\x01") + src_ds.GetRasterBand(1).WriteArray(np.array([[1, 2, 3, 255, 1, 1]])) + src_ds.GetRasterBand(2).WriteArray(np.array([[1, 2, 3, 255, 1, 1]])) src_ds.GetRasterBand(1).SetNoDataValue(255) src_ds.GetRasterBand(2).SetNoDataValue(255) src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) @@ -616,8 +617,8 @@ def test_vrtprocesseddataset_dehazing_nominal(tmp_vsimem): gain_filename = str(tmp_vsimem / "gain.tif") gain_ds = gdal.GetDriverByName("GTiff").Create(gain_filename, 6, 1, 2) - gain_ds.GetRasterBand(1).WriteRaster(0, 0, 6, 1, b"\x02\x04\x06\x01\xfe\x01") - gain_ds.GetRasterBand(2).WriteRaster(0, 0, 6, 1, b"\x03\x05\x07\x01\xfe\x01") + gain_ds.GetRasterBand(1).WriteArray(np.array([[2, 4, 6, 1, 254, 1]])) + gain_ds.GetRasterBand(2).WriteArray(np.array([[3, 5, 7, 1, 254, 1]])) gain_ds.GetRasterBand(1).SetNoDataValue(254) gain_ds.GetRasterBand(2).SetNoDataValue(254) gain_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) @@ -625,8 +626,8 @@ def test_vrtprocesseddataset_dehazing_nominal(tmp_vsimem): offset_filename = str(tmp_vsimem / "offset.tif") offset_ds = gdal.GetDriverByName("GTiff").Create(offset_filename, 6, 1, 2) - offset_ds.GetRasterBand(1).WriteRaster(0, 0, 6, 1, b"\x01\x02\x03\x01\x01\xfd") - offset_ds.GetRasterBand(2).WriteRaster(0, 0, 6, 1, b"\x02\x03\x04\x01\x01\xfd") + offset_ds.GetRasterBand(1).WriteArray(np.array([[1, 2, 3, 1, 1, 253]])) + offset_ds.GetRasterBand(2).WriteArray(np.array([[2, 3, 4, 1, 1, 253]])) offset_ds.GetRasterBand(1).SetNoDataValue(253) offset_ds.GetRasterBand(2).SetNoDataValue(253) offset_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) @@ -655,21 +656,11 @@ def 
test_vrtprocesseddataset_dehazing_nominal(tmp_vsimem): </VRTDataset> """ ) - assert struct.unpack("B" * 6, ds.GetRasterBand(1).ReadRaster()) == ( - 2, - 6, - 15, - 255, - 255, - 255, + np.testing.assert_equal( + ds.GetRasterBand(1).ReadAsArray(), np.array([[2, 6, 15, 255, 255, 255]]) ) - assert struct.unpack("B" * 6, ds.GetRasterBand(2).ReadRaster()) == ( - 2, - 7, - 16, - 255, - 255, - 255, + np.testing.assert_equal( + ds.GetRasterBand(2).ReadAsArray(), np.array([[2, 7, 16, 255, 255, 255]]) ) @@ -682,19 +673,21 @@ def test_vrtprocesseddataset_dehazing_different_resolution(tmp_vsimem): src_filename = str(tmp_vsimem / "src.tif") src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 6, 2, 1) - src_ds.GetRasterBand(1).WriteRaster(0, 0, 6, 2, b"\x01\x01\x02\x02\x03\x03" * 2) + src_ds.GetRasterBand(1).WriteArray( + np.array([[1, 1, 2, 2, 3, 3], [1, 1, 2, 2, 3, 3]]) + ) src_ds.SetGeoTransform([0, 0.5, 0, 0, 0, 0.5]) src_ds.Close() gain_filename = str(tmp_vsimem / "gain.tif") gain_ds = gdal.GetDriverByName("GTiff").Create(gain_filename, 3, 1, 1) - gain_ds.GetRasterBand(1).WriteRaster(0, 0, 3, 1, b"\x02\x04\x06") + gain_ds.GetRasterBand(1).WriteArray(np.array([[2, 4, 6]])) gain_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) gain_ds.Close() offset_filename = str(tmp_vsimem / "offset.tif") offset_ds = gdal.GetDriverByName("GTiff").Create(offset_filename, 3, 1, 1) - offset_ds.GetRasterBand(1).WriteRaster(0, 0, 3, 1, b"\x01\x02\x03") + offset_ds.GetRasterBand(1).WriteArray(np.array([[1, 2, 3]])) offset_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) offset_ds.Close() @@ -715,19 +708,9 @@ def test_vrtprocesseddataset_dehazing_different_resolution(tmp_vsimem): </VRTDataset> """ ) - assert struct.unpack("B" * 12, ds.GetRasterBand(1).ReadRaster()) == ( - 1, - 2, - 6, - 8, - 15, - 15, - 1, - 2, - 6, - 8, - 15, - 15, + np.testing.assert_equal( + ds.GetRasterBand(1).ReadAsArray(), + np.array([[1, 2, 6, 8, 15, 15], [1, 2, 6, 8, 15, 15]]), ) @@ -739,7 +722,7 @@ def test_vrtprocesseddataset_dehazing_error(tmp_vsimem): src_filename = str(tmp_vsimem / "src.tif") src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 3, 1, 1) - src_ds.GetRasterBand(1).WriteRaster(0, 0, 3, 1, b"\x01\x02\x03") + src_ds.GetRasterBand(1).WriteArray(np.array([[1, 2, 3]])) src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) src_ds.Close() @@ -930,18 +913,10 @@ def test_vrtprocesseddataset_trimming_nominal(tmp_vsimem): B = 200.0 NIR = 100.0 - src_ds.GetRasterBand(1).WriteRaster( - 0, 0, 6, 1, struct.pack("B" * 6, int(R), 150, 200, 0, 0, 0) - ) - src_ds.GetRasterBand(2).WriteRaster( - 0, 0, 6, 1, struct.pack("B" * 6, int(G), 200, 100, 0, 0, 0) - ) - src_ds.GetRasterBand(3).WriteRaster( - 0, 0, 6, 1, struct.pack("B" * 6, int(B), 100, 150, 0, 0, 0) - ) - src_ds.GetRasterBand(4).WriteRaster( - 0, 0, 6, 1, struct.pack("B" * 6, int(NIR), 150, 200, 0, 0, 0) - ) + src_ds.GetRasterBand(1).WriteArray(np.array([[int(R), 150, 200, 0, 0, 0]])) + src_ds.GetRasterBand(2).WriteArray(np.array([[int(G), 200, 100, 0, 0, 0]])) + src_ds.GetRasterBand(3).WriteArray(np.array([[int(B), 100, 150, 0, 0, 0]])) + src_ds.GetRasterBand(4).WriteArray(np.array([[int(NIR), 150, 200, 0, 0, 0]])) src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) src_ds.Close() @@ -950,8 +925,8 @@ def test_vrtprocesseddataset_trimming_nominal(tmp_vsimem): localMaxRGB = 205.0 - trimming_ds.GetRasterBand(1).WriteRaster( - 0, 0, 6, 1, struct.pack("B" * 6, int(localMaxRGB), 210, 220, 0, 0, 0) + trimming_ds.GetRasterBand(1).WriteArray( + np.array([[int(localMaxRGB), 210, 220, 0, 0, 0]]) ) 
trimming_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) trimming_ds.Close() @@ -998,54 +973,26 @@ def test_vrtprocesseddataset_trimming_nominal(tmp_vsimem): # print(outputR, outputG, outputB, outputNIR) - assert ( - round(outputR) - == struct.unpack("B", ds.GetRasterBand(1).ReadRaster(0, 0, 1, 1))[0] - ) - assert ( - round(outputG) - == struct.unpack("B", ds.GetRasterBand(2).ReadRaster(0, 0, 1, 1))[0] - ) - assert ( - round(outputB) - == struct.unpack("B", ds.GetRasterBand(3).ReadRaster(0, 0, 1, 1))[0] - ) - assert ( - round(outputNIR) - == struct.unpack("B", ds.GetRasterBand(4).ReadRaster(0, 0, 1, 1))[0] - ) + assert round(outputR) == ds.GetRasterBand(1).ReadAsArray(0, 0, 1, 1)[0][0] + assert round(outputG) == ds.GetRasterBand(2).ReadAsArray(0, 0, 1, 1)[0][0] + assert round(outputB) == ds.GetRasterBand(3).ReadAsArray(0, 0, 1, 1)[0][0] + assert round(outputNIR) == ds.GetRasterBand(4).ReadAsArray(0, 0, 1, 1)[0][0] - assert struct.unpack("B" * 6, ds.GetRasterBand(1).ReadRaster()) == ( - 92, # round(outputR) - 135, - 164, - 0, - 0, - 0, + np.testing.assert_equal( + ds.GetRasterBand(1).ReadAsArray(), + np.array([[92, 135, 164, 0, 0, 0]]), # round(outputR) ) - assert struct.unpack("B" * 6, ds.GetRasterBand(2).ReadRaster()) == ( - 139, # round(outputG) - 171, - 86, - 0, - 0, - 0, + np.testing.assert_equal( + ds.GetRasterBand(2).ReadAsArray(), + np.array([[139, 171, 86, 0, 0, 0]]), # round(outputG) ) - assert struct.unpack("B" * 6, ds.GetRasterBand(3).ReadRaster()) == ( - 176, # round(outputB) - 90, - 129, - 0, - 0, - 0, + np.testing.assert_equal( + ds.GetRasterBand(3).ReadAsArray(), + np.array([[176, 90, 129, 0, 0, 0]]), # round(outputB) ) - assert struct.unpack("B" * 6, ds.GetRasterBand(4).ReadRaster()) == ( - 88, # round(outputNIR) - 129, - 164, - 0, - 0, - 0, + np.testing.assert_equal( + ds.GetRasterBand(4).ReadAsArray(), + np.array([[88, 129, 164, 0, 0, 0]]), # round(outputNIR) ) @@ -1057,26 +1004,16 @@ def test_vrtprocesseddataset_trimming_errors(tmp_vsimem): src_filename = str(tmp_vsimem / "src.tif") src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 6, 1, 4) - src_ds.GetRasterBand(1).WriteRaster( - 0, 0, 6, 1, struct.pack("B" * 6, 100, 150, 200, 0, 0, 0) - ) - src_ds.GetRasterBand(2).WriteRaster( - 0, 0, 6, 1, struct.pack("B" * 6, 150, 200, 100, 0, 0, 0) - ) - src_ds.GetRasterBand(3).WriteRaster( - 0, 0, 6, 1, struct.pack("B" * 6, 200, 100, 150, 0, 0, 0) - ) - src_ds.GetRasterBand(4).WriteRaster( - 0, 0, 6, 1, struct.pack("B" * 6, 100, 150, 200, 0, 0, 0) - ) + src_ds.GetRasterBand(1).WriteArray(np.array([[100, 150, 200, 0, 0, 0]])) + src_ds.GetRasterBand(2).WriteArray(np.array([[150, 200, 100, 0, 0, 0]])) + src_ds.GetRasterBand(3).WriteArray(np.array([[200, 100, 150, 0, 0, 0]])) + src_ds.GetRasterBand(4).WriteArray(np.array([[100, 150, 200, 0, 0, 0]])) src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) src_ds.Close() trimming_filename = str(tmp_vsimem / "trimming.tif") trimming_ds = gdal.GetDriverByName("GTiff").Create(trimming_filename, 6, 1, 1) - trimming_ds.GetRasterBand(1).WriteRaster( - 0, 0, 6, 1, struct.pack("B" * 6, 200, 210, 220, 0, 0, 0) - ) + trimming_ds.GetRasterBand(1).WriteArray(np.array([[200, 210, 220, 0, 0, 0]])) trimming_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) trimming_ds.Close() @@ -1224,7 +1161,7 @@ def test_vrtprocesseddataset_serialize(tmp_vsimem): src_filename = str(tmp_vsimem / "src.tif") src_ds = gdal.GetDriverByName("GTiff").Create(src_filename, 2, 1, 1) - src_ds.GetRasterBand(1).WriteRaster(0, 0, 2, 1, b"\x01\x02") + src_ds.GetRasterBand(1).WriteArray(np.array([[1, 
2]])) src_ds.Close() vrt_filename = str(tmp_vsimem / "the.vrt") @@ -1243,13 +1180,13 @@ def test_vrtprocesseddataset_serialize(tmp_vsimem): """ with gdaltest.tempfile(vrt_filename, content): ds = gdal.Open(vrt_filename) - assert struct.unpack("B" * 2, ds.GetRasterBand(1).ReadRaster()) == (11, 12) + np.testing.assert_equal(ds.GetRasterBand(1).ReadAsArray(), np.array([[11, 12]])) assert ds.GetRasterBand(1).GetStatistics(False, False) == [0.0, 0.0, 0.0, -1.0] ds.GetRasterBand(1).ComputeStatistics(False) ds.Close() ds = gdal.Open(vrt_filename) - assert struct.unpack("B" * 2, ds.GetRasterBand(1).ReadRaster()) == (11, 12) + np.testing.assert_equal(ds.GetRasterBand(1).ReadAsArray(), np.array([[11, 12]])) assert ds.GetRasterBand(1).GetStatistics(False, False) == [ 11.0, 12.0, From fa8b568722067e66d3613c487407b5dcc2b482e9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20=C5=BDdila?= <m.zdila@gmail.com> Date: Thu, 18 Apr 2024 00:47:48 +0200 Subject: [PATCH 034/230] gdal2tiles: added support for JPEG output Fixes #6703 --- autotest/pyscripts/test_gdal2tiles.py | 141 ++++++++++++++++++ doc/source/programs/gdal2tiles.rst | 19 ++- .../gdal-utils/osgeo_utils/gdal2tiles.py | 116 +++++++++++++- 3 files changed, 266 insertions(+), 10 deletions(-) diff --git a/autotest/pyscripts/test_gdal2tiles.py b/autotest/pyscripts/test_gdal2tiles.py index 0b24b8306679..05c1a81f028a 100755 --- a/autotest/pyscripts/test_gdal2tiles.py +++ b/autotest/pyscripts/test_gdal2tiles.py @@ -623,3 +623,144 @@ def test_gdal2tiles_excluded_values(script_path, tmp_path): (12 + 22 + 42) // 3, 255, ) + + +@pytest.mark.require_driver("JPEG") +@pytest.mark.parametrize( + "resampling, expected_stats_z0, expected_stats_z1", + ( + ( + "average", + [ + [0.0, 255.0, 62.789886474609375, 71.57543623020909], + [0.0, 255.0, 62.98188781738281, 70.54545410356597], + [0.0, 255.0, 77.94142150878906, 56.07427114858068], + ], + [ + [0.0, 255.0, 63.620819091796875, 68.38688881060699], + [0.0, 255.0, 63.620819091796875, 68.38688881060699], + [0.0, 255.0, 87.09403991699219, 53.07665243601322], + ], + ), + ( + "antialias", + [ + [0.0, 255.0, 62.66636657714844, 71.70766144632985], + [0.0, 255.0, 62.91070556640625, 70.705889259777], + [0.0, 255.0, 77.78370666503906, 56.251290816620596], + ], + [ + [0.0, 255.0, 63.61163330078125, 68.49625328462534], + [0.0, 255.0, 63.61163330078125, 68.49625328462534], + [0.0, 255.0, 87.04747009277344, 53.1751939061486], + ], + ), + ), +) +def test_gdal2tiles_py_jpeg_3band_input( + script_path, tmp_path, resampling, expected_stats_z0, expected_stats_z1 +): + + if resampling == "antialias" and not pil_available(): + pytest.skip("'antialias' resampling is not available") + + out_dir_jpeg = str(tmp_path / "out_gdal2tiles_smallworld_jpeg") + + test_py_scripts.run_py_script_as_external_script( + script_path, + "gdal2tiles", + "-q -z 0-1 -r " + + resampling + + " --tiledriver=JPEG " + + test_py_scripts.get_data_path("gdrivers") + + f"small_world.tif {out_dir_jpeg}", + ) + + ds = gdal.Open(f"{out_dir_jpeg}/0/0/0.jpg") + got_stats_0 = [ + ds.GetRasterBand(i + 1).ComputeStatistics(approx_ok=0) + for i in range(ds.RasterCount) + ] + + ds = gdal.Open(f"{out_dir_jpeg}/1/0/0.jpg") + got_stats_1 = [ + ds.GetRasterBand(i + 1).ComputeStatistics(approx_ok=0) + for i in range(ds.RasterCount) + ] + + for i in range(ds.RasterCount): + assert got_stats_0[i] == pytest.approx(expected_stats_z0[i], rel=0.05), ( + i, + got_stats_0, + got_stats_1, + ) + + for i in range(ds.RasterCount): + assert got_stats_1[i] == pytest.approx(expected_stats_z1[i], 
rel=0.05), ( + i, + got_stats_0, + got_stats_1, + ) + + +@pytest.mark.require_driver("JPEG") +@pytest.mark.parametrize( + "resampling, expected_stats_z14, expected_stats_z13", + ( + ( + ( + "average", + [[0.0, 255.0, 44.11726379394531, 61.766206763153946]], + [[0.0, 255.0, 11.057342529296875, 36.182401045647644]], + ), + ( + "antialias", + [[0.0, 255.0, 43.9254150390625, 61.58666064861184]], + [[0.0, 255.0, 11.013427734375, 36.12022842174338]], + ), + ) + ), +) +def test_gdal2tiles_py_jpeg_1band_input( + script_path, tmp_path, resampling, expected_stats_z14, expected_stats_z13 +): + + if resampling == "antialias" and not pil_available(): + pytest.skip("'antialias' resampling is not available") + + out_dir_jpeg = str(tmp_path / "out_gdal2tiles_byte_jpeg") + + test_py_scripts.run_py_script_as_external_script( + script_path, + "gdal2tiles", + "-q -z 13-14 -r " + + resampling + + " --tiledriver=JPEG " + + test_py_scripts.get_data_path("gcore") + + f"byte.tif {out_dir_jpeg}", + ) + + ds = gdal.Open(f"{out_dir_jpeg}/14/2838/9833.jpg") + got_stats_14 = [ + ds.GetRasterBand(i + 1).ComputeStatistics(approx_ok=0) + for i in range(ds.RasterCount) + ] + + ds = gdal.Open(f"{out_dir_jpeg}/13/1419/4916.jpg") + got_stats_13 = [ + ds.GetRasterBand(i + 1).ComputeStatistics(approx_ok=0) + for i in range(ds.RasterCount) + ] + + for i in range(ds.RasterCount): + assert got_stats_14[i] == pytest.approx(expected_stats_z14[i], rel=0.05), ( + i, + got_stats_14, + got_stats_13, + ) + for i in range(ds.RasterCount): + assert got_stats_13[i] == pytest.approx(expected_stats_z13[i], rel=0.05), ( + i, + got_stats_14, + got_stats_13, + ) diff --git a/doc/source/programs/gdal2tiles.rst b/doc/source/programs/gdal2tiles.rst index 79ca7e41e8f6..f46116f11271 100644 --- a/doc/source/programs/gdal2tiles.rst +++ b/doc/source/programs/gdal2tiles.rst @@ -21,7 +21,7 @@ Synopsis [-e] [-a nodata] [-v] [-q] [-h] [-k] [-n] [-u <url>] [-w <webviewer>] [-t <title>] [-c <copyright>] [--processes=<NB_PROCESSES>] [--mpi] [--xyz] - [--tilesize=<PIXELS>] [--tmscompatible] + [--tilesize=<PIXELS>] [--tiledriver=<DRIVER>] [--tmscompatible] [--excluded-values=<EXCLUDED_VALUES>] [--excluded-values-pct-threshold=<EXCLUDED_VALUES_PCT_THRESHOLD>] [-g <googlekey] [-b <bingkey>] <input_file> [<output_dir>] [<COMMON_OPTIONS>] @@ -139,8 +139,8 @@ can publish a picture without proper georeferencing too. .. option:: --tiledriver=<DRIVER> Which output driver to use for the tiles, determines the file format of the tiles. - Currently PNG and WEBP are supported. Default is PNG. - Additional configuration for the WEBP driver are documented below. + Currently PNG, WEBP and JPEG (JPEG added in GDAL 3.9) are supported. Default is PNG. + Additional configuration for the WEBP and JPEG drivers is documented below. .. versionadded:: 3.6 @@ -266,6 +266,19 @@ The following configuration options are available to further customize the webp GDAL :ref:`WEBP driver <raster.webp>` documentation can be consulted +JPEG options ++++++++++++++ + +JPEG tiledriver support is new to GDAL 3.9. It is enabled by using --tiledriver=JPEG. + +Note that JPEG does not support transparency, hence edge tiles will display black +pixels in areas not covered by the source raster. + +The following configuration options are available to further customize the JPEG output: + +.. option:: --jpeg-quality=JPEG_QUALITY + + JPEG_QUALITY is an integer between 1 and 100. Default is 75. 
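For instance, a JPEG tile pyramid might be produced and inspected along the following lines (a minimal sketch: the input raster and output directory names are placeholders, and it assumes that gdal2tiles.py is on the PATH and that the GDAL JPEG driver is available)::

    # Build zoom levels 0-1 as JPEG tiles with an explicit quality setting.
    import subprocess

    from osgeo import gdal

    subprocess.check_call(
        [
            "gdal2tiles.py",
            "-q",
            "-z", "0-1",
            "-r", "average",
            "--tiledriver=JPEG",
            "--jpeg-quality=85",
            "small_world.tif",  # placeholder input raster
            "jpeg_tiles",  # placeholder output directory
        ]
    )

    # Tiles are written with a .jpg extension; since JPEG has no alpha channel,
    # the alpha band of the working tile is dropped before the tile is saved.
    tile = gdal.Open("jpeg_tiles/0/0/0.jpg")
    print(tile.RasterCount, tile.GetDriver().ShortName)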
Examples diff --git a/swig/python/gdal-utils/osgeo_utils/gdal2tiles.py b/swig/python/gdal-utils/osgeo_utils/gdal2tiles.py index 6d255b1a61e3..af8a85bdda35 100644 --- a/swig/python/gdal-utils/osgeo_utils/gdal2tiles.py +++ b/swig/python/gdal-utils/osgeo_utils/gdal2tiles.py @@ -919,7 +919,12 @@ def scale_query_to_tile(dsquery, dstile, options, tilefilename=""): array[:, :, i] = gdalarray.BandReadAsArray( dsquery.GetRasterBand(i + 1), 0, 0, querysize, querysize ) - im = Image.fromarray(array, "RGBA") # Always four bands + if options.tiledriver == "JPEG" and tilebands == 2: + im = Image.fromarray(array[:, :, 0], "L") + elif options.tiledriver == "JPEG" and tilebands == 4: + im = Image.fromarray(array[:, :, 0:3], "RGB") + else: + im = Image.fromarray(array, "RGBA") im1 = im.resize((tile_size, tile_size), Image.LANCZOS) if os.path.exists(tilefilename): im0 = Image.open(tilefilename) @@ -931,6 +936,8 @@ def scale_query_to_tile(dsquery, dstile, options, tilefilename=""): params["lossless"] = True else: params["quality"] = options.webp_quality + elif options.tiledriver == "JPEG": + params["quality"] = options.jpeg_quality im1.save(tilefilename, options.tiledriver, **params) else: @@ -1314,6 +1321,8 @@ def _get_creation_options(options): copts = ["LOSSLESS=True"] else: copts = ["QUALITY=" + str(options.webp_quality)] + elif options.tiledriver == "JPEG": + copts = ["QUALITY=" + str(options.jpeg_quality)] return copts @@ -1430,7 +1439,12 @@ def create_base_tile(tile_job_info: "TileJobInfo", tile_detail: "TileDetail") -> if options.resampling != "antialias": # Write a copy of tile to png/jpg out_drv.CreateCopy( - tilefilename, dstile, strict=0, options=_get_creation_options(options) + tilefilename, + dstile + if tile_job_info.tile_driver != "JPEG" + else remove_alpha_band(dstile), + strict=0, + options=_get_creation_options(options), ) # Remove useless side car file @@ -1465,6 +1479,38 @@ def create_base_tile(tile_job_info: "TileJobInfo", tile_detail: "TileDetail") -> ) +def remove_alpha_band(src_ds): + if ( + src_ds.GetRasterBand(src_ds.RasterCount).GetColorInterpretation() + != gdal.GCI_AlphaBand + ): + return src_ds + + new_band_count = src_ds.RasterCount - 1 + + dst_ds = gdal.GetDriverByName("MEM").Create( + "", + src_ds.RasterXSize, + src_ds.RasterYSize, + new_band_count, + src_ds.GetRasterBand(1).DataType, + ) + + gt = src_ds.GetGeoTransform(can_return_null=True) + if gt: + dst_ds.SetGeoTransform(gt) + srs = src_ds.GetSpatialRef() + if srs: + dst_ds.SetSpatialRef(srs) + + for i in range(1, new_band_count + 1): + src_band = src_ds.GetRasterBand(i) + dst_band = dst_ds.GetRasterBand(i) + dst_band.WriteArray(src_band.ReadAsArray()) + + return dst_ds + + def create_overview_tile( base_tz: int, base_tiles: List[Tuple[int, int]], @@ -1542,7 +1588,35 @@ def create_overview_tile( else: tileposy = 0 - if dsquerytile.RasterCount == tilebands - 1: + if ( + tile_job_info.tile_driver == "JPEG" + and dsquerytile.RasterCount == 3 + and tilebands == 2 + ): + # Input is RGB with R=G=B. 
Add An alpha band + tmp_ds = mem_driver.Create( + "", dsquerytile.RasterXSize, dsquerytile.RasterYSize, 2 + ) + tmp_ds.GetRasterBand(1).WriteRaster( + 0, + 0, + tile_job_info.tile_size, + tile_job_info.tile_size, + dsquerytile.GetRasterBand(1).ReadRaster(), + ) + mask = bytearray( + [255] * (tile_job_info.tile_size * tile_job_info.tile_size) + ) + tmp_ds.GetRasterBand(2).WriteRaster( + 0, + 0, + tile_job_info.tile_size, + tile_job_info.tile_size, + mask, + ) + tmp_ds.GetRasterBand(2).SetColorInterpretation(gdal.GCI_AlphaBand) + dsquerytile = tmp_ds + elif dsquerytile.RasterCount == tilebands - 1: # assume that the alpha band is missing and add it tmp_ds = mem_driver.CreateCopy("", dsquerytile, 0) tmp_ds.AddBand() @@ -1559,7 +1633,10 @@ def create_overview_tile( ) dsquerytile = tmp_ds elif dsquerytile.RasterCount != tilebands: - raise Exception("Unexpected number of bands in base tile") + raise Exception( + "Unexpected number of bands in base tile. Got %d, expected %d" + % (dsquerytile.RasterCount, tilebands) + ) base_data = dsquerytile.ReadRaster( 0, 0, tile_job_info.tile_size, tile_job_info.tile_size @@ -1584,7 +1661,12 @@ def create_overview_tile( if options.resampling != "antialias": # Write a copy of tile to png/jpg out_driver.CreateCopy( - tilefilename, dstile, strict=0, options=_get_creation_options(options) + tilefilename, + dstile + if tile_job_info.tile_driver != "JPEG" + else remove_alpha_band(dstile), + strict=0, + options=_get_creation_options(options), ) # Remove useless side car file aux_xml = tilefilename + ".aux.xml" @@ -1772,7 +1854,7 @@ def optparse_init() -> optparse.OptionParser: p.add_option( "--tiledriver", dest="tiledriver", - choices=["PNG", "WEBP"], + choices=["PNG", "WEBP", "JPEG"], default="PNG", type="choice", help="which tile driver to use for the tiles", @@ -1883,6 +1965,17 @@ def optparse_init() -> optparse.OptionParser: ) p.add_option_group(g) + # Jpeg options + g = optparse.OptionGroup(p, "JPEG options", "Options for JPEG tiledriver") + g.add_option( + "--jpeg-quality", + dest="jpeg_quality", + type=int, + default=75, + help="quality of jpeg image, integer between 1 and 100, default is 75", + ) + p.add_option_group(g) + p.set_defaults( verbose=False, profile="mercator", @@ -1996,6 +2089,13 @@ def options_post_processing( if options.webp_quality <= 0 or options.webp_quality > 100: exit_with_error("webp_quality should be in the range [1-100]") options.webp_quality = int(options.webp_quality) + elif options.tiledriver == "JPEG": + if gdal.GetDriverByName(options.tiledriver) is None: + exit_with_error("JPEG driver is not available") + + if options.jpeg_quality <= 0 or options.jpeg_quality > 100: + exit_with_error("jpeg_quality should be in the range [1-100]") + options.jpeg_quality = int(options.jpeg_quality) # Output the results if options.verbose: @@ -2113,8 +2213,10 @@ def __init__(self, input_file: str, output_folder: str, options: Options) -> Non self.tiledriver = options.tiledriver if options.tiledriver == "PNG": self.tileext = "png" - else: + elif options.tiledriver == "WEBP": self.tileext = "webp" + else: + self.tileext = "jpg" if options.mpi: makedirs(output_folder) self.tmp_dir = tempfile.mkdtemp(dir=output_folder) From 353c9de863a5f925a2b7074d94f4d3ae3982dfa5 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Mon, 1 Apr 2024 19:59:53 -0400 Subject: [PATCH 035/230] Python bindings: Make ogr.DataSource a synonym of gdal.Dataset --- autotest/gcore/basic_test.py | 27 ++ autotest/ogr/ogr_basic_test.py | 36 +++ 
autotest/pymod/gdaltest.py | 5 +- swig/include/Dataset.i | 12 +- swig/include/Driver.i | 4 + swig/include/gdal.i | 9 + swig/include/ogr.i | 7 + .../include/python/docs/ogr_datasource_docs.i | 298 ------------------ swig/include/python/gdal_python.i | 67 +++- swig/include/python/ogr_python.i | 250 +-------------- swig/python/CMakeLists.txt | 1 - 11 files changed, 162 insertions(+), 554 deletions(-) delete mode 100644 swig/include/python/docs/ogr_datasource_docs.i diff --git a/autotest/gcore/basic_test.py b/autotest/gcore/basic_test.py index fadea2241720..63498fdf1581 100755 --- a/autotest/gcore/basic_test.py +++ b/autotest/gcore/basic_test.py @@ -938,3 +938,30 @@ def test_tmp_vsimem(tmp_vsimem): assert isinstance(tmp_vsimem, os.PathLike) assert gdal.VSIStatL(tmp_vsimem) is not None + + +def test_band_iter(): + + ds = gdal.Open("data/rgba.tif") + + assert len(ds) == 4 + + bands = [] + + for band in ds: + bands.append(band) + + assert len(bands) == 4 + + +def test_band_getitem(): + + ds = gdal.Open("data/rgba.tif") + + assert ds[2].this == ds.GetRasterBand(2).this + + with pytest.raises(IndexError): + ds[0] + + with pytest.raises(IndexError): + ds[5] diff --git a/autotest/ogr/ogr_basic_test.py b/autotest/ogr/ogr_basic_test.py index 103f810b4c8a..30cacecf431d 100755 --- a/autotest/ogr/ogr_basic_test.py +++ b/autotest/ogr/ogr_basic_test.py @@ -46,6 +46,8 @@ def test_ogr_basic_1(): assert ds is not None + assert isinstance(ds, ogr.DataSource) + ############################################################################### # Test Feature counting. @@ -713,6 +715,36 @@ def test_ogr_basic_dataset_slice(): assert lyrs == ["lyr1", "lyr3"] +def test_ogr_basic_dataset_iter(): + + ds = ogr.GetDriverByName("Memory").CreateDataSource("") + ds.CreateLayer("lyr1") + ds.CreateLayer("lyr2") + ds.CreateLayer("lyr3") + + layers = [] + + assert len(ds) == 3 + + for lyr in ds: + layers.append(lyr) + + assert len(layers) == 3 + + +def test_ogr_basic_dataset_getitem(): + + ds = ogr.GetDriverByName("Memory").CreateDataSource("") + ds.CreateLayer("lyr1") + ds.CreateLayer("lyr2") + ds.CreateLayer("lyr3") + + assert ds[2].this == ds.GetLayer(2).this + + with pytest.raises(IndexError): + ds[3] + + def test_ogr_basic_feature_iterator(): ds = ogr.Open("data/poly.shp") @@ -995,6 +1027,7 @@ def test_datasource_use_after_close_2(): ds2.GetLayer(0) +@pytest.mark.filterwarnings("ignore::DeprecationWarning") def test_datasource_use_after_destroy(): ds = ogr.Open("data/poly.shp") @@ -1009,6 +1042,7 @@ def test_datasource_use_after_destroy(): ds2.GetLayer(0) +@pytest.mark.filterwarnings("ignore::DeprecationWarning") def test_datasource_use_after_release(): ds = ogr.Open("data/poly.shp") @@ -1060,6 +1094,7 @@ def test_layer_use_after_datasource_close_3(tmp_path): lyr2.GetFeatureCount() +@pytest.mark.filterwarnings("ignore::DeprecationWarning") def test_layer_use_after_datasource_destroy(): ds = ogr.Open("data/poly.shp") lyr = ds.GetLayerByName("poly") @@ -1071,6 +1106,7 @@ def test_layer_use_after_datasource_destroy(): lyr.GetFeatureCount() +@pytest.mark.filterwarnings("ignore::DeprecationWarning") def test_layer_use_after_datasource_release(): ds = ogr.Open("data/poly.shp") lyr = ds.GetLayerByName("poly") diff --git a/autotest/pymod/gdaltest.py b/autotest/pymod/gdaltest.py index 0df2d87773f1..f086817dcf76 100755 --- a/autotest/pymod/gdaltest.py +++ b/autotest/pymod/gdaltest.py @@ -49,7 +49,7 @@ import pytest -from osgeo import gdal, ogr, osr +from osgeo import gdal, osr jp2kak_drv = None jpeg2000_drv = None @@ -2073,9 +2073,6 @@ 
def reopen(ds, update=False, open_options=None): ds.Close() - if isinstance(ds, ogr.DataSource) and open_options is None: - return ogr.Open(ds_loc, update) - flags = 0 if update: flags = gdal.OF_UPDATE diff --git a/swig/include/Dataset.i b/swig/include/Dataset.i index 7942fe2cd447..2f3b2e9b6ab7 100644 --- a/swig/include/Dataset.i +++ b/swig/include/Dataset.i @@ -313,6 +313,16 @@ public: return GDALGetProjectionRef( self ); } +#ifdef SWIGPYTHON + int GetRefCount() { + return OGR_DS_GetRefCount(self); + } + + int GetSummaryRefCount() { + return OGR_DS_GetSummaryRefCount(self); + } +#endif + %newobject GetSpatialRef; OSRSpatialReferenceShadow *GetSpatialRef() { OGRSpatialReferenceH ref = GDALGetSpatialRef(self); @@ -910,7 +920,7 @@ CPLErr AdviseRead( int xoff, int yoff, int xsize, int ysize, } #ifdef SWIGCSHARP - + %newobject GetNextFeature; OGRFeatureShadow *GetNextFeature( OGRLayerShadow** ppoBelongingLayer = NULL, double* pdfProgressPct = NULL, diff --git a/swig/include/Driver.i b/swig/include/Driver.i index 7117cdbbcc13..6c751ed0ee6a 100644 --- a/swig/include/Driver.i +++ b/swig/include/Driver.i @@ -126,6 +126,10 @@ public: return GDALCopyDatasetFiles( self, newName, oldName ); } + const char *GetName() { + return GDALGetDescription(self); + } + int Register() { return GDALRegisterDriver( self ); } diff --git a/swig/include/gdal.i b/swig/include/gdal.i index 6212f749953a..f33cf99b59ef 100644 --- a/swig/include/gdal.i +++ b/swig/include/gdal.i @@ -2169,3 +2169,12 @@ GDALDatasetShadow* wrapper_GDALMultiDimTranslateDestName( const char* dest, %clear (const char* dest); +#if defined(SWIGPYTHON) +// This enables constructs such as isinstance(x, ogr.DataSource) to +// return True for a gdal.Dataset. We can't include it in gdal_python.i +// because Dataset is not defined at that point. +%pythoncode %{ +ogr.DataSource = Dataset +%} +#endif + diff --git a/swig/include/ogr.i b/swig/include/ogr.i index 4af4c93f524e..68c9693a3f07 100644 --- a/swig/include/ogr.i +++ b/swig/include/ogr.i @@ -871,6 +871,11 @@ public: /* OGRDataSource */ /************************************************************************/ +#ifdef SWIGPYTHON +/* In Python, ogr.DataSource and gdal.Dataset are equivalent */ +typedef GDALDatasetShadow OGRDataSourceShadow; + +#else %rename (DataSource) OGRDataSourceShadow; @@ -1033,6 +1038,8 @@ public: }; /* class OGRDataSourceShadow */ +#endif /* not SWIGPYTHON */ + #endif /* FROM_GDAL_I */ #ifdef SWIGPYTHON diff --git a/swig/include/python/docs/ogr_datasource_docs.i b/swig/include/python/docs/ogr_datasource_docs.i deleted file mode 100644 index 4ef3689b4273..000000000000 --- a/swig/include/python/docs/ogr_datasource_docs.i +++ /dev/null @@ -1,298 +0,0 @@ -%feature("docstring") OGRDataSourceShadow " -Python proxy of a vector :cpp:class:`GDALDataset`. - -Since GDAL 3.8, a DataSource can be used as a context manager. -When exiting the context, the DataSource will be closed and -features will be written to disk. -" - -%extend OGRDataSourceShadow { -// File: ogrdatasource_8cpp.xml - -%feature("docstring") Close " -Closes opened dataset and releases allocated resources. - -This method can be used to force the dataset to close -when one more references to the dataset are still -reachable. If Close is never called, the dataset will -be closed automatically during garbage collection. -" - -%feature("docstring") Destroy "void OGR_DS_Destroy(OGRDataSourceH -hDS) - -Closes opened datasource and releases allocated resources. 
- -This method is the same as the C++ method -OGRDataSource::DestroyDataSource(). - -Deprecated Use GDALClose() in GDAL 2.0 - -Parameters ------------ -hDS: - handle to allocated datasource object. -"; - -%feature("docstring") Reference "int OGR_DS_Reference(OGRDataSourceH -hDataSource) "; - -%feature("docstring") Dereference "int -OGR_DS_Dereference(OGRDataSourceH hDataSource) "; - -%feature("docstring") GetRefCount "int -OGR_DS_GetRefCount(OGRDataSourceH hDataSource) "; - -%feature("docstring") GetSummaryRefCount "int -OGR_DS_GetSummaryRefCount(OGRDataSourceH hDataSource) "; - -%feature("docstring") CreateLayer "OGRLayerH -OGR_DS_CreateLayer(OGRDataSourceH hDS, const char \\*pszName, -OGRSpatialReferenceH hSpatialRef, OGRwkbGeometryType eType, char -\\*\\*papszOptions) - -This function attempts to create a new layer on the data source with -the indicated name, coordinate system, geometry type. - -The papszOptions argument can be used to control driver specific -creation options. These options are normally documented in the format -specific documentation. - -Deprecated Use GDALDatasetCreateLayer() in GDAL 2.0 - -Parameters ------------ -hDS: - The dataset handle.pszName: the name for the new layer. This should ideally not match - any existing layer on the datasource. -hSpatialRef: - handle to the coordinate system to use for the new - layer, or NULL if no coordinate system is available. The driver might - only increase the reference counter of the object to take ownership, - and not make a full copy, so do not use OSRDestroySpatialReference(), - but OSRRelease() instead when you are done with the object. -eType: - the geometry type for the layer. Use wkbUnknown if there are - no constraints on the types geometry to be written. -papszOptions: - a StringList of name=value options. Options are driver - specific, and driver information can be found at the following - url:http://www.gdal.org/ogr_formats.html - - -Returns --------- -OGRLayerH: - NULL is returned on failure, or a new OGRLayer handle on success. -"; - -%feature("docstring") CopyLayer "OGRLayerH -OGR_DS_CopyLayer(OGRDataSourceH hDS, OGRLayerH hSrcLayer, const char -\\*pszNewName, char \\*\\*papszOptions) - -Duplicate an existing layer. - -This function creates a new layer, duplicate the field definitions of -the source layer and then duplicate each features of the source layer. -The papszOptions argument can be used to control driver specific -creation options. These options are normally documented in the format -specific documentation. The source layer may come from another -dataset. - -Deprecated Use GDALDatasetCopyLayer() in GDAL 2.0 - -Parameters ------------ -hDS: - handle to the data source where to create the new layer -hSrcLayer: - handle to the source layer. -pszNewName: - the name of the layer to create. -papszOptions: - a StringList of name=value options. Options are driver - specific. - -Returns -------- -OGRLayerH: - a handle to the layer, or NULL if an error occurs. -"; - -%feature("docstring") GetLayerByName "OGRLayerH -OGR_DS_GetLayerByName(OGRDataSourceH hDS, const char \\*pszLayerName) - -Fetch a layer by name. - -The returned layer remains owned by the OGRDataSource and should not -be deleted by the application. - -Deprecated Use GDALDatasetGetLayerByName() in GDAL 2.0 - -Parameters ------------ -hDS: - handle to the data source from which to get the layer. -pszLayerName: - Layer the layer name of the layer to fetch. 
- - -Returns --------- -OGRLayerH: - a handle to the layer, or NULL if the layer is not found or an error - occurs. -"; - -%feature("docstring") TestCapability "int -OGR_DS_TestCapability(OGRDataSourceH hDS, const char \\*pszCapability) - -Test if capability is available. - -One of the following data source capability names can be passed into -this function, and a TRUE or FALSE value will be returned indicating -whether or not the capability is available for this object. - -ODsCCreateLayer: True if this datasource can create new layers. - -ODsCDeleteLayer: True if this datasource can delete existing layers. - -ODsCCreateGeomFieldAfterCreateLayer: True if the layers of this -datasource support CreateGeomField() just after layer creation. - -ODsCCurveGeometries: True if this datasource supports writing curve -geometries. (GDAL 2.0). In that case, OLCCurveGeometries must also be -declared in layers of that dataset. - -The #define macro forms of the capability names should be used in -preference to the strings themselves to avoid misspelling. - -Deprecated Use GDALDatasetTestCapability() in GDAL 2.0 - -Parameters ------------ -hDS: - handle to the data source against which to test the capability. -pszCapability: - the capability to test. - -Returns --------- -int: - TRUE if capability available otherwise FALSE. -"; - -%feature("docstring") GetLayerCount "int -OGR_DS_GetLayerCount(OGRDataSourceH hDS) - -Get the number of layers in this data source. - -Deprecated Use GDALDatasetGetLayerCount() in GDAL 2.0 - -Parameters ------------ -hDS: - handle to the data source from which to get the number of - layers. - -Returns --------- -int: - layer count. -"; - -%feature("docstring") GetLayer "OGRLayerH -OGR_DS_GetLayer(OGRDataSourceH hDS, int iLayer) - -Fetch a layer by index. - -The returned layer remains owned by the OGRDataSource and should not -be deleted by the application. - -Deprecated Use GDALDatasetGetLayer() in GDAL 2.0 - -Parameters ------------ -hDS: - handle to the data source from which to get the layer. -iLayer: - a layer number between 0 and OGR_DS_GetLayerCount()-1. - -Returns --------- -OGRLayerH: - a handle to the layer, or NULL if iLayer is out of range or an error - occurs. -"; - -%feature("docstring") GetName "const char\\* -OGR_DS_GetName(OGRDataSourceH hDS) - -Returns the name of the data source. - -This string should be sufficient to open the data source if passed to -the same OGRSFDriver that this data source was opened with, but it -need not be exactly the same string that was used to open the data -source. Normally this is a filename. - -Deprecated Use GDALGetDescription() in GDAL 2.0 - -Parameters ------------ -hDS: - handle to the data source to get the name from. - -Returns --------- -str: - pointer to an internal name string which should not be modified or - freed by the caller. -"; - -%feature("docstring") SyncToDisk "OGRErr -OGR_DS_SyncToDisk(OGRDataSourceH hDS) - -Flush pending changes to disk. - -See GDALDataset::FlushCache() "; - -%feature("docstring") GetDriver "OGRSFDriverH -OGR_DS_GetDriver(OGRDataSourceH hDS) - -Returns the driver that the dataset was opened with. - -NOTE: Starting with GDAL 2.0, it is NOT safe to cast the returned -handle to OGRSFDriver\\*. If a C++ object is needed, the handle should -be cast to GDALDriver\\*. 
- -Deprecated Use GDALGetDatasetDriver() in GDAL 2.0 - -Parameters ------------ -hDS: - handle to the datasource - -Returns --------- -OGRSFDriverH: - NULL if driver info is not available, or pointer to a driver owned by - the OGRSFDriverManager. -"; - -%feature("docstring") GetStyleTable "OGRStyleTableH -OGR_DS_GetStyleTable(OGRDataSourceH hDS) - -Get style table. "; - -%feature("docstring") SetStyleTableDirectly "void -OGR_DS_SetStyleTableDirectly(OGRDataSourceH hDS, OGRStyleTableH -hStyleTable) - -Set style table (and take ownership) "; - -%feature("docstring") SetStyleTable "void -OGR_DS_SetStyleTable(OGRDataSourceH hDS, OGRStyleTableH hStyleTable) - -Set style table. "; - -} diff --git a/swig/include/python/gdal_python.i b/swig/include/python/gdal_python.i index e8713e76a783..32c4da993ef0 100644 --- a/swig/include/python/gdal_python.i +++ b/swig/include/python/gdal_python.i @@ -20,6 +20,7 @@ } // Will be turned on for GDAL 4.0 // UseExceptions(); + %} %{ @@ -177,7 +178,6 @@ static void readraster_releasebuffer(CPLErr eErr, from osgeo.gdalconst import * from osgeo import gdalconst - import sys byteorders = {"little": "<", "big": ">"} @@ -1485,6 +1485,22 @@ CPLErr ReadRaster1( double xoff, double yoff, double xsize, double ysize, else: return self._SetGCPs2(gcps, wkt_or_spatial_ref) + def Destroy(self): + import warnings + warnings.warn("Destroy() is deprecated; use a context manager or Close() instead", DeprecationWarning) + self.Close() + + def Release(self): + import warnings + warnings.warn("Release() is deprecated; use a context manager or Close() instead", DeprecationWarning) + self.Close() + + def SyncToDisk(self): + return self.FlushCache() + + def GetName(self): + return self.GetDescription() + def _add_child_ref(self, child): if child is None: return @@ -1510,6 +1526,54 @@ CPLErr ReadRaster1( double xoff, double yoff, double xsize, double ysize, def __exit__(self, *args): self.Close() + + def __bool__(self): + return True + + def __len__(self): + return self.RasterCount + self.GetLayerCount() + + def __iter__(self): + if self.RasterCount: + for band in range(1, self.RasterCount + 1): + yield self[band] + else: + for layer in range(self.GetLayerCount()): + yield self[layer] + + def __getitem__(self, value): + """Support dictionary, list, and slice -like access to the datasource. + ds[0] would return the first layer on the datasource. + ds['aname'] would return the layer named "aname". 
+ ds[0:4] would return a list of the first four layers.""" + + if self.RasterCount and self.GetLayerCount(): + raise ValueError("Cannot access slice of Dataset with both raster bands and vector layers") + + if self.GetLayerCount(): + get = self.GetLayer + min = 0 + max = self.GetLayerCount() - 1 + else: + get = self.GetRasterBand + min = 1 + max = self.RasterCount + + if isinstance(value, slice): + output = [] + step = value.step if value.step else 1 + for i in range(value.start, value.stop, step): + lyr = self.GetLayer(i) + if lyr is None: + return output + output.append(lyr) + return output + + if value < min or value > max: + # Exception needed to make for _ in loop finish + raise IndexError(value) + + return get(value) %} %feature("pythonappend") Close %{ @@ -4748,6 +4812,7 @@ def quiet_errors(): yield finally: PopErrorHandler() + %} diff --git a/swig/include/python/ogr_python.i b/swig/include/python/ogr_python.i index bedbf5da977e..c3825197da94 100644 --- a/swig/include/python/ogr_python.i +++ b/swig/include/python/ogr_python.i @@ -30,7 +30,6 @@ %include "ogr_docs.i" %include "ogr_layer_docs.i" #ifndef FROM_GDAL_I -%include "ogr_datasource_docs.i" %include "ogr_driver_docs.i" #endif %include "ogr_feature_docs.i" @@ -48,6 +47,7 @@ %{ #define MODULE_NAME "ogr" %} +#endif %include "python_exceptions.i" %include "python_strings.i" @@ -84,254 +84,6 @@ def _WarnIfUserHasNotSpecifiedIfUsingExceptions(): args[0][i] = str(args[0][i]) %} -%extend OGRDataSourceShadow { - %pythoncode { - - def Destroy(self): - "Once called, self has effectively been destroyed. Do not access. For backwards compatibility only" - _ogr.delete_DataSource(self) - self.thisown = 0 - self.this = None - self._invalidate_layers() - - def Release(self): - "Once called, self has effectively been destroyed. Do not access. For backwards compatibility only" - _ogr.delete_DataSource(self) - self.thisown = 0 - self.this = None - self._invalidate_layers() - - def Reference(self): - "For backwards compatibility only." - return self.Reference() - - def Dereference(self): - "For backwards compatibility only." - self.Dereference() - - def __len__(self): - """Returns the number of layers on the datasource""" - return self.GetLayerCount() - - def __enter__(self): - return self - - def __exit__(self, *args): - self.Close() - - def __del__(self): - self._invalidate_layers() - - def __getitem__(self, value): - """Support dictionary, list, and slice -like access to the datasource. - ds[0] would return the first layer on the datasource. - ds['aname'] would return the layer named "aname". 
- ds[0:4] would return a list of the first four layers.""" - if isinstance(value, slice): - output = [] - step = value.step if value.step else 1 - for i in range(value.start, value.stop, step): - lyr = self.GetLayer(i) - if lyr is None: - return output - output.append(lyr) - return output - if isinstance(value, int): - if value > len(self) - 1: - raise IndexError - return self.GetLayer(value) - elif isinstance(value, str): - return self.GetLayer(value) - else: - raise TypeError('Input %s is not of String or Int type' % type(value)) - - def GetLayer(self, iLayer=0): - """Return the layer given an index or a name""" - - _WarnIfUserHasNotSpecifiedIfUsingExceptions() - - if isinstance(iLayer, str): - return self.GetLayerByName(str(iLayer)) - elif isinstance(iLayer, int): - return self.GetLayerByIndex(iLayer) - else: - raise TypeError("Input %s is not of String or Int type" % type(iLayer)) - - def _invalidate_layers(self, lyr = None): - if hasattr(self, '_layer_references'): - for lyr in self._layer_references: - lyr.this = None - - - def _add_layer_ref(self, lyr): - if not lyr: - return - - if not hasattr(self, '_layer_references'): - import weakref - - self._layer_references = weakref.WeakSet() - - self._layer_references.add(lyr) - } - -%feature("pythonappend") GetLayerByName %{ - self._add_layer_ref(val) -%} - -%feature("pythonappend") GetLayerByIndex %{ - self._add_layer_ref(val) -%} - -%feature("pythonappend") CreateLayer %{ - self._add_layer_ref(val) -%} - -%feature("pythonappend") CopyLayer %{ - self._add_layer_ref(val) -%} - -%feature("pythonappend") Close %{ - self.thisown = 0 - self.this = None - self._invalidate_layers() -%} - -%feature("shadow") DeleteLayer %{ - def DeleteLayer(self, value) -> "OGRErr": - """ - DeleteLayer(DataSource self, value) -> OGRErr - - Delete the indicated layer from the datasource. - - For more details: :c:func:`OGR_DS_DeleteLayer` - - Parameters - ----------- - value: str | int - index or name of the layer to delete. - - Returns - ------- - int: - :py:const:`osgeo.ogr.OGRERR_NONE` on success, or :py:const:`osgeo.ogr.OGRERR_UNSUPPORTED_OPERATION` if deleting - layers is not supported for this datasource. - """ - - if isinstance(value, str): - for i in range(self.GetLayerCount()): - lyr = self.GetLayer(i) - if lyr.GetName() == value: - return $action(self, i) - raise ValueError("Layer %s not found to delete" % value) - elif isinstance(value, int): - return $action(self, value) - else: - raise TypeError("Input %s is not of String or Int type" % type(value)) -%} - -%feature("shadow") ExecuteSQL %{ -def ExecuteSQL(self, statement, spatialFilter=None, dialect="", keep_ref_on_ds=False): - """ExecuteSQL(self, statement, spatialFilter: ogr.Geometry = None, dialect: Optional[str] = "", keep_ref_on_ds=False) -> ogr.Layer - - Execute a SQL statement against the dataset - - The result of a SQL query is: - - None (or an exception if exceptions are enabled) for statements - that are in error - - or None for statements that have no results set, - - or a ogr.Layer handle representing a results set from the query. - - Note that this ogr.Layer is in addition to the layers in the data store - and must be released with ReleaseResultSet() before the data source is closed - (destroyed). - - Starting with GDAL 3.7, this method can also be used as a context manager, - as a convenient way of automatically releasing the returned result layer. 
- - For more information on the SQL dialect supported internally by OGR - review the OGR SQL document (:ref:`ogr_sql_sqlite_dialect`) - Some drivers (i.e. Oracle and PostGIS) pass the SQL directly through to the - underlying RDBMS. - - The SQLITE dialect can also be used (:ref:`sql_sqlite_dialect`) - - Parameters - ---------- - statement: - the SQL statement to execute (e.g "SELECT * FROM layer") - spatialFilter: - a geometry which represents a spatial filter. Can be None - dialect: - allows control of the statement dialect. If set to None or empty string, - the OGR SQL engine will be used, except for RDBMS drivers that will - use their dedicated SQL engine, unless OGRSQL is explicitly passed as - the dialect. The SQLITE dialect can also be used. - keep_ref_on_ds: - whether the returned layer should keep a (strong) reference on - the current dataset. Cf example 2 for a use case. - - Returns - ------- - ogr.Layer: - a ogr.Layer containing the results of the query, that will be - automatically released when the context manager goes out of scope. - - Examples - -------- - 1. Use as a context manager: - - >>> with ds.ExecuteSQL("SELECT * FROM layer") as lyr: - ... print(lyr.GetFeatureCount()) - - 2. Use keep_ref_on_ds=True to return an object that keeps a reference to its dataset: - - >>> def get_sql_lyr(): - ... return gdal.OpenEx("test.shp").ExecuteSQL("SELECT * FROM test", keep_ref_on_ds=True) - ... - ... with get_sql_lyr() as lyr: - ... print(lyr.GetFeatureCount()) - """ - - sql_lyr = $action(self, statement, spatialFilter, dialect) - if sql_lyr: - import weakref - sql_lyr._to_release = True - sql_lyr._dataset_weak_ref = weakref.ref(self) - if keep_ref_on_ds: - sql_lyr._dataset_strong_ref = self - return sql_lyr -%} - - -%feature("shadow") ReleaseResultSet %{ -def ReleaseResultSet(self, sql_lyr): - """ReleaseResultSet(self, sql_lyr: ogr.Layer) - - Release ogr.Layer returned by ExecuteSQL() (when not called as an execution manager) - - The sql_lyr object is invalidated after this call. 
- - Parameters - ---------- - sql_lyr: - ogr.Layer got with ExecuteSQL() - """ - - if sql_lyr and not hasattr(sql_lyr, "_to_release"): - raise Exception("This layer was not returned by ExecuteSQL() and should not be released with ReleaseResultSet()") - $action(self, sql_lyr) - # Invalidates the layer - if sql_lyr: - sql_lyr.thisown = None - sql_lyr.this = None -%} - -} - -#endif - - %extend OGRLayerShadow { %pythoncode %{ def Reference(self): diff --git a/swig/python/CMakeLists.txt b/swig/python/CMakeLists.txt index 9cb1b5d32db9..fecaab2e3803 100644 --- a/swig/python/CMakeLists.txt +++ b/swig/python/CMakeLists.txt @@ -36,7 +36,6 @@ set(GDAL_PYTHON_CSOURCES ${PROJECT_SOURCE_DIR}/swig/include/python/docs/gdal_dataset_docs.i ${PROJECT_SOURCE_DIR}/swig/include/python/docs/gdal_driver_docs.i ${PROJECT_SOURCE_DIR}/swig/include/python/docs/ogr_docs.i - ${PROJECT_SOURCE_DIR}/swig/include/python/docs/ogr_datasource_docs.i ${PROJECT_SOURCE_DIR}/swig/include/python/docs/ogr_driver_docs.i ${PROJECT_SOURCE_DIR}/swig/include/python/docs/ogr_featuredef_docs.i ${PROJECT_SOURCE_DIR}/swig/include/python/docs/ogr_feature_docs.i From a9745e5f8df5638c2a4e25d1ec035bb49fcf1dec Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Mon, 1 Apr 2024 21:18:38 -0400 Subject: [PATCH 036/230] Python bindings: Remove ogr.Driver --- autotest/ogr/ogr_basic_test.py | 22 ++++++++ swig/include/Driver.i | 7 ++- swig/include/gdal.i | 1 + swig/include/ogr.i | 7 +++ swig/include/python/docs/ogr_driver_docs.i | 4 -- swig/include/python/gdal_python.i | 21 +++++++ swig/include/python/ogr_python.i | 3 - swig/python/CMakeLists.txt | 1 - .../gdal-utils/osgeo_utils/gdal_retile.py | 55 ++++++++++--------- 9 files changed, 84 insertions(+), 37 deletions(-) delete mode 100644 swig/include/python/docs/ogr_driver_docs.i diff --git a/autotest/ogr/ogr_basic_test.py b/autotest/ogr/ogr_basic_test.py index 30cacecf431d..ee33cc6d5b8a 100755 --- a/autotest/ogr/ogr_basic_test.py +++ b/autotest/ogr/ogr_basic_test.py @@ -1211,3 +1211,25 @@ def test_general_cmd_line_processor(tmp_path): ["program", 2, tmp_path / "a_path", "a_string"] ) assert processed == ["program", "2", str(tmp_path / "a_path"), "a_string"] + + +def test_driver_open_throw_1(): + + with gdaltest.enable_exceptions(): + drv = ogr.GetDriverByName("ESRI Shapefile") + + assert isinstance(drv, ogr.Driver) + + with pytest.raises(RuntimeError, match="No such file or directory"): + drv.Open("does_not_exist.shp") + + +def test_driver_open_throw_2(): + + with gdaltest.enable_exceptions(): + drv = ogr.GetDriverByName("MapInfo File") + + assert isinstance(drv, ogr.Driver) + + with pytest.raises(RuntimeError, match="not recognized"): + drv.Open("data/poly.shp") diff --git a/swig/include/Driver.i b/swig/include/Driver.i index 6c751ed0ee6a..d5b9e1aac5ff 100644 --- a/swig/include/Driver.i +++ b/swig/include/Driver.i @@ -126,9 +126,12 @@ public: return GDALCopyDatasetFiles( self, newName, oldName ); } - const char *GetName() { - return GDALGetDescription(self); +#ifdef SWIGPYTHON + bool TestCapability(const char* cap) { + // TODO: should this also check DCAP entries in driver metadata? + return (OGR_Dr_TestCapability(self, cap) > 0); } +#endif int Register() { return GDALRegisterDriver( self ); diff --git a/swig/include/gdal.i b/swig/include/gdal.i index f33cf99b59ef..3799c74ed0f8 100644 --- a/swig/include/gdal.i +++ b/swig/include/gdal.i @@ -2175,6 +2175,7 @@ GDALDatasetShadow* wrapper_GDALMultiDimTranslateDestName( const char* dest, // because Dataset is not defined at that point. 
%pythoncode %{ ogr.DataSource = Dataset +ogr.Driver = Driver %} #endif diff --git a/swig/include/ogr.i b/swig/include/ogr.i index 68c9693a3f07..bf31e04663db 100644 --- a/swig/include/ogr.i +++ b/swig/include/ogr.i @@ -756,6 +756,11 @@ public: #ifndef FROM_GDAL_I +#ifdef SWIGPYTHON +/* In Python, gdal.Driver and ogr.Driver are equivalent */ +typedef GDALDriverShadow OGRDriverShadow; +#else + %rename (Driver) OGRDriverShadow; #ifdef SWIGCSHARP @@ -867,6 +872,8 @@ public: } /* %extend */ }; /* class OGRDriverShadow */ +#endif + /************************************************************************/ /* OGRDataSource */ /************************************************************************/ diff --git a/swig/include/python/docs/ogr_driver_docs.i b/swig/include/python/docs/ogr_driver_docs.i deleted file mode 100644 index 5f8899ead01c..000000000000 --- a/swig/include/python/docs/ogr_driver_docs.i +++ /dev/null @@ -1,4 +0,0 @@ -%extend OGRDriverShadow { -// File: ogrsfdriver_8cpp.xml - -} \ No newline at end of file diff --git a/swig/include/python/gdal_python.i b/swig/include/python/gdal_python.i index 32c4da993ef0..5ee9cda4e58c 100644 --- a/swig/include/python/gdal_python.i +++ b/swig/include/python/gdal_python.i @@ -2110,6 +2110,27 @@ def _WarnIfUserHasNotSpecifiedIfUsingOgrExceptions(): _WarnIfUserHasNotSpecifiedIfUsingExceptions() %} +%pythoncode %{ + +def CreateDataSource(self, utf8_path, options=None): + return self.Create(utf8_path, 0, 0, 0, GDT_Unknown, options or []) + +def CopyDataSource(self, ds, utf8_path, options=None): + return self.CreateCopy(utf8_path, ds, options = options or []) + +def DeleteDataSource(self, utf8_path): + return self.Delete(utf8_path) + +def Open(self, utf8_path, update=False): + return OpenEx(utf8_path, + OF_VECTOR | (OF_UPDATE if update else 0), + [self.GetDescription()]) + +def GetName(self): + return self.GetDescription() + +%} + } // End: to be removed in GDAL 4.0 diff --git a/swig/include/python/ogr_python.i b/swig/include/python/ogr_python.i index c3825197da94..04aec1e6bb13 100644 --- a/swig/include/python/ogr_python.i +++ b/swig/include/python/ogr_python.i @@ -29,9 +29,6 @@ %include "ogr_docs.i" %include "ogr_layer_docs.i" -#ifndef FROM_GDAL_I -%include "ogr_driver_docs.i" -#endif %include "ogr_feature_docs.i" %include "ogr_featuredef_docs.i" %include "ogr_fielddef_docs.i" diff --git a/swig/python/CMakeLists.txt b/swig/python/CMakeLists.txt index fecaab2e3803..8d5759cd9651 100644 --- a/swig/python/CMakeLists.txt +++ b/swig/python/CMakeLists.txt @@ -36,7 +36,6 @@ set(GDAL_PYTHON_CSOURCES ${PROJECT_SOURCE_DIR}/swig/include/python/docs/gdal_dataset_docs.i ${PROJECT_SOURCE_DIR}/swig/include/python/docs/gdal_driver_docs.i ${PROJECT_SOURCE_DIR}/swig/include/python/docs/ogr_docs.i - ${PROJECT_SOURCE_DIR}/swig/include/python/docs/ogr_driver_docs.i ${PROJECT_SOURCE_DIR}/swig/include/python/docs/ogr_featuredef_docs.i ${PROJECT_SOURCE_DIR}/swig/include/python/docs/ogr_feature_docs.i ${PROJECT_SOURCE_DIR}/swig/include/python/docs/ogr_fielddef_docs.i diff --git a/swig/python/gdal-utils/osgeo_utils/gdal_retile.py b/swig/python/gdal-utils/osgeo_utils/gdal_retile.py index 40a9f9d973da..a69946339c46 100644 --- a/swig/python/gdal-utils/osgeo_utils/gdal_retile.py +++ b/swig/python/gdal-utils/osgeo_utils/gdal_retile.py @@ -726,39 +726,40 @@ def createTile( def createTileIndex(Verbose, dsName, fieldName, srs, driverName): - OGRDriver = ogr.GetDriverByName(driverName) - if OGRDriver is None: - print("ESRI Shapefile driver not found", file=sys.stderr) - return 1 + with 
gdal.ExceptionMgr(useExceptions=False): + OGRDriver = ogr.GetDriverByName(driverName) + if OGRDriver is None: + print("ESRI Shapefile driver not found", file=sys.stderr) + return 1 - OGRDataSource = OGRDriver.Open(dsName) - if OGRDataSource is not None: - OGRDataSource.Destroy() - OGRDriver.DeleteDataSource(dsName) - if Verbose: - print("truncating index " + dsName) + OGRDataSource = OGRDriver.Open(dsName) + if OGRDataSource is not None: + OGRDataSource.Destroy() + OGRDriver.DeleteDataSource(dsName) + if Verbose: + print("truncating index " + dsName) - OGRDataSource = OGRDriver.CreateDataSource(dsName) - if OGRDataSource is None: - print("Could not open datasource " + dsName, file=sys.stderr) - return 1 + OGRDataSource = OGRDriver.CreateDataSource(dsName) + if OGRDataSource is None: + print("Could not open datasource " + dsName, file=sys.stderr) + return 1 - OGRLayer = OGRDataSource.CreateLayer("index", srs, ogr.wkbPolygon) - if OGRLayer is None: - print("Could not create Layer", file=sys.stderr) - return 1 + OGRLayer = OGRDataSource.CreateLayer("index", srs, ogr.wkbPolygon) + if OGRLayer is None: + print("Could not create Layer", file=sys.stderr) + return 1 - OGRFieldDefn = ogr.FieldDefn(fieldName, ogr.OFTString) - if OGRFieldDefn is None: - print("Could not create FieldDefn for " + fieldName, file=sys.stderr) - return 1 + OGRFieldDefn = ogr.FieldDefn(fieldName, ogr.OFTString) + if OGRFieldDefn is None: + print("Could not create FieldDefn for " + fieldName, file=sys.stderr) + return 1 - OGRFieldDefn.SetWidth(256) - if OGRLayer.CreateField(OGRFieldDefn) != 0: - print("Could not create Field for " + fieldName, file=sys.stderr) - return 1 + OGRFieldDefn.SetWidth(256) + if OGRLayer.CreateField(OGRFieldDefn) != 0: + print("Could not create Field for " + fieldName, file=sys.stderr) + return 1 - return OGRDataSource + return OGRDataSource def addFeature(TileIndexFieldName, OGRDataSource, location, xlist, ylist): From 78e9c9bcff2b71635a8049b6aabdcbe75953c078 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Wed, 3 Apr 2024 08:55:54 -0400 Subject: [PATCH 037/230] Doc: Provide Python docstrings for some ogr functions --- doc/source/api/python/osgeo.ogr.rst | 2 +- swig/include/python/docs/ogr_docs.i | 76 +++++++++++++++++++++++++++++ 2 files changed, 77 insertions(+), 1 deletion(-) diff --git a/doc/source/api/python/osgeo.ogr.rst b/doc/source/api/python/osgeo.ogr.rst index 1c9972558460..029118865a0f 100644 --- a/doc/source/api/python/osgeo.ogr.rst +++ b/doc/source/api/python/osgeo.ogr.rst @@ -5,4 +5,4 @@ osgeo.ogr module :members: :undoc-members: :show-inheritance: - :exclude-members: CreateCodedFieldDomain, CreateGeometryFromEsriJson, CreateGeometryFromGML, CreateGeometryFromJson, CreateGeometryFromWkb, CreateGeometryFromWkt, CreateGlobFieldDomain, CreateRangeFieldDomain, Feature, FeatureDefn, FieldDefn, FieldDomain, ForceTo, ForceToLineString, ForceToMultiLineString, ForceToMultiPoint, ForceToMultiPolygon, ForceToPolygon, GetFieldTypeName, GetFieldSubTypeName, GT_Flatten, GT_GetCollection, GT_GetCurve, GT_GetLinear, GT_HasM, GT_HasZ, GT_IsCurve, GT_IsNonLinear, GT_IsSubClassOf, GT_IsSurface, GT_SetM, GT_SetModifier, GT_SetZ, GeomFieldDefn, Geometry, GeometryTypeToName, Layer, StyleTable + :exclude-members: CreateCodedFieldDomain, CreateGeometryFromEsriJson, CreateGeometryFromGML, CreateGeometryFromJson, CreateGeometryFromWkb, CreateGeometryFromWkt, CreateGlobFieldDomain, CreateRangeFieldDomain, Feature, FeatureDefn, FieldDefn, FieldDomain, ForceTo, ForceToLineString, 
ForceToMultiLineString, ForceToMultiPoint, ForceToMultiPolygon, ForceToPolygon, GetFieldTypeName, GetFieldSubTypeName, GT_Flatten, GT_GetCollection, GT_GetCurve, GT_GetLinear, GT_HasM, GT_HasZ, GT_IsCurve, GT_IsNonLinear, GT_IsSubClassOf, GT_IsSurface, GT_SetM, GT_SetModifier, GT_SetZ, GeomFieldDefn, Geometry, GeometryTypeToName, Layer, StyleTable, thisown diff --git a/swig/include/python/docs/ogr_docs.i b/swig/include/python/docs/ogr_docs.i index 31a8890efe41..9d500e1fff4d 100644 --- a/swig/include/python/docs/ogr_docs.i +++ b/swig/include/python/docs/ogr_docs.i @@ -132,3 +132,79 @@ Examples >>> ogr.GetFieldTypeName(ogr.OFTReal) 'Real' "; + +%feature("docstring") GetDriverByName " + +Get a vector driver. Like :py:func:`gdal.GetDriverByName`, but +only returns drivers that handle vector data. + +Parameters +---------- +name : str + name of the driver to fetch + +Returns +------- +gdal.Driver + +Examples +-------- +>>> ogr.GetDriverByName('ESRI Shapefile').GetDescription() +'ESRI Shapefile' + +>>> ogr.GetDriverByName('GTiff') +>>> +" + +%feature("docstring") GetDriverCount " + +Returns the number of registered drivers that handle vector data. + +Returns +------- +int +" + +%feature("docstring") Open " + +Open a vector file as a :py:class:`gdal.Dataset`. +Equivalent to calling :py:func:`gdal.OpenEx` with the +:py:const:`gdal.OF_VECTOR` flag. + +Parameters +---------- +utf8_path : str + name of the file to open + +Returns +------- +gdal.Dataset, or ``None`` on failure + +Examples +-------- +>>> from osgeo import ogr +>>> ogr.GetDriverByName('ESRI Shapefile').GetDescription() +'ESRI Shapefile' +>>> ogr.GetDriverByName('GTiff') +>>> +"; + + +%feature("docstring") OpenShared " + +Open a vector file as a :py:class:`gdal.Dataset`. If the file has already been +opened in the current thread, return a reference to the already-opened +:py:class:`gdal.Dataset`. Equivalent to calling :py:func:`gdal.OpenEx` with the +:py:const:`gdal.OF_VECTOR` and :py:const:`gdal.OF_SHARED` flags. + +Parameters +---------- +utf8_path : str + name of the file to open + +Returns +------- +gdal.Dataset, or ``None`` on failure + +"; + From 8067deafe2f8038ee3270745b85b0fa132b7458a Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Wed, 3 Apr 2024 08:56:21 -0400 Subject: [PATCH 038/230] Python API: Ensure that ogr functions return known type if gdal not imported --- swig/include/python/ogr_python.i | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/swig/include/python/ogr_python.i b/swig/include/python/ogr_python.i index 04aec1e6bb13..f8a9af40580b 100644 --- a/swig/include/python/ogr_python.i +++ b/swig/include/python/ogr_python.i @@ -27,7 +27,9 @@ %} */ +#ifndef FROM_GDAL_I %include "ogr_docs.i" +#endif %include "ogr_layer_docs.i" %include "ogr_feature_docs.i" %include "ogr_featuredef_docs.i" @@ -68,8 +70,23 @@ def _WarnIfUserHasNotSpecifiedIfUsingExceptions(): "In GDAL 4.0, exceptions will be enabled by default.", FutureWarning) %} +// Need to ensure that gdal module has been loaded +// when calling an ogr function that returns a gdal type. %pythonprepend Open %{ _WarnIfUserHasNotSpecifiedIfUsingExceptions() + from . import gdal +%} + +%pythonprepend OpenShared %{ + from . import gdal +%} + +%pythonprepend GetDriverByName %{ + from . import gdal +%} + +%pythonprepend GetDriver %{ + from . 
import gdal %} // End: to be removed in GDAL 4.0 From 9f69b4c95d8670359ddf835a29ff4a5444076944 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 18 Apr 2024 01:31:38 +0200 Subject: [PATCH 039/230] HDF5 multidim: implement GDALMDArray::GetBlockSize() and GetStructuralInfo() --- autotest/gdrivers/data/hdf5/deflate.h5 | Bin 0 -> 26420 bytes autotest/gdrivers/hdf5multidim.py | 13 ++++ frmts/hdf5/hdf5multidim.cpp | 84 +++++++++++++++++++++++++ 3 files changed, 97 insertions(+) create mode 100644 autotest/gdrivers/data/hdf5/deflate.h5 diff --git a/autotest/gdrivers/data/hdf5/deflate.h5 b/autotest/gdrivers/data/hdf5/deflate.h5 new file mode 100644 index 0000000000000000000000000000000000000000..f32e29f9c80f23c02829e979d361c415d27d026f GIT binary patch literal 26420 zcmeI44RBl4mB;Usli(x@1PCF}KozyamtEIV9NS6?#IfYqu_JpVg$;2)Q7kzTwq=j3 z1TO<yb_bXiC_`y!6YOTXS(?q(thd9qe54&pSr)c+$4fsZFwjZUmPym56KJO&*(F={ zocrE+S33A~ApK(6_rSjI-S@lqo_qf1-Y4tcmyfnb+ZHXjc!9#Bx?0Uqi}fk}deb=? zcGx*|P4~CaiI8@;M-x8(QD0EsubSgvR1HrT_>_gn1<Sz_rK)_26LjsbL(WouRSo~O z4i|Rk3Z?vTVHC5~`S3FjespD!wzhpBRM)sz_pSdd$bchmC(6z(z1O}hI=S<f;fXcJ z=UmTSmnmdJ=THZ1hdK9@T6p-8b-JsIuCpw=q-D3g#9=6{T7pKMyW+jNgGvR|h1Kf9 z4+Ax~8t?dgK3&kZ>XP|(@gq(x<d-?JU+NLHp!eD@T`kEZT%$j*^SWwCqa7P#@nlDL zJe_Q9>53`;HoRx4xw=%mWn&B=U1}|)R4TPIKN^7lGl4>`)EaFIWb;G0!YFt>uzgo4 zP#7+mTe~yGd}jM#t}fsl;d<q7fc`;kXKU5c@m#4d+usn)59PDa%HnF}`UEcBR`Fb^ ze_Q+fGk**ZUkk0`@z?BOTi?~=`=7Nve67#A0)1lHp{OW!rL@YAVww1=R&(cLj<3pR zkAgXK@lrRluvwov!^#KohH*E#b60==V9rLcWhXzI>mSUNa(XPIMfRxpqBDZ;!7My< z&F-WzPaYgSHoXJ_aG9b2Y#kmhX7dGT$DTai5{S-Frm28ZyH%z9EogzI7TUG8<p&3^ z%M9+yY11m)qFZ-rqtcWr|7v@5EYM?caJaA|UC0bMwdk>Up4(NW)<;{q0yrlH;=6{n z=Zb-lKME5jnBmuE?9L44v*}`{u*0qa)v*o7GcZ`_lu~ah{~8>LmRh7O*-?awhB6}~ z`GS53;9P<~WOxZ08Qq;Lj^@%sxnf_YG+dmiy?K!xO3;oTU<6m`Nsfv;VFFX0zF!t5 z)!0{6Wzlp`<!J)+tEe*tA9m8yG$r4SCEq<qQ}<oj?$-9sd3y9%^Z-%TSf|sed6%KJ z)|@KG=<{ds=oWmM!Ey3*#)q|4Ux-&#zN+Q5cw(!8`eL0po8yQ(2ZC=|*<9b%kxVId z5fB&ySOaP!MFQ4TItPBiK{eAgn!4ngi38dcCY2szaNqxg9gpXz5?GOlwZ#&#cxx<e zTd@?XL@TnHQf4$)g0tU!khSh3$HATHTP!RMW(p`u-6y+KQ})&O?ElN_9rgB-k3Vo* zTQV6_!G(5zt$$Q1tpm_XSZYd-C2NbN4(PaR`}~id6Fscj674-Yl6+cG2c(hP<V@Do zlFRrV&`If1ZpXd@1IUaaopl9-`mq}OuBGOmwQOo#syzx8L5Q6T+1V>fPA_q4w2xG{ z_C-z2M{1ExVFHJtj$bwTrM4v0r8YW)lg22eb_q18M~x|bYT7P-!};L$K?k68Z`Z{S zuS<2cfpsfzP&L}0Q@aiZ7OS(c(#=X?(awXL1L{evYbTCnr5^hq_yF}JoIpc*I7YYb z*`Vel0U22Gg=OEmPCbs5LIw_tquW|~)%8ehvwimBzOQXle}e>Mpe(N}YW}QRitHXA zJie5=;Ri>*u9m`yYQ<kl{l}pf538l<@cVrVqvFWrx4fjT$6MR%zCN>g)&DJzpL!o= zdB@xFfYyJ0g_|opS|M|V#1(2*ghxd*RcKrh5*0C4p=(7L{L|$#;kVNhk2w<$tZ(h^ z(z66U|KQN)km5MR<6m)v!r>%u$*;0+yZQ7v!pT+#orgAEd#UFE&jX$ZJP&vt@I2so z!1I9T0nY=T2RsjW9`HQyr}selY5@K1Z~ZO()$iYc-w5C$DK6lq<E=f#;gMXil-H|M zxN;JNA1a6IF0lT3b+{ImWMMV+>V~?ex|Ov%v;C{C4%LO~>l$u*W8wL2?a_7af{Fg# z;O~EUY7Z@*kB($Y`OILtnCsWhz&1wU%Li(sC(*s3HQ5`Cw?xBDfe!{Yr8Wj`87|}k zp-?<nyFS*v-n|>FjkcsVZ3K3>SlXFxfd!#_U#5_bW=gw;(xFJWDOj8AX^$nkJEFb8 z*1=410IF$jYz)>mu54-!*EiNR)P^Gsb&;m##z=EGw6ZzUT-&lK)!v=xNL}9>jP)ef z2Wx{(_4Uoc8*Z58PGh*S3GehIV2K+HSzpZM3ODEbb_Q$fXTh5z^&!MJ#XC~H!E9~^ z;6T>ZhngBztqe!Pt3r`T!^)Mj6pe(!;n{6yaBb+`5Nl1rGJ9_@Wmd^HI;&(su(Blq zYwf96qBjWZocYqOY%V?ApB^secjOCTM+MObI~<Cj6FN1U&%pf;ghEXf$Y@_?FqiJn z^uZz=b|VsrG*$rpnL${m%VkDOu$mXFT~&|AEJOkI(+J$Poimgx6~Q;5*<4X?d$DD6 z2gFH?YLB%fQ=pN~)B&L@jCUte?XyV-UJIcZItaaAe?SjZj(_|%|E1c0&f+3F{?}I= zP2W6F(u=y@xcA1rH}3y{L+@QtRpt0clNLVuh1)H4o*w_@8$9@y6WXbPWoFryV5v)C zchEA0yH(t$7k^Gc(Q^aWLQ{CGRSgSO!<)_;2JJndj~bNP=nYTf#~`e~;;{uc&!oQF z@3^!D_e7Zb=plDz^`VV!|H~fUBgNsHa(%cL(Rq%ap}ltfE401k&-HYsl%B=Kh`(dM zj7{u+-;<|byVioWU3?VGw(IC@sF!qUO(X-bCo5mbWB)9*{H^Mb6k$`K`x)hotkQ*a 
ztBdX4=Z5mI1L>yWVmdRHAAQHYd}#09y^p6hgZk&|`tq<@6t^qk-m+4z;OuL9=ia+# z)2^=`94@XUd*8KbTYE>@=YKTw*yXmd$DDQhW6t(h+O;w9pTGKXDiMoWuwjYvDAk)o zen(&LZMSa{huatMz(6WLQrFm5#5tQZoQcAa&n7B4+|z*vt&iGQN}b+HXLd^46`ie9 zZV*>e-j(M8&jX$ZJP&vt@I2so!1KWG>w&R}R}NjEd@HPhv5E1Oun~R63gOvV$~tUC z;OyToP}XZ!Vr*hyFT4)4&$?)Aq6Yq$um;tsv5D!KdCF>4FIJq@L0+f&{@6q(-d(Bw zdu*cjjZ2g-X?<>N;*AfVt$Z`q4Al2^xVuZeJ~naUU*POPs~TGSHncH?ZS)?3cdveK zbz&a8DDjxJ0!kovr=_5^DL6~2AD5pQOHW+CP+8AgtH&lL(b8M7L?>Fc1}&ZZHMFr# zJqP`WLtdxOI|<w!+Jf)E-KVg%-j|`>m#i?@*8$vZXz43oUaEXoqOVT;1WFvxmTrMJ zYQBnR10RL5S>1EDwR<)|*^AUu<=*H-tGd+j^1OjO<lSe@hjv$k3P;iU@#G?9eaTuj zyLXLMrXR-RMx&bhbbrC)lW5g|dJU{c-`|M}o<N&R+LL%!>o$!g!swySDadQZ9@XI8 zUgT;puMcaP+y~sf)?&!Rvs=-Brf)_|qx584*MhNu^=Um!iB41@g?={mnG!~Q6A`^@ zz*nzZ>&h*ioYD4yrKark5r~Kftt-L1<KWZhbrhoox<}J!)f$Xb<Cor7K>g!7hNj@| zsQL}m=hkc@*7(%;dHP0Z?M`fC8cVS8Vft&z8ud*t%Q&~)@dqH*K8Cs(70ROB<tC?Q zvDuAS<7cCS>6=lj2J1^|^|3y!xv_5=^Yj=r)|d6@O1OI)S~}^D!5Tc%^M<L#+%>jz zV$C}02HaYVE#nDL@N*b-;|cJ_XY{Cb^Ncdnbx@0rXH$#GGi`JpSpb%zTAedcLeE!g z4#7B&Sfk~$&j}o%`>b=zW8C!D+#SH#VhBgK>E!^<NRQ)OVoI2~#6<CIJ(@<ZZPlZ~ z_22j>bkAX4GxZr2)_i)A^3BKC9oP#|d_d2gsQ#zWdc)PAhuD}l-tFC@M+(lnI9uqM z+Sp>|CNs`W)Xk=J4XUbR$ec~P8jU{ytv!e{b<Ihzw4~46{+jxXpH0urs6B!4@-ZxH zd~a+vabTjU?96R#8ZFh4ZS?4MXL-|_>2D{N)ie4ep6Mr!@pI=sPytqO%AZ!o7Q>mo znLJ~?IWu|~AJDV=oqitB-y5ljqVIwD_czk_LH9M=Ez$Qu#5vpKT{HFwoPH0@rIgyw zIQkx_4AG~L2ps=@=`n$GUq^B(ep=vo``-{adJll?{I0-}pNKmmaP-a-amNLYe?RvN zf#Z6d7C5fQyohZ9+3ma-MrOI`Vu9m&Tr6;0kM|24*P~A0xE{>{NByFXtP?n{N0-2H zJ+2ctuE#ck<9gi0INnd{M@itg9)BlrT#t_k9M|Ikf#Y^g2prerfWUD*z94X1k3#~- z^>|j`xE?c%b6z1N8Qk<Gf#Z7oP~f;8KNC2v$FBvB>!IL9c09=c)DOD0@4|h7<9aL+ zIIhQ~0>|~ZLg2U_Va9R$sUIH_IIc&Vz;Qi#1di*mMc}v|S%KsIDF__bV@%+<9{(V4 zT#tJNj_dI$#?g8a=|Jgc1&-_Sd4c14d{N-I9#07z*Mscz(n=4|=SEs*<m-c#w7E*b zOVoJqc;<TFC~#cwodU=8E(#pi`&NPD_TD9MeEi)naD4ndByfEEJuGm1{7o^AHU-$; zH@}|{IIhPt0>}0EroeGM4htOD<EX%KJ$@>1T#sK09M|Kw0>|~JZnAwz_K=^s9_KNR z`<E11E^wT`OyIa4^#aHBSS@f|kEp;=|EV8wf#Z5?7C5fQc7fx13<@0AV>jdIJ#Dg| z(%S`&>v6ZhaXmgMa9odl0>||@C~#blM+J`S@fCsNdORm^T#pw7j_dI<<G9~Qk5>hb z>v2-xc)h0tj_cuLe>YP-To1p%aXpp_9M>Zta9oe81di*`AaGodYZyo0?~o3ZwhJ8B zBO!3)N2+hDz;QkL1&-@6B5+)fTLg~lafiThJ*YokTIm7$+(@4nX&)HrL4PN39sf)C zyiVn)m2V3i*YSG-$8|g=aC}_7E^vHY{V(IVUDS^`@Xxy8LH#3+kE^o<j*qKl0>{Ty zP~f;8wE{=>+SbFrnIv%3AL7;u9M@xmz;QiN0>|~Zk#WvHD@A6x=}v*;dK3kY>v5~V zaXs!5IIhS20>|}uNZ`004+|XEV@lw-9#04y*W($+@%BiNZwegO<FLSSJ&p<-*W;%G z$MyK7z;Qi(D{x$o>JQmHr}E?%O35$h2^`mBxxjHfE@PbZ&lS}R9M@yDz;QjI0>|}; z3mn&Dv%rx*$$#4gj_WZfa9oew0>|~ZUEsJLcQcOHL;d)qz;QkH2^`nspullG9u+vQ z$5#Z7>+zhxaXnrTIIhRb0>}OMs=#qQPBM=6yOR!-o)S2&2k~B7=>hw5<0XpLr|Ek~ ziWl-b_owe_yB^N*`1T7NkME@d$KyL7aAXJd?ka)f@!cSBJif0HI3C~a0>|S!!8m@O z`mt5uxE}oi$MqNyIIhPn0>|~ZL*Td`_X!-=V~@abJ@yM6*W(d^<Nkh(arn;PY{%0A z$MyJzz;QjkD{x$oBLc_uI4*Epk6#EJ*W<LnaXsd-_n63kT#v;9$Mv|_;Yb7TJtp#m QmwF!XJm7iY&+CEz2OMWc{Qv*} literal 0 HcmV?d00001 diff --git a/autotest/gdrivers/hdf5multidim.py b/autotest/gdrivers/hdf5multidim.py index 64dac532c916..6f1f003c326f 100755 --- a/autotest/gdrivers/hdf5multidim.py +++ b/autotest/gdrivers/hdf5multidim.py @@ -827,3 +827,16 @@ def test_hdf5_multidim_eos_swath_no_explicit_dimension_map(): coordinates[1].GetFullName() == "/HDFEOS/SWATHS/MySwath/Geolocation Fields/Latitude" ) + + +############################################################################### +# Test GetBlockSize() and GetStructuralInfo() + + +def test_hdf5_multidim_block_size_structural_info(): + + ds = gdal.OpenEx("data/hdf5/deflate.h5", gdal.OF_MULTIDIM_RASTER) + rg = ds.GetRootGroup() + var = rg.OpenMDArray("Band1") + assert var.GetBlockSize() == [1, 2] + assert 
var.GetStructuralInfo() == {"COMPRESSION": "DEFLATE", "FILTER": "SHUFFLE"} diff --git a/frmts/hdf5/hdf5multidim.cpp b/frmts/hdf5/hdf5multidim.cpp index 07d6be6c4e25..1bca9766cec8 100644 --- a/frmts/hdf5/hdf5multidim.cpp +++ b/frmts/hdf5/hdf5multidim.cpp @@ -293,6 +293,7 @@ class HDF5Array final : public GDALMDArray mutable bool m_bHasDimensionLabels = false; std::shared_ptr<OGRSpatialReference> m_poSRS{}; haddr_t m_nOffset; + mutable CPLStringList m_aosStructuralInfo{}; HDF5Array(const std::string &osParentName, const std::string &osName, const std::shared_ptr<HDF5SharedResources> &poShared, @@ -365,6 +366,10 @@ class HDF5Array final : public GDALMDArray std::vector<std::shared_ptr<GDALAttribute>> GetAttributes(CSLConstList papszOptions = nullptr) const override; + std::vector<GUInt64> GetBlockSize() const override; + + CSLConstList GetStructuralInfo() const override; + const void *GetRawNoDataValue() const override { return m_abyNoData.empty() ? nullptr : m_abyNoData.data(); @@ -1808,6 +1813,85 @@ HDF5Array::GetAttributes(CSLConstList papszOptions) const return m_oListAttributes; } +/************************************************************************/ +/* GetBlockSize() */ +/************************************************************************/ + +std::vector<GUInt64> HDF5Array::GetBlockSize() const +{ + HDF5_GLOBAL_LOCK(); + + const auto nDimCount = GetDimensionCount(); + std::vector<GUInt64> res(nDimCount); + if (res.empty()) + return res; + + const hid_t nListId = H5Dget_create_plist(m_hArray); + if (nListId > 0) + { + if (H5Pget_layout(nListId) == H5D_CHUNKED) + { + std::vector<hsize_t> anChunkDims(nDimCount); + const int nDimSize = H5Pget_chunk( + nListId, static_cast<int>(nDimCount), &anChunkDims[0]); + if (static_cast<size_t>(nDimSize) == nDimCount) + { + for (size_t i = 0; i < nDimCount; ++i) + { + res[i] = anChunkDims[i]; + } + } + } + + H5Pclose(nListId); + } + + return res; +} + +/************************************************************************/ +/* GetStructuralInfo() */ +/************************************************************************/ + +CSLConstList HDF5Array::GetStructuralInfo() const +{ + if (m_aosStructuralInfo.empty()) + { + HDF5_GLOBAL_LOCK(); + const hid_t nListId = H5Dget_create_plist(m_hArray); + if (nListId > 0) + { + const int nFilters = H5Pget_nfilters(nListId); + for (int i = 0; i < nFilters; ++i) + { + unsigned int flags = 0; + size_t cd_nelmts = 0; + char szName[64 + 1] = {0}; + const auto eFilter = H5Pget_filter( + nListId, i, &flags, &cd_nelmts, nullptr, 64, szName); + if (eFilter == H5Z_FILTER_DEFLATE) + { + m_aosStructuralInfo.SetNameValue("COMPRESSION", "DEFLATE"); + } + else if (eFilter == H5Z_FILTER_SZIP) + { + m_aosStructuralInfo.SetNameValue("COMPRESSION", "SZIP"); + } + else if (eFilter == H5Z_FILTER_SHUFFLE) + { + m_aosStructuralInfo.SetNameValue("FILTER", "SHUFFLE"); + } + else + { + CPLDebug("HDF5", "Filter used: %s", szName); + } + } + H5Pclose(nListId); + } + } + return m_aosStructuralInfo.List(); +} + /************************************************************************/ /* CopyBuffer() */ /************************************************************************/ From 7539e93381f1eed3375421d28915cfab86255f52 Mon Sep 17 00:00:00 2001 From: Dan Baston <dbaston@gmail.com> Date: Thu, 18 Apr 2024 05:28:21 -0400 Subject: [PATCH 040/230] autotest: run Python tests directly from source tree on Windows (#9224) * CMake: Run Python tests directly from the source tree (Windows) * Update doc/source/development/testing.rst --- 
autotest/CMakeLists.txt | 58 +++++++++++++++--------------- doc/source/development/testing.rst | 18 +++++++--- 2 files changed, 42 insertions(+), 34 deletions(-) diff --git a/autotest/CMakeLists.txt b/autotest/CMakeLists.txt index d28a06d7ce59..66f09f446ddd 100644 --- a/autotest/CMakeLists.txt +++ b/autotest/CMakeLists.txt @@ -49,6 +49,9 @@ if (Python_Interpreter_FOUND) configure_file(${GDAL_CMAKE_TEMPLATE_PATH}/pytest.ini.in ${CMAKE_CURRENT_BINARY_DIR}/pytest.ini @ONLY) unset(PYTEST_INI_HEADER_MESSAGE) + # Symlink test files into the build directory so that we can run tests directly from that location. + # This doesn't work on Windows, + function (copy_file_or_dir source dest) if (IS_DIRECTORY ${source}) message(STATUS "Copying contents of ${source} to ${destination}") @@ -72,44 +75,39 @@ endfunction () symlink_or_copy(${CMAKE_CURRENT_SOURCE_DIR}/conftest.py ${CMAKE_CURRENT_BINARY_DIR}/conftest.py) symlink_or_copy(${CMAKE_CURRENT_SOURCE_DIR}/run_slow_tests.sh ${CMAKE_CURRENT_BINARY_DIR}/run_slow_tests.sh) - if (NOT "${CMAKE_BINARY_DIR}" STREQUAL "${CMAKE_SOURCE_DIR}") - foreach (subdir IN ITEMS pymod proj_grids cpp/data) - if (SKIP_COPYING_AUTOTEST_SUBDIRS) - message(STATUS "Skipping copying ${CMAKE_CURRENT_SOURCE_DIR}/${subdir}") - else () + SET(pytest_dirs alg benchmark gcore gdrivers gnm ogr osr pyscripts slow_tests utilities) + + # Link test data/scripts into the build directory for convenience (non-Windows) + if (WIN32) + SET(python_test_path ${CMAKE_CURRENT_LIST_DIR}) + else() + SET(python_test_path ${CMAKE_CURRENT_BINARY_DIR}) + if (NOT "${CMAKE_BINARY_DIR}" STREQUAL "${CMAKE_SOURCE_DIR}") + foreach (subdir IN ITEMS + pymod + proj_grids + cpp/data + ${pytest_dirs}) symlink_or_copy(${CMAKE_CURRENT_SOURCE_DIR}/${subdir} ${CMAKE_CURRENT_BINARY_DIR}/${subdir}) - endif () - endforeach () + endforeach () + unset(subdir) + endif() endif() - foreach ( - tgt IN - ITEMS ogr - gcore - gdrivers - alg - osr - gnm - pyscripts - utilities - benchmark - slow_tests) - if (NOT "${CMAKE_BINARY_DIR}" STREQUAL "${CMAKE_SOURCE_DIR}") - if (SKIP_COPYING_AUTOTEST_SUBDIRS) - message(STATUS "Skipping copying ${CMAKE_CURRENT_SOURCE_DIR}/${tgt}") - else () - symlink_or_copy(${CMAKE_CURRENT_SOURCE_DIR}/${tgt} ${CMAKE_CURRENT_BINARY_DIR}/${tgt}) - endif () - endif() + foreach (tgt IN ITEMS ${pytest_dirs}) add_custom_target( autotest_${tgt} - COMMAND ${CMAKE_COMMAND} -E env ${PYTHON_RUN_ENV} ${Python_EXECUTABLE} -m pytest -c - ${CMAKE_CURRENT_BINARY_DIR}/pytest.ini ${tgt} + COMMAND ${CMAKE_COMMAND} -E env ${PYTHON_RUN_ENV} ${Python_EXECUTABLE} -m pytest + -c ${CMAKE_CURRENT_BINARY_DIR}/pytest.ini + ${python_test_path}/${tgt} DEPENDS ${GDAL_LIB_TARGET_NAME} gdalapps python_binding) - add_test(NAME autotest_${tgt} COMMAND ${Python_EXECUTABLE} -m pytest -c ${CMAKE_CURRENT_BINARY_DIR}/pytest.ini - ${tgt}) + add_test(NAME autotest_${tgt} + COMMAND ${Python_EXECUTABLE} -m pytest + -c ${CMAKE_CURRENT_BINARY_DIR}/pytest.ini + ${python_test_path}/${tgt}) set_property(TEST autotest_${tgt} PROPERTY ENVIRONMENT "${PYTHON_RUN_ENV}") endforeach () + add_custom_target( autotest COMMAND ${CMAKE_COMMAND} -E env ${PYTHON_RUN_ENV} ${Python_EXECUTABLE} -m pytest -c diff --git a/doc/source/development/testing.rst b/doc/source/development/testing.rst index 096e430a76d5..7377dbc86ddc 100644 --- a/doc/source/development/testing.rst +++ b/doc/source/development/testing.rst @@ -48,17 +48,19 @@ and the Python bindings: python3 -c 'from osgeo import gdal; print(gdal.__version__)' # 3.7.0dev-5327c149f5-dirty -List tests containing "tiff" in 
the name: +Tests can then be run by calling ``pytest``, for example on an individual file. +On Linux and MacOS builds, the tests are symlinked into the build directory, so this +can be done by running the following from the build directory: .. code-block:: bash - pytest --collect-only autotest -k tiff + pytest autotest/gcore/vrt_read.py -Running an individual test file +On Windows, the test files remain in the source tree, but the pytest configuration file ``pytest.ini`` is only available in the build directory. To accommodate this, the above command would be modified as follows: .. code-block:: bash - pytest autotest/gcore/vrt_read.py + pytest -c pytest.ini ../autotest/gcore/vrt_read.py A subset of tests within an individual test file can be run by providing a regular expression to the ``-k`` argument to ``pytest``. @@ -66,6 +68,14 @@ A subset of tests within an individual test file can be run by providing a regul pytest autotest/gcore/vrt_read.py -k test_vrt_read_non_existing_source +``pytest`` can also report information on the tests without running them. For +example, to list tests containing "tiff" in the name: + +.. code-block:: bash + + pytest --collect-only autotest -k tiff + + .. warning:: Not all Python tests can be run independently; some tests depend on state set by a previous tests in the same file. From 53ebd1e8f146300920bfd48fc8bbe2aaed8ed855 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 18 Apr 2024 12:26:48 +0200 Subject: [PATCH 041/230] CI: add a stale target for pull requests without recent activity --- .github/workflows/stale.yml | 55 +++++++++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 .github/workflows/stale.yml diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml new file mode 100644 index 000000000000..6fd9616bba74 --- /dev/null +++ b/.github/workflows/stale.yml @@ -0,0 +1,55 @@ +name: 👓 Handle stale issues +on: + schedule: + - cron: "30 2 * * *" + +permissions: + contents: read + +jobs: + stale: + permissions: + # issues: write # for actions/stale to close stale issues + pull-requests: write # for actions/stale to close stale PRs + if: github.repository_owner == 'OSGeo' + runs-on: ubuntu-latest + steps: + - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9.0.0 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + stale-pr-message: > + The GDAL project highly values your contribution and would love to see + this work merged! + Unfortunately this PR has not had any activity in the last 28 days and + is being automatically marked as "stale". + If you think this pull request should be merged, please check + + - that all unit tests are passing + + - that all comments by reviewers have been addressed + + - that there is enough information for reviewers, in particular link + to any issues which this pull request fixes + + - that you have written unit tests where possible + + In case you should have any uncertainty, please leave a comment and we will + be happy to help you proceed with this pull request. + + If there is no further activity on this pull request, it will be closed in 2 + weeks. + + + close-pr-message: > + While we hate to see this happen, this PR has been automatically closed because + it has not had any activity in the last 6 weeks. If this pull request should be + reconsidered, please follow the guidelines in the previous comment and reopen + this pull request. Or, if you have any further questions, just ask! 
We love to + help, and if there's anything the GDAL project can do to help push this PR forward + please let us know how we can assist. + + + stale-pr-label: 'stale' + days-before-pr-stale: 28 + days-before-pr-close: 14 + operations-per-run: 1000 From 4edfc69ad351d2c9cd86226cffd1818720cc59da Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 18 Apr 2024 14:15:28 +0200 Subject: [PATCH 042/230] gdalinfo: fix -approx_stats (master only) --- apps/gdalinfo_lib.cpp | 3 +++ autotest/utilities/test_gdalinfo_lib.py | 1 + 2 files changed, 4 insertions(+) diff --git a/apps/gdalinfo_lib.cpp b/apps/gdalinfo_lib.cpp index 6ba776d08ba2..6e41027fab46 100644 --- a/apps/gdalinfo_lib.cpp +++ b/apps/gdalinfo_lib.cpp @@ -2290,6 +2290,9 @@ GDALInfoOptionsNew(char **papszArgv, GDALInfoAppOptionsGetParser(psOptions.get(), psOptionsForBinary); argParser->parse_args_without_binary_name(aosArgv.List()); + + if (psOptions->bApproxStats) + psOptions->bStats = true; } catch (const std::exception &error) { diff --git a/autotest/utilities/test_gdalinfo_lib.py b/autotest/utilities/test_gdalinfo_lib.py index fe8dc55f5a92..6f556954b215 100755 --- a/autotest/utilities/test_gdalinfo_lib.py +++ b/autotest/utilities/test_gdalinfo_lib.py @@ -136,6 +136,7 @@ def test_gdalinfo_lib_5(): assert "computedMin" in band assert "histogram" in band assert "checksum" in band + assert "stdDev" in band assert ret["coordinateSystem"]["dataAxisToSRSAxisMapping"] == [1, 2] gdaltest.validate_json(ret, "gdalinfo_output.schema.json") From 8ebfe91eec7417bc551c1b0cb842ff47e5fd0023 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Tue, 16 Apr 2024 10:20:14 -0400 Subject: [PATCH 043/230] GDALDriver: Support OGR API Spy --- gcore/gdaldataset.cpp | 5 +++++ gcore/gdaldriver.cpp | 20 +++++++++++++++++++- ogr/ograpispy.cpp | 10 +++++----- ogr/ograpispy.h | 6 +++--- 4 files changed, 32 insertions(+), 9 deletions(-) diff --git a/gcore/gdaldataset.cpp b/gcore/gdaldataset.cpp index 49118992ed81..6e3518c70fe0 100644 --- a/gcore/gdaldataset.cpp +++ b/gcore/gdaldataset.cpp @@ -4873,6 +4873,11 @@ OGRErr GDALDatasetDeleteLayer(GDALDatasetH hDS, int iLayer) { VALIDATE_POINTER1(hDS, "GDALDatasetH", OGRERR_INVALID_HANDLE); +#ifdef OGRAPISPY_ENABLED + if (bOGRAPISpyEnabled) + OGRAPISpy_DS_DeleteLayer(hDS, iLayer); +#endif + return GDALDataset::FromHandle(hDS)->DeleteLayer(iLayer); } diff --git a/gcore/gdaldriver.cpp b/gcore/gdaldriver.cpp index 46df8d269e9e..8dbc851460f7 100644 --- a/gcore/gdaldriver.cpp +++ b/gcore/gdaldriver.cpp @@ -44,6 +44,7 @@ #include "cpl_progress.h" #include "cpl_string.h" #include "cpl_vsi.h" +#include "ograpispy.h" #include "ogr_core.h" #include "ogrsf_frmts.h" @@ -323,8 +324,18 @@ GDALDatasetH CPL_DLL CPL_STDCALL GDALCreate(GDALDriverH hDriver, { VALIDATE_POINTER1(hDriver, "GDALCreate", nullptr); - return GDALDriver::FromHandle(hDriver)->Create( + GDALDatasetH hDS = GDALDriver::FromHandle(hDriver)->Create( pszFilename, nXSize, nYSize, nBands, eBandType, papszOptions); + +#ifdef OGRAPISPY_ENABLED + if (nBands < 1) + { + OGRAPISpyCreateDataSource(hDriver, pszFilename, + const_cast<char **>(papszOptions), hDS); + } +#endif + + return hDS; } /************************************************************************/ @@ -1701,6 +1712,13 @@ CPLErr CPL_STDCALL GDALDeleteDataset(GDALDriverH hDriver, return CE_Failure; } +#ifdef OGRAPISPY_ENABLED + if (GDALGetMetadataItem(hDriver, GDAL_DCAP_VECTOR, nullptr)) + { + OGRAPISpyDeleteDataSource(hDriver, pszFilename); + } +#endif + return 
GDALDriver::FromHandle(hDriver)->Delete(pszFilename); } diff --git a/ogr/ograpispy.cpp b/ogr/ograpispy.cpp index 0bea96b490d6..dc8436c8b10d 100644 --- a/ogr/ograpispy.cpp +++ b/ogr/ograpispy.cpp @@ -612,8 +612,8 @@ void OGRAPISpyPostClose() } } -void OGRAPISpyCreateDataSource(OGRSFDriverH hDriver, const char *pszName, - char **papszOptions, OGRDataSourceH hDS) +void OGRAPISpyCreateDataSource(GDALDriverH hDriver, const char *pszName, + char **papszOptions, GDALDatasetH hDS) { if (!OGRAPISpyEnabled()) return; @@ -623,7 +623,7 @@ void OGRAPISpyCreateDataSource(OGRSFDriverH hDriver, const char *pszName, fprintf(fpSpyFile, "%s = ", OGRAPISpyGetDSVar(hDS).c_str()); fprintf(fpSpyFile, "ogr.GetDriverByName('%s').CreateDataSource(%s, options=%s)\n", - GDALGetDriverShortName(reinterpret_cast<GDALDriverH>(hDriver)), + GDALGetDriverShortName(hDriver), OGRAPISpyGetString(pszName).c_str(), OGRAPISpyGetOptions(papszOptions).c_str()); if (hDS != nullptr) @@ -633,14 +633,14 @@ void OGRAPISpyCreateDataSource(OGRSFDriverH hDriver, const char *pszName, OGRAPISpyFileClose(); } -void OGRAPISpyDeleteDataSource(OGRSFDriverH hDriver, const char *pszName) +void OGRAPISpyDeleteDataSource(GDALDriverH hDriver, const char *pszName) { if (!OGRAPISpyEnabled()) return; CPLMutexHolderD(&hOGRAPISpyMutex); OGRAPISpyFlushDefered(); fprintf(fpSpyFile, "ogr.GetDriverByName('%s').DeleteDataSource(%s)\n", - GDALGetDriverShortName(reinterpret_cast<GDALDriverH>(hDriver)), + GDALGetDriverShortName(hDriver), OGRAPISpyGetString(pszName).c_str()); aoSetCreatedDS.erase(pszName); OGRAPISpyFileClose(); diff --git a/ogr/ograpispy.h b/ogr/ograpispy.h index 8afab9430f7b..c6cbe2bbe0d8 100644 --- a/ogr/ograpispy.h +++ b/ogr/ograpispy.h @@ -78,9 +78,9 @@ void OGRAPISpyOpen(const char *pszName, int bUpdate, int iSnapshot, GDALDatasetH *phDS); void OGRAPISpyPreClose(GDALDatasetH hDS); void OGRAPISpyPostClose(); -void OGRAPISpyCreateDataSource(OGRSFDriverH hDriver, const char *pszName, - char **papszOptions, OGRDataSourceH hDS); -void OGRAPISpyDeleteDataSource(OGRSFDriverH hDriver, const char *pszName); +void OGRAPISpyCreateDataSource(GDALDriverH hDriver, const char *pszName, + char **papszOptions, GDALDatasetH hDS); +void OGRAPISpyDeleteDataSource(GDALDriverH hDriver, const char *pszName); void OGRAPISpy_DS_GetLayerCount(GDALDatasetH hDS); void OGRAPISpy_DS_GetLayer(GDALDatasetH hDS, int iLayer, OGRLayerH hLayer); From fe315f93acd39c039f2dc0504ba04028e94e23ae Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Tue, 16 Apr 2024 10:20:53 -0400 Subject: [PATCH 044/230] autotest ograpispy.py: use tmp_path --- autotest/ogr/ograpispy.py | 110 ++++++++++++++------------------------ 1 file changed, 40 insertions(+), 70 deletions(-) diff --git a/autotest/ogr/ograpispy.py b/autotest/ogr/ograpispy.py index 0862ff8921a1..25b4b2f38fa6 100755 --- a/autotest/ogr/ograpispy.py +++ b/autotest/ogr/ograpispy.py @@ -30,7 +30,6 @@ ############################################################################### import os -import shutil from difflib import unified_diff import gdaltest @@ -52,19 +51,21 @@ def module_disable_exceptions(): # Basic test without snapshoting -def test_ograpispy_1(): +def test_ograpispy_1(tmp_path): - os.environ["OGR_API_SPY_FILE"] = "tmp/ograpispy_1.py" + fname = str(tmp_path / "ograpispy_1.py") + + os.environ["OGR_API_SPY_FILE"] = fname test_py_scripts.run_py_script("data", "testograpispy", "") del os.environ["OGR_API_SPY_FILE"] - if not os.path.exists("tmp/ograpispy_1.py"): + if not os.path.exists(fname): pytest.skip("OGR 
API spy not enabled") ref_data = open("data/testograpispy.py", "rt").read() ref_data = ref_data.replace("gdal.DontUseExceptions()\n", "") ref_data = ref_data.replace("ogr.DontUseExceptions()\n", "") - got_data = open("tmp/ograpispy_1.py", "rt").read() + got_data = open(fname, "rt").read() if ref_data != got_data: print() @@ -78,86 +79,55 @@ def test_ograpispy_1(): print(line) pytest.fail("did not get expected script") - gdal.Unlink("tmp/ograpispy_1.py") - ############################################################################### # With snapshoting -def test_ograpispy_2(): +def test_ograpispy_2(tmp_path): - os.environ["OGR_API_SPY_FILE"] = "tmp/ograpispy_1.py" + os.environ["OGR_API_SPY_FILE"] = str(tmp_path / "ograpispy_1.py") test_py_scripts.run_py_script("data", "testograpispy", "") del os.environ["OGR_API_SPY_FILE"] - if not os.path.exists("tmp/ograpispy_1.py"): + if not os.path.exists(tmp_path / "ograpispy_1.py"): pytest.skip("OGR API spy not enabled") - gdal.Unlink("tmp/ograpispy_1.py") - - try: - shutil.rmtree("tmp/snapshot_1") - except OSError: - pass - - try: - ds = ogr.GetDriverByName("ESRI Shapefile").CreateDataSource( - "tmp/ograpispy_2.shp" - ) - lyr = ds.CreateLayer("ograpispy_2") - lyr.CreateField(ogr.FieldDefn("foo", ogr.OFTString)) - ds = None - - with gdal.config_options( - { - "OGR_API_SPY_FILE": "tmp/ograpispy_2.py", - "OGR_API_SPY_SNAPSHOT_PATH": "tmp", - } - ): - ds = ogr.Open("tmp/ograpispy_2.shp", update=1) - lyr = ds.GetLayer(0) - lyr.CreateFeature(ogr.Feature(lyr.GetLayerDefn())) - ds = None - - ds = ogr.Open("tmp/snapshot_1/source/ograpispy_2.shp") + ds = ogr.GetDriverByName("ESRI Shapefile").CreateDataSource( + tmp_path / "ograpispy_2.shp" + ) + lyr = ds.CreateLayer("ograpispy_2") + lyr.CreateField(ogr.FieldDefn("foo", ogr.OFTString)) + ds = None + + with gdal.config_options( + { + "OGR_API_SPY_FILE": str(tmp_path / "ograpispy_2.py"), + "OGR_API_SPY_SNAPSHOT_PATH": str(tmp_path), + } + ): + ds = ogr.Open(tmp_path / "ograpispy_2.shp", update=1) lyr = ds.GetLayer(0) - assert lyr.GetFeatureCount() == 0 + lyr.CreateFeature(ogr.Feature(lyr.GetLayerDefn())) ds = None - ds = ogr.Open("tmp/snapshot_1/working/ograpispy_2.shp", update=1) - lyr = ds.GetLayer(0) - assert lyr.GetFeatureCount() == 1 + ds = ogr.Open(tmp_path / "snapshot_1/source/ograpispy_2.shp") + lyr = ds.GetLayer(0) + assert lyr.GetFeatureCount() == 0 + ds = None - # Add a feature to check that running the script will work - lyr.CreateFeature(ogr.Feature(lyr.GetLayerDefn())) - ds = None + ds = ogr.Open(tmp_path / "snapshot_1/working/ograpispy_2.shp", update=1) + lyr = ds.GetLayer(0) + assert lyr.GetFeatureCount() == 1 - # Check script - test_py_scripts.run_py_script("tmp", "ograpispy_2", "") + # Add a feature to check that running the script will work + lyr.CreateFeature(ogr.Feature(lyr.GetLayerDefn())) + ds = None - ds = ogr.Open("tmp/snapshot_1/working/ograpispy_2.shp") - lyr = ds.GetLayer(0) - assert lyr.GetFeatureCount() == 1 - ds = None + # Check script + test_py_scripts.run_py_script(tmp_path, "ograpispy_2", "") - finally: - try: - shutil.rmtree("tmp/snapshot_1/working") - except OSError: - pass - - gdal.Unlink("tmp/ograpispy_2.py") - gdal.Unlink("tmp/ograpispy_2.pyc") - try: - shutil.rmtree("tmp/snapshot_1") - except OSError: - pass - try: - os.stat("tmp/ograpispy_2.shp") - ogr.GetDriverByName("ESRI Shapefile").DeleteDataSource( - "tmp/ograpispy_2.shp" - ) - except (OSError, AttributeError): - pass - gdal.Unlink("/vsimem/test2.csv") + ds = ogr.Open(tmp_path / "snapshot_1/working/ograpispy_2.shp") 
+ lyr = ds.GetLayer(0) + assert lyr.GetFeatureCount() == 1 + ds = None From 16ec4a5231493a77aad408f7ba14da5f562f31df Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 18 Apr 2024 14:54:12 +0200 Subject: [PATCH 045/230] JP2KAK: fix non-persistent read mode on overviews (master only) --- autotest/gdrivers/jp2kak.py | 13 +++++++++++++ frmts/jp2kak/jp2kakdataset.cpp | 22 +++++++++++++++++----- frmts/jp2kak/jp2kakdataset.h | 1 + 3 files changed, 31 insertions(+), 5 deletions(-) diff --git a/autotest/gdrivers/jp2kak.py b/autotest/gdrivers/jp2kak.py index c8f14d7aa6eb..4b1c169ec291 100755 --- a/autotest/gdrivers/jp2kak.py +++ b/autotest/gdrivers/jp2kak.py @@ -1010,3 +1010,16 @@ def test_jp2kak_unsupported_srs_for_gmljp2(tmp_vsimem): assert ds.GetSpatialRef().IsSame(ref_srs) # Check that we do *not* have a GMLJP2 box assert "xml:gml.root-instance" not in ds.GetMetadataDomainList() + + +############################################################################### +# Test non-persistent read mode on overviews + + +def test_jp2kak_non_persistent_read_overview(): + + with gdal.config_option("JP2KAK_PERSIST", "NO"): + ds = gdal.Open("data/jpeg2000/513x513.jp2") + cs = ds.GetRasterBand(1).GetOverview(0).Checksum() + + assert cs == 29642 diff --git a/frmts/jp2kak/jp2kakdataset.cpp b/frmts/jp2kak/jp2kakdataset.cpp index e89f39ac4aac..2a13c7d2d76c 100644 --- a/frmts/jp2kak/jp2kakdataset.cpp +++ b/frmts/jp2kak/jp2kakdataset.cpp @@ -576,10 +576,11 @@ JP2KAKDataset::JP2KAKDataset() = default; // Constructor for overview dataset JP2KAKDataset::JP2KAKDataset(JP2KAKDataset *poMainDS, int nDiscardLevels, const kdu_dims &dimsIn) - : oCodeStream(poMainDS->oCodeStream), poInput(poMainDS->poInput), - poRawInput(poMainDS->poRawInput), family(poMainDS->family), - jpip_client(poMainDS->jpip_client), dims(dimsIn), - nResCount(poMainDS->nResCount), bPreferNPReads(poMainDS->bPreferNPReads), + : m_osFilename(poMainDS->m_osFilename), oCodeStream(poMainDS->oCodeStream), + poInput(poMainDS->poInput), poRawInput(poMainDS->poRawInput), + family(poMainDS->family), jpip_client(poMainDS->jpip_client), + dims(dimsIn), nResCount(poMainDS->nResCount), + bPreferNPReads(poMainDS->bPreferNPReads), poThreadEnv(poMainDS->poThreadEnv), m_nDiscardLevels(nDiscardLevels), bCached(poMainDS->bCached), bResilient(poMainDS->bResilient), bFussy(poMainDS->bFussy), bUseYCC(poMainDS->bUseYCC), @@ -1075,6 +1076,8 @@ GDALDataset *JP2KAKDataset::Open(GDALOpenInfo *poOpenInfo) poDS->SetBand(iBand, poBand); } + poDS->m_osFilename = poOpenInfo->pszFilename; + // Create overviews if (!bHasExternalOverviews) { @@ -1270,7 +1273,16 @@ CPLErr JP2KAKDataset::DirectRasterIO(GDALRWFlag /* eRWFlag */, int nXOff, if (bPreferNPReads) { - subfile_src.open(GetDescription(), bResilient, bCached); + try + { + subfile_src.open(m_osFilename.c_str(), bResilient, bCached); + } + catch (...) 
+ { + CPLError(CE_Failure, CPLE_AppDefined, "subfile_src.open(%s) failed", + m_osFilename.c_str()); + return CE_Failure; + } if (family != nullptr) { diff --git a/frmts/jp2kak/jp2kakdataset.h b/frmts/jp2kak/jp2kakdataset.h index 0c65a73ac9b3..0d302ce4d480 100644 --- a/frmts/jp2kak/jp2kakdataset.h +++ b/frmts/jp2kak/jp2kakdataset.h @@ -65,6 +65,7 @@ class JP2KAKDataset final : public GDALJP2AbstractDataset { friend class JP2KAKRasterBand; + std::string m_osFilename{}; kdu_codestream oCodeStream; kdu_compressed_source *poInput = nullptr; kdu_compressed_source *poRawInput = nullptr; From d9403a4167162e5e2431aa4b68c1fbc9f655391b Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 18 Apr 2024 16:36:39 +0200 Subject: [PATCH 046/230] GTiff: multi-threaded decoding: fix potential mutex deadlock --- frmts/gtiff/gtiffdataset_read.cpp | 28 +++++++++++++++++----------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/frmts/gtiff/gtiffdataset_read.cpp b/frmts/gtiff/gtiffdataset_read.cpp index e9f52d11f706..d4f4226527a4 100644 --- a/frmts/gtiff/gtiffdataset_read.cpp +++ b/frmts/gtiff/gtiffdataset_read.cpp @@ -349,7 +349,10 @@ CPLErr GTiffDataset::ReadCompressedData(const char *pszFormat, int nXOff, struct GTiffDecompressContext { - std::mutex oMutex{}; + // The mutex must be recursive because ThreadDecompressionFuncErrorHandler() + // which acquires the mutex can be called from a section where the mutex is + // already acquired. + std::recursive_mutex oMutex{}; bool bSuccess = true; std::vector<CPLErrorHandlerAccumulatorStruct> aoErrors{}; @@ -416,7 +419,7 @@ static void CPL_STDCALL ThreadDecompressionFuncErrorHandler( { GTiffDecompressContext *psContext = static_cast<GTiffDecompressContext *>(CPLGetErrorHandlerUserData()); - std::lock_guard<std::mutex> oLock(psContext->oMutex); + std::lock_guard<std::recursive_mutex> oLock(psContext->oMutex); psContext->aoErrors.emplace_back(eErr, eErrorNum, pszMsg); } @@ -495,7 +498,7 @@ static void CPL_STDCALL ThreadDecompressionFuncErrorHandler( if (psJob->nSize == 0) { { - std::lock_guard<std::mutex> oLock(psContext->oMutex); + std::lock_guard<std::recursive_mutex> oLock(psContext->oMutex); if (!psContext->bSuccess) return; } @@ -616,7 +619,7 @@ static void CPL_STDCALL ThreadDecompressionFuncErrorHandler( if (psContext->bHasPRead) { { - std::lock_guard<std::mutex> oLock(psContext->oMutex); + std::lock_guard<std::recursive_mutex> oLock(psContext->oMutex); if (!psContext->bSuccess) return; @@ -634,7 +637,7 @@ static void CPL_STDCALL ThreadDecompressionFuncErrorHandler( { if (!AllocInputBuffer()) { - std::lock_guard<std::mutex> oLock(psContext->oMutex); + std::lock_guard<std::recursive_mutex> oLock(psContext->oMutex); psContext->bSuccess = false; return; } @@ -647,7 +650,7 @@ static void CPL_STDCALL ThreadDecompressionFuncErrorHandler( static_cast<GUIntBig>(psJob->nSize), static_cast<GUIntBig>(psJob->nOffset)); - std::lock_guard<std::mutex> oLock(psContext->oMutex); + std::lock_guard<std::recursive_mutex> oLock(psContext->oMutex); psContext->bSuccess = false; return; } @@ -655,7 +658,7 @@ static void CPL_STDCALL ThreadDecompressionFuncErrorHandler( } else { - std::lock_guard<std::mutex> oLock(psContext->oMutex); + std::lock_guard<std::recursive_mutex> oLock(psContext->oMutex); if (!psContext->bSuccess) return; @@ -808,7 +811,7 @@ static void CPL_STDCALL ThreadDecompressionFuncErrorHandler( if (!bRet) { - std::lock_guard<std::mutex> oLock(psContext->oMutex); + std::lock_guard<std::recursive_mutex> oLock(psContext->oMutex); 
psContext->bSuccess = false; return; } @@ -1298,7 +1301,8 @@ CPLErr GTiffDataset::MultiThreadedRead(int nXOff, int nYOff, int nXSize, // false since we could have concurrent uses of the handle, // when when reading the TIFF TileOffsets / TileByteCounts // array - std::lock_guard<std::mutex> oLock(sContext.oMutex); + std::lock_guard<std::recursive_mutex> oLock( + sContext.oMutex); IsBlockAvailable(nBlockId, &asJobs[iJob].nOffset, &asJobs[iJob].nSize); @@ -1314,7 +1318,8 @@ CPLErr GTiffDataset::MultiThreadedRead(int nXOff, int nYOff, int nXSize, { if (nFileSize == 0) { - std::lock_guard<std::mutex> oLock(sContext.oMutex); + std::lock_guard<std::recursive_mutex> oLock( + sContext.oMutex); sContext.poHandle->Seek(0, SEEK_END); nFileSize = sContext.poHandle->Tell(); } @@ -1326,7 +1331,8 @@ CPLErr GTiffDataset::MultiThreadedRead(int nXOff, int nYOff, int nXSize, static_cast<GUIntBig>(asJobs[iJob].nSize), static_cast<GUIntBig>(asJobs[iJob].nOffset)); - std::lock_guard<std::mutex> oLock(sContext.oMutex); + std::lock_guard<std::recursive_mutex> oLock( + sContext.oMutex); sContext.bSuccess = false; break; } From c1209a579a44df18a8d6fb8b9663c599f17f7789 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 18 Apr 2024 17:01:49 +0200 Subject: [PATCH 047/230] Add VSIVirtualHandle::GetAdviseReadTotalBytesLimit() --- port/cpl_vsi_virtual.h | 17 +++++++++++++++++ port/cpl_vsil_curl.cpp | 26 +++++++++++++++++++++++--- port/cpl_vsil_curl_class.h | 2 ++ 3 files changed, 42 insertions(+), 3 deletions(-) diff --git a/port/cpl_vsi_virtual.h b/port/cpl_vsi_virtual.h index e2458e37371e..225e2eded076 100644 --- a/port/cpl_vsi_virtual.h +++ b/port/cpl_vsi_virtual.h @@ -87,6 +87,23 @@ struct CPL_DLL VSIVirtualHandle { } + /** Return the total maximum number of bytes that AdviseRead() can handle + * at once. + * + * Some AdviseRead() implementations may give up if the sum of the values + * in the panSizes[] array provided to AdviseRead() exceeds a limit. + * + * Callers might use that threshold to optimize the efficiency of + * AdviseRead(). + * + * A returned value of 0 indicates a unknown limit. + * @since GDAL 3.9 + */ + virtual size_t GetAdviseReadTotalBytesLimit() const + { + return 0; + } + virtual size_t Write(const void *pBuffer, size_t nSize, size_t nCount) = 0; int Printf(CPL_FORMAT_STRING(const char *pszFormat), ...) 
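The new documentation comment describes the intended calling pattern: query the limit once, then keep the cumulative total of the panSizes[] values passed to a single AdviseRead() call under it. A minimal sketch of such a caller follows. The AdviseReadInBatches() helper and its splitting strategy are illustrative assumptions, not part of this patch; only GetAdviseReadTotalBytesLimit() and the AdviseRead(int nRanges, const vsi_l_offset *panOffsets, const size_t *panSizes) entry point shown in these headers are relied upon.

    #include "cpl_vsi_virtual.h"

    #include <vector>

    // Illustrative only: batch AdviseRead() calls so that the sum of the
    // requested sizes stays under GetAdviseReadTotalBytesLimit().
    static void AdviseReadInBatches(VSIVirtualHandle *poHandle,
                                    const std::vector<vsi_l_offset> &anOffsets,
                                    const std::vector<size_t> &anSizes)
    {
        const size_t nLimit = poHandle->GetAdviseReadTotalBytesLimit();
        size_t iStart = 0;
        size_t nAccBytes = 0;
        for (size_t i = 0; i < anSizes.size(); ++i)
        {
            // nLimit == 0 means the limit is unknown: everything is then
            // accumulated into a single call. A single range larger than the
            // limit is issued on its own and may be ignored by the
            // implementation.
            if (nLimit != 0 && nAccBytes != 0 &&
                (nAccBytes >= nLimit || anSizes[i] > nLimit - nAccBytes))
            {
                poHandle->AdviseRead(static_cast<int>(i - iStart),
                                     anOffsets.data() + iStart,
                                     anSizes.data() + iStart);
                iStart = i;
                nAccBytes = 0;
            }
            nAccBytes += anSizes[i];
        }
        if (iStart < anSizes.size())
        {
            poHandle->AdviseRead(static_cast<int>(anSizes.size() - iStart),
                                 anOffsets.data() + iStart,
                                 anSizes.data() + iStart);
        }
    }

The GTiff change in the next commit applies the same idea inside GTiffDataset::MultiThreadedRead(), splitting the request once the accumulated sizes would exceed this limit.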
diff --git a/port/cpl_vsil_curl.cpp b/port/cpl_vsil_curl.cpp index c8d6640a7100..38d6140529bb 100644 --- a/port/cpl_vsil_curl.cpp +++ b/port/cpl_vsil_curl.cpp @@ -32,9 +32,10 @@ #include <algorithm> #include <array> -#include <set> +#include <limits> #include <map> #include <memory> +#include <set> #include "cpl_aws.h" #include "cpl_json.h" @@ -3153,6 +3154,21 @@ size_t VSICurlHandle::PRead(void *pBuffer, size_t nSize, return nRet; } +/************************************************************************/ +/* GetAdviseReadTotalBytesLimit() */ +/************************************************************************/ + +size_t VSICurlHandle::GetAdviseReadTotalBytesLimit() const +{ + return static_cast<size_t>(std::min<unsigned long long>( + std::numeric_limits<size_t>::max(), + // 100 MB + std::strtoull( + CPLGetConfigOption("CPL_VSIL_CURL_ADVISE_READ_TOTAL_BYTES_LIMIT", + "104857600"), + nullptr, 10))); +} + /************************************************************************/ /* AdviseRead() */ /************************************************************************/ @@ -3171,9 +3187,10 @@ void VSICurlHandle::AdviseRead(int nRanges, const vsi_l_offset *panOffsets, // Give up if we need to allocate too much memory vsi_l_offset nMaxSize = 0; + const size_t nLimit = GetAdviseReadTotalBytesLimit(); for (int i = 0; i < nRanges; ++i) { - if (panSizes[i] > 100 * 1024 * 1024 - nMaxSize) + if (panSizes[i] > nLimit - nMaxSize) { CPLDebug(poFS->GetDebugKey(), "Trying to request too many bytes in AdviseRead()"); @@ -3994,7 +4011,10 @@ const char *VSICurlFilesystemHandlerBase::GetActualURL(const char *pszFilename) "default='16384000'/>" \ " <Option name='CPL_VSIL_CURL_IGNORE_GLACIER_STORAGE' type='boolean' " \ "description='Whether to skip files with Glacier storage class in " \ - "directory listing.' default='YES'/>" + "directory listing.' default='YES'/>" \ + " <Option name='CPL_VSIL_CURL_ADVISE_READ_TOTAL_BYTES_LIMIT' " \ + "type='integer' description='Maximum number of bytes AdviseRead() is " \ + "allowed to fetch at once' default='104857600'/>" const char *VSICurlFilesystemHandlerBase::GetOptionsStatic() { diff --git a/port/cpl_vsil_curl_class.h b/port/cpl_vsil_curl_class.h index 97d1e618a35a..853d579954db 100644 --- a/port/cpl_vsil_curl_class.h +++ b/port/cpl_vsil_curl_class.h @@ -506,6 +506,8 @@ class VSICurlHandle : public VSIVirtualHandle void AdviseRead(int nRanges, const vsi_l_offset *panOffsets, const size_t *panSizes) override; + size_t GetAdviseReadTotalBytesLimit() const override; + bool IsKnownFileSize() const { return oFileProp.bHasComputedFileSize; From 5b948d1cc49e944521ebb7e0b2df5eb529d4a714 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 18 Apr 2024 17:03:52 +0200 Subject: [PATCH 048/230] GTiff: MultiThreadedRead(): make it take into account AdviseRead() limit to reduce the number of I/O requests Fixes #9682 --- autotest/gcore/tiff_read.py | 86 +++++++++++++++++++++++++++++++ frmts/gtiff/gtiffdataset_read.cpp | 45 ++++++++++++++++ 2 files changed, 131 insertions(+) diff --git a/autotest/gcore/tiff_read.py b/autotest/gcore/tiff_read.py index 01e1d2b0a471..d580b6f70e15 100755 --- a/autotest/gcore/tiff_read.py +++ b/autotest/gcore/tiff_read.py @@ -4447,6 +4447,92 @@ def method(request): gdal.GetDriverByName("GTIFF").Delete(cog_filename) +############################################################################### +# Test GTiffDataset::MultiThreadedRead() when the amount of requested bytes +# exceed the allowed limit. 
+ + +@pytest.mark.require_curl() +@pytest.mark.skipif( + not check_libtiff_internal_or_at_least(4, 0, 11), + reason="libtiff >= 4.0.11 required", +) +def test_tiff_read_vsicurl_multi_threaded_beyond_advise_read_limit(tmp_path): + + webserver_process = None + webserver_port = 0 + + (webserver_process, webserver_port) = webserver.launch( + handler=webserver.DispatcherHttpHandler + ) + if webserver_port == 0: + pytest.skip() + + gdal.VSICurlClearCache() + + tmp_filename = str(tmp_path / "tmp.tif") + gdal.Translate( + tmp_filename, + "data/utmsmall.tif", + options="-co TILED=YES -co COMPRESS=LZW -outsize 1024 0", + ) + ds = gdal.Open(tmp_filename) + expected_data = ds.ReadRaster() + ds = None + + try: + filesize = os.stat(tmp_filename).st_size + handler = webserver.SequentialHandler() + handler.add("HEAD", "/test.tif", 200, {"Content-Length": "%d" % filesize}) + + def method(request): + # sys.stderr.write('%s\n' % str(request.headers)) + + if request.headers["Range"].startswith("bytes="): + rng = request.headers["Range"][len("bytes=") :] + assert len(rng.split("-")) == 2 + start = int(rng.split("-")[0]) + end = int(rng.split("-")[1]) + + request.protocol_version = "HTTP/1.1" + request.send_response(206) + request.send_header("Content-type", "application/octet-stream") + request.send_header( + "Content-Range", "bytes %d-%d/%d" % (start, end, filesize) + ) + request.send_header("Content-Length", end - start + 1) + request.send_header("Connection", "close") + request.end_headers() + with open(tmp_filename, "rb") as f: + f.seek(start, 0) + request.wfile.write(f.read(end - start + 1)) + + for i in range(3): + handler.add("GET", "/test.tif", custom_method=method) + + with webserver.install_http_handler(handler): + with gdaltest.config_options( + { + "GDAL_NUM_THREADS": "2", + "CPL_VSIL_CURL_ALLOWED_EXTENSIONS": ".tif", + "GDAL_DISABLE_READDIR_ON_OPEN": "EMPTY_DIR", + "CPL_VSIL_CURL_ADVISE_READ_TOTAL_BYTES_LIMIT": str( + 2 * filesize // 3 + ), + } + ): + ds = gdal.Open("/vsicurl/http://127.0.0.1:%d/test.tif" % webserver_port) + assert ds is not None, "could not open dataset" + + got_data = ds.ReadRaster() + assert got_data == expected_data + + finally: + webserver.server_stop(webserver_process, webserver_port) + + gdal.VSICurlClearCache() + + ############################################################################### # Check that GetMetadataDomainList() works properly diff --git a/frmts/gtiff/gtiffdataset_read.cpp b/frmts/gtiff/gtiffdataset_read.cpp index d4f4226527a4..9d2dcb8e2931 100644 --- a/frmts/gtiff/gtiffdataset_read.cpp +++ b/frmts/gtiff/gtiffdataset_read.cpp @@ -1274,6 +1274,9 @@ CPLErr GTiffDataset::MultiThreadedRead(int nXOff, int nYOff, int nXSize, std::vector<size_t> anSizes(nBlocks); int iJob = 0; int nAdviseReadRanges = 0; + const size_t nAdviseReadTotalBytesLimit = + sContext.poHandle->GetAdviseReadTotalBytesLimit(); + size_t nAdviseReadAccBytes = 0; for (int y = 0; y < nYBlocks; ++y) { for (int x = 0; x < nXBlocks; ++x) @@ -1383,6 +1386,48 @@ CPLErr GTiffDataset::MultiThreadedRead(int nXOff, int nYOff, int nXSize, static_cast<size_t>(std::min<vsi_l_offset>( std::numeric_limits<size_t>::max(), asJobs[iJob].nSize)); + + // If the total number of bytes we must read excess the + // capacity of AdviseRead(), then split the RasterIO() + // request in 2 halves. 
+ if (nAdviseReadTotalBytesLimit > 0 && + anSizes[nAdviseReadRanges] < + nAdviseReadTotalBytesLimit && + anSizes[nAdviseReadRanges] > + nAdviseReadTotalBytesLimit - nAdviseReadAccBytes && + nYBlocks >= 2) + { + const int nYOff2 = + (nBlockYStart + nYBlocks / 2) * m_nBlockYSize; + CPLDebugOnly("GTiff", + "Splitting request (%d,%d,%dx%d) into " + "(%d,%d,%dx%d) and (%d,%d,%dx%d)", + nXOff, nYOff, nXSize, nYSize, nXOff, nYOff, + nXSize, nYOff2 - nYOff, nXOff, nYOff2, + nXSize, nYOff + nYSize - nYOff2); + + asJobs.clear(); + anOffsets.clear(); + anSizes.clear(); + poQueue.reset(); + + CPLErr eErr = MultiThreadedRead( + nXOff, nYOff, nXSize, nYOff2 - nYOff, pData, + eBufType, nBandCount, panBandMap, nPixelSpace, + nLineSpace, nBandSpace); + if (eErr == CE_None) + { + eErr = MultiThreadedRead( + nXOff, nYOff2, nXSize, nYOff + nYSize - nYOff2, + static_cast<GByte *>(pData) + + (nYOff2 - nYOff) * nLineSpace, + eBufType, nBandCount, panBandMap, nPixelSpace, + nLineSpace, nBandSpace); + } + return eErr; + } + nAdviseReadAccBytes += anSizes[nAdviseReadRanges]; + ++nAdviseReadRanges; } From b59a857488dea7580487b23ebd09371b41bde69c Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 18 Apr 2024 17:30:02 +0200 Subject: [PATCH 049/230] Doc: development practices: move relevant content from RFC8 (#9692) --- doc/source/development/dev_practices.rst | 87 ++++++++++++++- doc/source/development/index.rst | 2 +- doc/source/development/rfc/rfc8_devguide.rst | 109 +------------------ 3 files changed, 89 insertions(+), 109 deletions(-) diff --git a/doc/source/development/dev_practices.rst b/doc/source/development/dev_practices.rst index 0432924beccc..a0664dadec50 100644 --- a/doc/source/development/dev_practices.rst +++ b/doc/source/development/dev_practices.rst @@ -1,11 +1,11 @@ .. _development_practices: -.. include:: ../substitutions.rst - ================================================================================ Development practices ================================================================================ +.. include:: ../substitutions.rst + Making changes to GDAL ---------------------- @@ -16,6 +16,89 @@ of a RFC (request for comment) document. GDAL's policy on substantial code additions is documented at :ref:`rfc-85`. +Portability +----------- + +GDAL strives to be widely portable to 32 bit and 64 bit computing +environments, as well as little-endian and big-endian ordered CPUs. +CPL functions in the port directory provide services to abstract platform +specific operations. + +Generally speaking, where available CPL functions should be used in +preference to operating system functions for operations like memory +allocation, path parsing, filesystem I/O (using VSILFILE* / VSIVirtualFile*), +ODBC access, etc. + +C/C++ standards +--------------- + +The current C and C++ standards adopted by GDAL/OGR are C99 and C++17 +(last updated per :ref:`rfc-98`)` + +Variable naming +--------------- + +Much of the existing GDAL/OGR code uses an adapted Hungarian naming +convention. Use of this convention is not mandatory, but when +maintaining code using this convention it is desirable to continue +adhering to it with changes. Most importantly, please avoiding using it +improperly as that can be very confusing. + +In Hungarian prefixing the prefix tells something about about the type, +and potentially semantics of a variable. The following are some prefixes +used in GDAL/OGR. + +- *a*: array +- *b*: C/C++ bool. 
In C code that pre-dates C99 adoption, it is also used for + ints with only TRUE/FALSE values. +- *by*: byte (GByte / unsigned char). +- *df*: floating point value (double precision) +- *e*: enumeration +- *i*: integer number used as a zero based array or loop index. +- *f*: floating point value (single precision) +- *h*: an opaque handle (such as GDALDatasetH). +- *n*: integer number (size unspecified) +- *o*: C++ object +- *os*: CPLString or std::string +- *p*: pointer +- *psz*: pointer to a null-terminated string. (eg. "char \*pszName;") +- *sz*: null-terminated string (eg." char szName[100];") +- *k*: compile-time constant + +Prefixes can be stacked. The following are some examples of meaningful +variables. + +- \*char !\*\ *papszTokens*: Pointer to an array of strings. +- \*int *panBands*: Pointer to the first element of an array of + numbers. +- \*double *padfScanline*: Pointer to the first element of an array of + doubles. +- \*double *pdfMeanRet*: Pointer to a single double. +- \*GDALRasterBand *poBand*: Pointer to a single object. +- \*GByte *pabyHeader*: Pointer to an array of bytes. + +It may also be noted that the standard convention for variable names is +to capitalize each word in a variable name. + +Memory allocation +----------------- + +As per :ref:`rfc-19`, you can use VSIMalloc2(x, y) instead of +doing CPLMalloc(x \* y) or VSIMalloc(x \* y). VSIMalloc2 will detect +potential overflows in the multiplication and return a NULL pointer if +it happens. This can be useful in GDAL raster drivers where x and y are +related to the raster dimensions or raster block sizes. Similarly, +VSIMalloc3(x, y, z) can be used as a replacement for CPLMalloc(x \* y \* +z). + +File naming and code formatting +------------------------------- + +- Use lower case filenames. +- Use .cpp extension for C++ files (not .cc). +- Code formatting rules are defined in :source_file:`.clang-format`. The + pre-commit utility can be used to enforce them automatically. + Git usage --------- diff --git a/doc/source/development/index.rst b/doc/source/development/index.rst index 0390d588cd76..c369d385d947 100644 --- a/doc/source/development/index.rst +++ b/doc/source/development/index.rst @@ -10,8 +10,8 @@ Development dev_environment building_from_source - testing dev_practices + testing dev_documentation cmake rfc/index diff --git a/doc/source/development/rfc/rfc8_devguide.rst b/doc/source/development/rfc/rfc8_devguide.rst index fe5743518d11..b9225c3f3644 100644 --- a/doc/source/development/rfc/rfc8_devguide.rst +++ b/doc/source/development/rfc/rfc8_devguide.rst @@ -13,110 +13,7 @@ Status: draft Purpose ------- -This document is intended to document developer practices for the -GDAL/OGR project. It will be an evolving document. +This document used to document developer practices for the +GDAL/OGR project. As it was an evolving document, its content has been moved +to :ref:`development_practices`. -Portability ------------ - -GDAL strives to be widely portable to 32bit and 64bit computing -environments. It accomplishes this in a number of ways - avoid compiler -specific directives, avoiding new, but perhaps not widely available -aspects of C++, and most importantly by abstracting platform specific -operations in CPL functions in the gdal/port directory. - -Generally speaking, where available CPL functions should be used in -preference to operating system functions for operations like memory -allocation, path parsing, filesystem io, multithreading functions, and -ODBC access. 
- -Variable Naming ---------------- - -Much of the existing GDAL and OGR code uses an adapted Hungarian naming -convention. Use of this convention is not mandatory, but when -maintaining code using this convention it is desirable to continue -adhering to it with changes. Most importantly, please avoiding using it -improperly as that can be very confusing. - -In Hungarian prefixing the prefix tells something about about the type, -and potentially semantics of a variable. The following are some prefixes -used in GDAL/OGR. - -- *a*: array -- *b*: C++ bool. Also used for ints with only TRUE/FALSE values in C. -- *by*: byte (GByte / unsigned char). -- *df*: floating point value (double precision) -- *e*: enumeration -- *i*: integer number used as a zero based array or loop index. -- *f*: floating point value (single precision) -- *h*: an opaque handle (such as GDALDatasetH). -- *n*: integer number (size unspecified) -- *o*: C++ object -- *os*: CPLString -- *p*: pointer -- *psz*: pointer to a zero terminated string. (eg. "char \*pszName;") -- *sz*: zero terminated string (eg." char szName[100];") -- TODO: What about constants (either global or global to a file)? - Propose: *k* - -Prefix can be stacked. The following are some examples of meaningful -variables. - -- \*char !\*\ *papszTokens*: Pointer to the an array of strings. -- \*int *panBands*: Pointer to the first element of an array of - numbers. -- \*double *padfScanline*: Pointer to the first element of an array of - doubles. -- \*double *pdfMeanRet*: Pointer to a single double. -- \*GDALRasterBand *poBand*: Pointer to a single object. -- \*GByte *pabyHeader*: Pointer to an array of bytes. - -It may also be noted that the standard convention for variable names is -to capitalize each word in a variable name. - -Memory allocation ------------------ - -As per `RFC 19: Safer memory allocation in -GDAL <./rfc19_safememalloc>`__, you can use VSIMalloc2(x, y) instead of -doing CPLMalloc(x \* y) or VSIMalloc(x \* y). VSIMalloc2 will detect -potential overflows in the multiplication and return a NULL pointer if -it happens. This can be useful in GDAL raster drivers where x and y are -related to the raster dimensions or raster block sizes. Similarly, -VSIMalloc3(x, y, z) can be used as a replacement for CPLMalloc(x \* y \* -z). - -Headers, and Comment Blocks ---------------------------- - -.. _misc-notes: - -Misc. Notes ------------ - -- Use lower case filenames. -- Use .cpp extension for C++ files (not .cc). -- Avoid spaces or other special characters in file or directory names. -- Use 4 character indentation levels. -- Use spaces instead of hard tab characters in source code. -- Try to keep lines to 79 characters or less. 
- -See also --------- - -- `http://erouault.blogspot.com/2016/01/software-quality-improvements-in-gdal.html <http://erouault.blogspot.com/2016/01/software-quality-improvements-in-gdal.html>`__ -- `https://travis-ci.org/OSGeo/gdal/builds <https://travis-ci.org/OSGeo/gdal/builds>`__ -- `https://ci.appveyor.com/project/OSGeo/gdal/history <https://ci.appveyor.com/project/OSGeo/gdal/history>`__ -- `https://travis-ci.org/rouault/gdal_coverage/builds <https://travis-ci.org/rouault/gdal_coverage/builds>`__ -- `https://ci.appveyor.com/project/rouault/gdal-coverage/history <https://ci.appveyor.com/project/rouault/gdal-coverage/history>`__ -- `https://gdalautotest-coverage-results.github.io/coverage_html/index.html <https://gdalautotest-coverage-results.github.io/coverage_html/index.html>`__ - -Python code ------------ - -- All Python code in autotest, swig/python/scripts and - swig/python/samples should pass OK with the Pyflakes checker (version - used currently: 0.8.1). This is asserted by Travis-CI jobs -- Python code should be written to be compatible with both Python 2 and - Python 3. From d32e234e929ecad4c53103d5552d03cea7ab7601 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 18 Apr 2024 17:38:15 +0200 Subject: [PATCH 050/230] Doc: CSV: clarify /vsistdin/ use (fixes #9627) --- doc/source/drivers/vector/csv.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/source/drivers/vector/csv.rst b/doc/source/drivers/vector/csv.rst index bca06f2c88ae..21956a81d157 100644 --- a/doc/source/drivers/vector/csv.rst +++ b/doc/source/drivers/vector/csv.rst @@ -253,6 +253,10 @@ domains. Writing to /dev/stdout or /vsistdout/ is also supported. +Reading from /vsistdin/ is supported using the ``CSV:/vsistdin/`` connection +string and provided that none of the open options whose name starts with ``AUTODETECT_`` +is used. + Open options ------------ From 22bd7669bc935e66dcb723321f49a27d4324a6e9 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 18 Apr 2024 17:45:13 +0200 Subject: [PATCH 051/230] JPEG: CreateCopy(): emit warning/error message before progress bar (fixes #9441) --- frmts/jpeg/jpgdataset.cpp | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/frmts/jpeg/jpgdataset.cpp b/frmts/jpeg/jpgdataset.cpp index a2d4a102ec3f..486f4fa6d908 100644 --- a/frmts/jpeg/jpgdataset.cpp +++ b/frmts/jpeg/jpgdataset.cpp @@ -4021,9 +4021,6 @@ GDALDataset *JPGDataset::CreateCopy(const char *pszFilename, void *pProgressData) { - if (!pfnProgress(0.0, nullptr, pProgressData)) - return nullptr; - const int nBands = poSrcDS->GetRasterCount(); const char *pszLossLessCopy = @@ -4039,6 +4036,9 @@ GDALDataset *JPGDataset::CreateCopy(const char *pszFilename, GDALGetCompressionFormatForJPEG(pJPEGContent, nJPEGContent) .find(";colorspace=RGBA") == std::string::npos) { + if (!pfnProgress(0.0, nullptr, pProgressData)) + return nullptr; + CPLDebug("JPEG", "Lossless copy from source dataset"); std::vector<GByte> abyJPEG; try @@ -4357,7 +4357,7 @@ GDALDataset *JPGDataset::CreateCopy(const char *pszFilename, { CPLError(bStrict ? CE_Failure : CE_Warning, CPLE_NotSupported, "JPEG driver doesn't support data type %s. 
" - "Only eight and twelve bit bands supported (Mk1 libjpeg).\n", + "Only eight and twelve bit bands supported.", GDALGetDataTypeName( poSrcDS->GetRasterBand(1)->GetRasterDataType())); @@ -4452,6 +4452,9 @@ GDALDataset *JPGDataset::CreateCopyStage2( return nullptr; } + if (!pfnProgress(0.0, nullptr, pProgressData)) + return nullptr; + // Initialize JPG access to the file. sCInfo.err = jpeg_std_error(&sJErr); sJErr.error_exit = JPGDataset::ErrorExit; From 122cc14376dff2dc3d85f1ff9cde382374e8c431 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Thu, 4 Apr 2024 13:57:23 -0400 Subject: [PATCH 052/230] autotest: warn if sequential tests are not sent to same parallel worker --- autotest/conftest.py | 26 ++++++++++++++++++++++++++ autotest/utilities/test_gdaltindex.py | 1 + autotest/utilities/test_gnmutils.py | 1 + cmake/template/pytest.ini.in | 6 +++++- 4 files changed, 33 insertions(+), 1 deletion(-) diff --git a/autotest/conftest.py b/autotest/conftest.py index acdfc206bbfb..ce9238a5f711 100755 --- a/autotest/conftest.py +++ b/autotest/conftest.py @@ -238,10 +238,36 @@ def pytest_collection_modifyitems(config, items): ) ) + # For any tests marked to run sequentially (pytest.mark.random_order(disabled=True) + # check to make sure they are also marked with pytest.mark.xdist_group() + # so they are sent to the same xdist worker. + unmarked_modules = set() + xdist = config.pluginmanager.getplugin("xdist") + if xdist and xdist.is_xdist_worker(item.session): + for mark in item.iter_markers("random_order"): + if ( + mark.kwargs["disabled"] + and not next(item.iter_markers("xdist_group"), None) + and item.module.__name__ not in unmarked_modules + ): + unmarked_modules.add(item.module.__name__) + import warnings + + warnings.warn( + f"module {item.module.__name__} marked as random_order(disabled=True) but does not have an assigned xdist_group" + ) + def pytest_addoption(parser): parser.addini("gdal_version", "GDAL version for which pytest.ini was generated") + # our pytest.ini specifies --dist=loadgroup but we don't want to fail if the + # user doesn't have this extension installed. 
+ try: + import xdist # noqa: F401 + except ImportError: + parser.addoption("--dist") + def pytest_configure(config): test_version = config.getini("gdal_version") diff --git a/autotest/utilities/test_gdaltindex.py b/autotest/utilities/test_gdaltindex.py index 97fb4a3b5073..ae29e50a9883 100755 --- a/autotest/utilities/test_gdaltindex.py +++ b/autotest/utilities/test_gdaltindex.py @@ -41,6 +41,7 @@ reason="gdaltindex not available", ), pytest.mark.random_order(disabled=True), + pytest.mark.xdist_group("test_gdaltindex"), ] diff --git a/autotest/utilities/test_gnmutils.py b/autotest/utilities/test_gnmutils.py index 21fc22af17cc..77a70a6f649c 100755 --- a/autotest/utilities/test_gnmutils.py +++ b/autotest/utilities/test_gnmutils.py @@ -47,6 +47,7 @@ reason="gnmanalyse not available", ), pytest.mark.random_order(disabled=True), + pytest.mark.xdist_group("test_gnmutils"), ] diff --git a/cmake/template/pytest.ini.in b/cmake/template/pytest.ini.in index b55a3c534251..d1894eb217ee 100644 --- a/cmake/template/pytest.ini.in +++ b/cmake/template/pytest.ini.in @@ -27,6 +27,10 @@ markers = require_proj: Skip test(s) if required PROJ version is not available require_run_on_demand: Skip test(s) if RUN_ON_DEMAND environment variable is not set slow: Skip test(s) if GDAL_RUN_SLOW_TESTS environment variable is not set + xdist_group: Associated test with a named group that will be sent to a single parallel worker # Make sure that all markers are declared above -addopts = --strict-markers +# --strict-markers causes an error if a test has a marker that is not handled +# --dist=loadgroup instructs pytest-xdist to send tests marked with the same +# xdist_group marker to the same worker. +addopts = --strict-markers --dist=loadgroup From f8e0a777970b9997c914010372ae88d4705ff7a2 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Tue, 20 Feb 2024 12:24:09 -0500 Subject: [PATCH 053/230] autotest ogr_hana.py: mark tests as order-dependent --- autotest/ogr/ogr_hana.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/autotest/ogr/ogr_hana.py b/autotest/ogr/ogr_hana.py index e7345c3961ab..bcc63687f328 100644 --- a/autotest/ogr/ogr_hana.py +++ b/autotest/ogr/ogr_hana.py @@ -41,7 +41,10 @@ except ImportError: pytest.skip("hdbcli not available", allow_module_level=True) -pytestmark = pytest.mark.require_driver("HANA") +pytestmark = [ + pytest.mark.require_driver("HANA"), + pytest.mark.random_order(disabled=True), +] ############################################################################### @pytest.fixture(autouse=True, scope="module") From f3e04c04531b65b008b6cfebd47506b9af0f598b Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Thu, 22 Feb 2024 10:42:11 -0500 Subject: [PATCH 054/230] autotest vsiadls.py: cleanup config options after tests run --- autotest/gcore/vsiadls.py | 41 +++++++++++++-------------------------- 1 file changed, 14 insertions(+), 27 deletions(-) diff --git a/autotest/gcore/vsiadls.py b/autotest/gcore/vsiadls.py index 8d316e98affe..8ba8d2080a42 100755 --- a/autotest/gcore/vsiadls.py +++ b/autotest/gcore/vsiadls.py @@ -56,20 +56,6 @@ def open_for_read(uri): @pytest.fixture(autouse=True, scope="module") def startup_and_cleanup(): - # Unset all env vars that could influence the tests - az_vars = {} - for var, reset_val in ( - ("AZURE_STORAGE_CONNECTION_STRING", None), - ("AZURE_STORAGE_ACCOUNT", None), - ("AZURE_STORAGE_ACCESS_KEY", None), - ("AZURE_STORAGE_SAS_TOKEN", None), - ("AZURE_NO_SIGN_REQUEST", None), - ("AZURE_CONFIG_DIR", ""), - 
("AZURE_STORAGE_ACCESS_TOKEN", ""), - ): - az_vars[var] = gdal.GetConfigOption(var) - gdal.SetConfigOption(var, reset_val) - with gdaltest.config_option("CPL_AZURE_VM_API_ROOT_URL", "disabled"): assert gdal.GetSignedURL("/vsiadls/foo/bar") is None @@ -82,16 +68,20 @@ def startup_and_cleanup(): if gdaltest.webserver_port == 0: pytest.skip() - gdal.SetConfigOption( - "AZURE_STORAGE_CONNECTION_STRING", - "DefaultEndpointsProtocol=http;AccountName=myaccount;AccountKey=MY_ACCOUNT_KEY;BlobEndpoint=http://127.0.0.1:%d/azure/blob/myaccount" - % gdaltest.webserver_port, - ) - gdal.SetConfigOption("AZURE_STORAGE_ACCOUNT", "") - gdal.SetConfigOption("AZURE_STORAGE_ACCESS_KEY", "") - gdal.SetConfigOption("CPL_AZURE_TIMESTAMP", "my_timestamp") - - yield + with gdal.config_options( + { + "AZURE_CONFIG_DIR": "", + "AZURE_NO_SIGN_REQUEST": None, + "AZURE_STORAGE_ACCOUNT": None, + "AZURE_STORAGE_ACCESS_KEY": None, + "AZURE_STORAGE_CONNECTION_STRING": "DefaultEndpointsProtocol=http;AccountName=myaccount;AccountKey=MY_ACCOUNT_KEY;BlobEndpoint=http://127.0.0.1:%d/azure/blob/myaccount" + % gdaltest.webserver_port, + "AZURE_STORAGE_SAS_TOKEN": None, + "CPL_AZURE_TIMESTAMP": "my_timestamp", + }, + thread_local=False, + ): + yield # Clearcache needed to close all connections, since the Python server # can only handle one connection at a time @@ -99,9 +89,6 @@ def startup_and_cleanup(): webserver.server_stop(gdaltest.webserver_process, gdaltest.webserver_port) - for var in az_vars: - gdal.SetConfigOption(var, az_vars[var]) - ############################################################################### # Basic authentication tests From 915711846a3b142f5a44a9024f20c1a8905a9a63 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Tue, 2 Apr 2024 13:39:53 -0400 Subject: [PATCH 055/230] autotest ogr_pgdump.py: use tmp_path, tmp_vsimem --- autotest/ogr/ogr_pgdump.py | 229 ++++++++++++++----------------------- 1 file changed, 84 insertions(+), 145 deletions(-) diff --git a/autotest/ogr/ogr_pgdump.py b/autotest/ogr/ogr_pgdump.py index fddb6cf7bfe5..f72db7351c8a 100755 --- a/autotest/ogr/ogr_pgdump.py +++ b/autotest/ogr/ogr_pgdump.py @@ -28,8 +28,6 @@ # DEALINGS IN THE SOFTWARE. 
############################################################################### -import os - import gdaltest import ogrtest import pytest @@ -49,14 +47,9 @@ def module_disable_exceptions(): # Create table from data/poly.shp -def test_ogr_pgdump_1(): - - try: - os.remove("tmp/tpoly.sql") - except OSError: - pass +def test_ogr_pgdump_1(tmp_path): - ds = ogr.GetDriverByName("PGDump").CreateDataSource("tmp/tpoly.sql") + ds = ogr.GetDriverByName("PGDump").CreateDataSource(tmp_path / "tpoly.sql") ###################################################### # Create Layer @@ -94,10 +87,9 @@ def test_ogr_pgdump_1(): feat = shp_lyr.GetNextFeature() - dst_feat.Destroy() - ds.Destroy() + ds.Close() - with open("tmp/tpoly.sql") as f: + with open(tmp_path / "tpoly.sql") as f: sql = f.read() # print(sql) @@ -138,17 +130,12 @@ def check_and_remove(needle): # Create table from data/poly.shp with PG_USE_COPY=YES -def test_ogr_pgdump_2(): - - try: - os.remove("tmp/tpoly.sql") - except OSError: - pass +def test_ogr_pgdump_2(tmp_path): with gdal.config_option("PG_USE_COPY", "YES"): ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "tmp/tpoly.sql", options=["LINEFORMAT=CRLF"] + tmp_path / "tpoly.sql", options=["LINEFORMAT=CRLF"] ) ###################################################### @@ -190,10 +177,9 @@ def test_ogr_pgdump_2(): feat = shp_lyr.GetNextFeature() - dst_feat.Destroy() - ds.Destroy() + ds.Close() - with open("tmp/tpoly.sql") as f: + with open(tmp_path / "tpoly.sql") as f: sql = f.read() # print(sql) @@ -244,17 +230,12 @@ def check_and_remove(needle): # Create table from data/poly.shp without any geometry -def test_ogr_pgdump_3(): - - try: - os.remove("tmp/tpoly.sql") - except OSError: - pass +def test_ogr_pgdump_3(tmp_path): with gdal.config_option("PG_USE_COPY", "YES"): ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "tmp/tpoly.sql", options=["LINEFORMAT=LF"] + tmp_path / "tpoly.sql", options=["LINEFORMAT=LF"] ) ###################################################### @@ -305,10 +286,9 @@ def test_ogr_pgdump_3(): feat = shp_lyr.GetNextFeature() - dst_feat.Destroy() - ds.Destroy() + ds.Close() - with open("tmp/tpoly.sql") as f: + with open(tmp_path / "tpoly.sql") as f: sql = f.read() # print(sql) @@ -355,10 +335,10 @@ def check_and_remove(needle): # Test multi-geometry support -def test_ogr_pgdump_4(): +def test_ogr_pgdump_4(tmp_vsimem): ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/ogr_pgdump_4.sql", options=["LINEFORMAT=LF"] + tmp_vsimem / "ogr_pgdump_4.sql", options=["LINEFORMAT=LF"] ) assert ds.TestCapability(ogr.ODsCCreateGeomFieldAfterCreateLayer) != 0 @@ -389,12 +369,10 @@ def test_ogr_pgdump_4(): ds = None - f = gdal.VSIFOpenL("/vsimem/ogr_pgdump_4.sql", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "ogr_pgdump_4.sql", "rb") sql = gdal.VSIFReadL(1, 10000, f).decode("ascii") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/ogr_pgdump_4.sql") - # print(sql) def check_and_remove(needle): @@ -428,10 +406,10 @@ def check_and_remove(needle): # Test non nullable, unique and comment field support -def test_ogr_pgdump_non_nullable_unique_comment(): +def test_ogr_pgdump_non_nullable_unique_comment(tmp_vsimem): ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/ogr_pgdump_5.sql", options=["LINEFORMAT=LF"] + tmp_vsimem / "ogr_pgdump_5.sql", options=["LINEFORMAT=LF"] ) lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone) field_defn = ogr.FieldDefn("field_not_nullable", ogr.OFTString) @@ -472,12 +450,10 @@ def test_ogr_pgdump_non_nullable_unique_comment(): ds = None - f = 
gdal.VSIFOpenL("/vsimem/ogr_pgdump_5.sql", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "ogr_pgdump_5.sql", "rb") sql = gdal.VSIFReadL(1, 1000, f).decode("ascii") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/ogr_pgdump_5.sql") - # print(sql) def check_and_remove(needle): @@ -510,10 +486,10 @@ def check_and_remove(needle): # Test default values -def test_ogr_pgdump_6(): +def test_ogr_pgdump_6(tmp_vsimem): ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/ogr_pgdump_6.sql", options=["LINEFORMAT=LF"] + tmp_vsimem / "ogr_pgdump_6.sql", options=["LINEFORMAT=LF"] ) lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone) @@ -580,12 +556,10 @@ def test_ogr_pgdump_6(): ds = None - f = gdal.VSIFOpenL("/vsimem/ogr_pgdump_6.sql", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "ogr_pgdump_6.sql", "rb") sql = gdal.VSIFReadL(1, 10000, f).decode("ascii") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/ogr_pgdump_6.sql") - # print(sql) def check_and_remove(needle): @@ -626,10 +600,10 @@ def check_and_remove(needle): # Test creating a field with the fid name (PG_USE_COPY=NO) -def test_ogr_pgdump_7(): +def test_ogr_pgdump_7(tmp_vsimem): ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/ogr_pgdump_7.sql", options=["LINEFORMAT=LF"] + tmp_vsimem / "ogr_pgdump_7.sql", options=["LINEFORMAT=LF"] ) lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone, options=["FID=myfid"]) @@ -699,12 +673,10 @@ def test_ogr_pgdump_7(): ds = None - f = gdal.VSIFOpenL("/vsimem/ogr_pgdump_7.sql", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "ogr_pgdump_7.sql", "rb") sql = gdal.VSIFReadL(1, 10000, f).decode("ascii") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/ogr_pgdump_7.sql") - # print(sql) def check_and_remove(needle): @@ -735,10 +707,10 @@ def check_and_remove(needle): # Test creating a field with the fid name (PG_USE_COPY=YES) -def test_ogr_pgdump_8(): +def test_ogr_pgdump_8(tmp_vsimem): ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/ogr_pgdump_8.sql", options=["LINEFORMAT=LF"] + tmp_vsimem / "ogr_pgdump_8.sql", options=["LINEFORMAT=LF"] ) lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone, options=["FID=myfid"]) @@ -811,12 +783,10 @@ def test_ogr_pgdump_8(): ds = None - f = gdal.VSIFOpenL("/vsimem/ogr_pgdump_8.sql", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "ogr_pgdump_8.sql", "rb") sql = gdal.VSIFReadL(1, 10000, f).decode("ascii") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/ogr_pgdump_8.sql") - # print(sql) def check_and_remove(needle): @@ -851,11 +821,11 @@ def check_and_remove(needle): @pytest.mark.parametrize("pg_use_copy", ["YES", "NO"]) -def test_ogr_pgdump_9(pg_use_copy): +def test_ogr_pgdump_9(tmp_vsimem, pg_use_copy): with gdaltest.config_option("PG_USE_COPY", pg_use_copy): ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/ogr_pgdump_9.sql", options=["LINEFORMAT=LF"] + tmp_vsimem / "ogr_pgdump_9.sql", options=["LINEFORMAT=LF"] ) lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone) @@ -887,12 +857,10 @@ def test_ogr_pgdump_9(pg_use_copy): ds = None - f = gdal.VSIFOpenL("/vsimem/ogr_pgdump_9.sql", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "ogr_pgdump_9.sql", "rb") sql = gdal.VSIFReadL(1, 10000, f).decode("utf8") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/ogr_pgdump_9.sql") - if pg_use_copy == "YES": eofield = "\t" else: @@ -909,10 +877,10 @@ def test_ogr_pgdump_9(pg_use_copy): # Export POINT EMPTY for PostGIS 2.2 -def test_ogr_pgdump_11(): +def test_ogr_pgdump_11(tmp_vsimem): ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/ogr_pgdump_11.sql", options=["LINEFORMAT=LF"] + tmp_vsimem / 
"ogr_pgdump_11.sql", options=["LINEFORMAT=LF"] ) lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) f = ogr.Feature(lyr.GetLayerDefn()) @@ -921,12 +889,10 @@ def test_ogr_pgdump_11(): f = None ds = None - f = gdal.VSIFOpenL("/vsimem/ogr_pgdump_11.sql", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "ogr_pgdump_11.sql", "rb") sql = gdal.VSIFReadL(1, 10000, f).decode("utf8") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/ogr_pgdump_11.sql") - # clang -m32 generates F8FF..., instead of F87F... for all other systems assert ( "0101000000000000000000F87F000000000000F87F" in sql @@ -941,10 +907,10 @@ def test_ogr_pgdump_11(): # is not-nullable, and hence the CreateGeomField() interface is used. -def test_ogr_pgdump_12(): +def test_ogr_pgdump_12(tmp_vsimem): ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/ogr_pgdump_12.sql", options=["LINEFORMAT=LF"] + tmp_vsimem / "ogr_pgdump_12.sql", options=["LINEFORMAT=LF"] ) lyr = ds.CreateLayer( "test", geom_type=ogr.wkbNone, options=["GEOMETRY_NAME=another_name"] @@ -952,12 +918,10 @@ def test_ogr_pgdump_12(): lyr.CreateGeomField(ogr.GeomFieldDefn("my_geom", ogr.wkbPoint)) ds = None - f = gdal.VSIFOpenL("/vsimem/ogr_pgdump_12.sql", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "ogr_pgdump_12.sql", "rb") sql = gdal.VSIFReadL(1, 10000, f).decode("utf8") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/ogr_pgdump_12.sql") - assert "another_name" in sql @@ -1098,10 +1062,10 @@ def test_ogr_pgdump_12(): @pytest.mark.parametrize("geom_type,options,wkt,expected_strings", tests_zm) -def test_ogr_pgdump_zm(geom_type, options, wkt, expected_strings): +def test_ogr_pgdump_zm(tmp_vsimem, geom_type, options, wkt, expected_strings): ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/ogr_pgdump_13.sql", options=["LINEFORMAT=LF"] + tmp_vsimem / "ogr_pgdump_13.sql", options=["LINEFORMAT=LF"] ) lyr = ds.CreateLayer("test", geom_type=geom_type, options=options) f = ogr.Feature(lyr.GetLayerDefn()) @@ -1110,23 +1074,23 @@ def test_ogr_pgdump_zm(geom_type, options, wkt, expected_strings): f = None ds = None - f = gdal.VSIFOpenL("/vsimem/ogr_pgdump_13.sql", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "ogr_pgdump_13.sql", "rb") sql = gdal.VSIFReadL(1, 10000, f).decode("utf8") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/ogr_pgdump_13.sql") - for expected_string in expected_strings: assert expected_string in sql, (geom_type, options, wkt, expected_string) @pytest.mark.parametrize("geom_type,options,wkt,expected_strings", tests_zm) -def test_ogr_pgdump_zm_creategeomfield(geom_type, options, wkt, expected_strings): +def test_ogr_pgdump_zm_creategeomfield( + tmp_vsimem, geom_type, options, wkt, expected_strings +): if "GEOM_TYPE=geography" in options: return ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/ogr_pgdump_13.sql", options=["LINEFORMAT=LF"] + tmp_vsimem / "ogr_pgdump_13.sql", options=["LINEFORMAT=LF"] ) lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone, options=options) lyr.CreateGeomField(ogr.GeomFieldDefn("my_geom", geom_type)) @@ -1136,12 +1100,10 @@ def test_ogr_pgdump_zm_creategeomfield(geom_type, options, wkt, expected_strings f = None ds = None - f = gdal.VSIFOpenL("/vsimem/ogr_pgdump_13.sql", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "ogr_pgdump_13.sql", "rb") sql = gdal.VSIFReadL(1, 10000, f).decode("utf8") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/ogr_pgdump_13.sql") - for expected_string in expected_strings: assert expected_string in sql, (geom_type, options, wkt, expected_string) @@ -1150,11 +1112,11 @@ def 
test_ogr_pgdump_zm_creategeomfield(geom_type, options, wkt, expected_strings # Test description -def test_ogr_pgdump_14(): +def test_ogr_pgdump_14(tmp_vsimem): # Set with DESCRIPTION layer creation option ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/ogr_pgdump_14.sql", options=["LINEFORMAT=LF"] + tmp_vsimem / "ogr_pgdump_14.sql", options=["LINEFORMAT=LF"] ) lyr = ds.CreateLayer( "ogr_pgdump_14", geom_type=ogr.wkbPoint, options=["DESCRIPTION=foo"] @@ -1164,46 +1126,49 @@ def test_ogr_pgdump_14(): lyr.SetMetadataItem("DESCRIPTION", "baz") ds = None - f = gdal.VSIFOpenL("/vsimem/ogr_pgdump_14.sql", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "ogr_pgdump_14.sql", "rb") sql = gdal.VSIFReadL(1, 10000, f).decode("utf8") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/ogr_pgdump_14.sql") - assert ( """COMMENT ON TABLE "public"."ogr_pgdump_14" IS 'foo';""" in sql and "bar" not in sql and "baz" not in sql ) + +def test_ogr_pgdump_14_a(tmp_vsimem): + # Set with SetMetadataItem() ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/ogr_pgdump_14.sql", options=["LINEFORMAT=LF"] + tmp_vsimem / "ogr_pgdump_14.sql", options=["LINEFORMAT=LF"] ) lyr = ds.CreateLayer("ogr_pgdump_14", geom_type=ogr.wkbPoint) lyr.SetMetadataItem("DESCRIPTION", "bar") ds = None - f = gdal.VSIFOpenL("/vsimem/ogr_pgdump_14.sql", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "ogr_pgdump_14.sql", "rb") sql = gdal.VSIFReadL(1, 10000, f).decode("utf8") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/ogr_pgdump_14.sql") + gdal.Unlink(tmp_vsimem / "ogr_pgdump_14.sql") assert """COMMENT ON TABLE "public"."ogr_pgdump_14" IS 'bar';""" in sql + +def test_ogr_pgdump_14_b(tmp_vsimem): + # Set with SetMetadata() ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/ogr_pgdump_14.sql", options=["LINEFORMAT=LF"] + tmp_vsimem / "ogr_pgdump_14.sql", options=["LINEFORMAT=LF"] ) lyr = ds.CreateLayer("ogr_pgdump_14", geom_type=ogr.wkbPoint) lyr.SetMetadata({"DESCRIPTION": "baz"}) ds = None - f = gdal.VSIFOpenL("/vsimem/ogr_pgdump_14.sql", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "ogr_pgdump_14.sql", "rb") sql = gdal.VSIFReadL(1, 10000, f).decode("utf8") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/ogr_pgdump_14.sql") assert """COMMENT ON TABLE "public"."ogr_pgdump_14" IS 'baz';""" in sql @@ -1211,10 +1176,10 @@ def test_ogr_pgdump_14(): # NULL vs unset -def test_ogr_pgdump_15(): +def test_ogr_pgdump_15(tmp_vsimem): ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/ogr_pgdump_15.sql", options=["LINEFORMAT=LF"] + tmp_vsimem / "ogr_pgdump_15.sql", options=["LINEFORMAT=LF"] ) lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone) lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) @@ -1226,12 +1191,10 @@ def test_ogr_pgdump_15(): f = None ds = None - f = gdal.VSIFOpenL("/vsimem/ogr_pgdump_15.sql", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "ogr_pgdump_15.sql", "rb") sql = gdal.VSIFReadL(1, 10000, f).decode("utf8") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/ogr_pgdump_15.sql") - assert ( 'INSERT INTO "public"."test" ("str") VALUES (NULL)' in sql or 'INSERT INTO "public"."test" DEFAULT VALUES' in sql @@ -1243,11 +1206,11 @@ def test_ogr_pgdump_15(): @pytest.mark.parametrize("pg_use_copy", ["YES", "NO"]) -def test_ogr_pgdump_16(pg_use_copy): +def test_ogr_pgdump_16(tmp_vsimem, pg_use_copy): with gdal.config_option("PG_USE_COPY", pg_use_copy): ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/ogr_pgdump_16.sql", options=["LINEFORMAT=LF"] + tmp_vsimem / "ogr_pgdump_16.sql", options=["LINEFORMAT=LF"] ) lyr = 
ds.CreateLayer("test", geom_type=ogr.wkbNone) lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) @@ -1257,12 +1220,10 @@ def test_ogr_pgdump_16(pg_use_copy): f = None ds = None - f = gdal.VSIFOpenL("/vsimem/ogr_pgdump_16.sql", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "ogr_pgdump_16.sql", "rb") sql = gdal.VSIFReadL(1, 10000, f).decode("utf8") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/ogr_pgdump_16.sql") - assert ( """SELECT setval(pg_get_serial_sequence('"public"."test"', 'ogc_fid'), MAX("ogc_fid")) FROM "public"."test";""" in sql @@ -1273,10 +1234,10 @@ def test_ogr_pgdump_16(pg_use_copy): # Test temporary layer creation option -def test_ogr_pgdump_17(): +def test_ogr_pgdump_17(tmp_vsimem): ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/ogr_pgdump_17.sql", options=["LINEFORMAT=LF"] + tmp_vsimem / "ogr_pgdump_17.sql", options=["LINEFORMAT=LF"] ) lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint, options=["TEMPORARY=ON"]) f = ogr.Feature(lyr.GetLayerDefn()) @@ -1285,12 +1246,10 @@ def test_ogr_pgdump_17(): f = None ds = None - f = gdal.VSIFOpenL("/vsimem/ogr_pgdump_17.sql", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "ogr_pgdump_17.sql", "rb") sql = gdal.VSIFReadL(1, 10000, f).decode("utf8") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/ogr_pgdump_17.sql") - # print(sql) def check_and_remove(needle): @@ -1319,11 +1278,11 @@ def check_and_remove(needle): @pytest.mark.parametrize("pg_use_copy", ["YES", "NO"]) -def test_ogr_pgdump_GEOM_COLUMN_POSITION_END(pg_use_copy): +def test_ogr_pgdump_GEOM_COLUMN_POSITION_END(tmp_vsimem, pg_use_copy): with gdaltest.config_option("PG_USE_COPY", pg_use_copy): ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/test_ogr_pgdump_GEOM_COLUMN_POSITION_END.sql", + tmp_vsimem / "test_ogr_pgdump_GEOM_COLUMN_POSITION_END.sql", options=["LINEFORMAT=LF"], ) lyr = ds.CreateLayer( @@ -1342,12 +1301,12 @@ def test_ogr_pgdump_GEOM_COLUMN_POSITION_END(pg_use_copy): f = None ds = None - f = gdal.VSIFOpenL("/vsimem/test_ogr_pgdump_GEOM_COLUMN_POSITION_END.sql", "rb") + f = gdal.VSIFOpenL( + tmp_vsimem / "test_ogr_pgdump_GEOM_COLUMN_POSITION_END.sql", "rb" + ) sql = gdal.VSIFReadL(1, 10000, f).decode("utf8") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/test_ogr_pgdump_GEOM_COLUMN_POSITION_END.sql") - # print(sql) def check_and_remove(needle): @@ -1386,11 +1345,11 @@ def check_and_remove(needle): @pytest.mark.parametrize("pg_use_copy", ["YES", "NO"]) -def test_ogr_pgdump_GEOM_COLUMN_POSITION_END_FID_empty(pg_use_copy): +def test_ogr_pgdump_GEOM_COLUMN_POSITION_END_FID_empty(tmp_vsimem, pg_use_copy): with gdaltest.config_option("PG_USE_COPY", pg_use_copy): ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/test_ogr_pgdump_GEOM_COLUMN_POSITION_END_FID_empty.sql", + tmp_vsimem / "test_ogr_pgdump_GEOM_COLUMN_POSITION_END_FID_empty.sql", options=["LINEFORMAT=LF"], ) lyr = ds.CreateLayer( @@ -1410,13 +1369,11 @@ def test_ogr_pgdump_GEOM_COLUMN_POSITION_END_FID_empty(pg_use_copy): ds = None f = gdal.VSIFOpenL( - "/vsimem/test_ogr_pgdump_GEOM_COLUMN_POSITION_END_FID_empty.sql", "rb" + tmp_vsimem / "test_ogr_pgdump_GEOM_COLUMN_POSITION_END_FID_empty.sql", "rb" ) sql = gdal.VSIFReadL(1, 10000, f).decode("utf8") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/test_ogr_pgdump_GEOM_COLUMN_POSITION_END_FID_empty.sql") - # print(sql) def check_and_remove(needle): @@ -1454,10 +1411,10 @@ def check_and_remove(needle): # Test creating a layer without feature -def test_ogr_pgdump_no_feature(): +def test_ogr_pgdump_no_feature(tmp_vsimem): ds = 
ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/test_ogr_pgdump_no_feature.sql", options=["LINEFORMAT=LF"] + tmp_vsimem / "test_ogr_pgdump_no_feature.sql", options=["LINEFORMAT=LF"] ) lyr = ds.CreateLayer( "test", geom_type=ogr.wkbPoint, options=["GEOM_COLUMN_POSITION=END"] @@ -1465,12 +1422,10 @@ def test_ogr_pgdump_no_feature(): lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) ds = None - f = gdal.VSIFOpenL("/vsimem/test_ogr_pgdump_no_feature.sql", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "test_ogr_pgdump_no_feature.sql", "rb") sql = gdal.VSIFReadL(1, 10000, f).decode("utf8") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/test_ogr_pgdump_no_feature.sql") - # print(sql) def check_and_remove(needle): @@ -1496,10 +1451,10 @@ def check_and_remove(needle): # Test CREATE_TABLE=NO -def test_ogr_pgdump_CREATE_TABLE_NO(): +def test_ogr_pgdump_CREATE_TABLE_NO(tmp_vsimem): ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/test_ogr_pgdump_CREATE_TABLE_NO.sql", options=["LINEFORMAT=LF"] + tmp_vsimem / "test_ogr_pgdump_CREATE_TABLE_NO.sql", options=["LINEFORMAT=LF"] ) lyr = ds.CreateLayer( "test", @@ -1513,12 +1468,10 @@ def test_ogr_pgdump_CREATE_TABLE_NO(): lyr.CreateFeature(f) ds = None - f = gdal.VSIFOpenL("/vsimem/test_ogr_pgdump_CREATE_TABLE_NO.sql", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "test_ogr_pgdump_CREATE_TABLE_NO.sql", "rb") sql = gdal.VSIFReadL(1, 10000, f).decode("utf8") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/test_ogr_pgdump_CREATE_TABLE_NO.sql") - # print(sql) def check_and_remove(needle): @@ -1540,10 +1493,10 @@ def check_and_remove(needle): # Test long identifiers -def test_ogr_pgdump_long_identifiers(): +def test_ogr_pgdump_long_identifiers(tmp_vsimem): ds = ogr.GetDriverByName("PGDump").CreateDataSource( - "/vsimem/test_ogr_pgdump_long_identifiers.sql", options=["LINEFORMAT=LF"] + tmp_vsimem / "test_ogr_pgdump_long_identifiers.sql", options=["LINEFORMAT=LF"] ) long_name = "test_" + ("X" * 64) + "_long_name" @@ -1558,12 +1511,10 @@ def test_ogr_pgdump_long_identifiers(): lyr.CreateFeature(f) ds = None - f = gdal.VSIFOpenL("/vsimem/test_ogr_pgdump_long_identifiers.sql", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "test_ogr_pgdump_long_identifiers.sql", "rb") sql = gdal.VSIFReadL(1, 10000, f).decode("utf8") gdal.VSIFCloseL(f) - gdal.Unlink("/vsimem/test_ogr_pgdump_long_identifiers.sql") - # print(sql) def check_and_remove(needle): @@ -1638,15 +1589,3 @@ def test_ogr_pgdump_LAUNDER_ASCII(tmp_vsimem): gdal.VSIFCloseL(f) assert '"ae"' in sql assert '"be"' in sql - - -############################################################################### -# Cleanup - - -def test_ogr_pgdump_cleanup(): - - try: - os.remove("tmp/tpoly.sql") - except OSError: - pass From 3a57c6c38992d0d2a124ac4b08e483c9a5635603 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Tue, 2 Apr 2024 13:54:39 -0400 Subject: [PATCH 056/230] autotest ogr_pds4.py: use tmp_path, tmp_vsimem --- autotest/ogr/ogr_pds4.py | 147 ++++++++++++++++----------------------- 1 file changed, 61 insertions(+), 86 deletions(-) diff --git a/autotest/ogr/ogr_pds4.py b/autotest/ogr/ogr_pds4.py index 0aeb15957565..d05d0e08c245 100755 --- a/autotest/ogr/ogr_pds4.py +++ b/autotest/ogr/ogr_pds4.py @@ -130,7 +130,7 @@ def validate_xml(filename): ) ds = gdal.OpenEx( - "GMLAS:" + filename, + f"GMLAS:{filename}", open_options=[ "VALIDATE=YES", "FAIL_IF_VALIDATION_ERROR=YES", @@ -219,18 +219,18 @@ def test_ogr_pds4_read_table_character_test_ogrsf(): assert "INFO" in ret and "ERROR" not in ret -def 
test_ogr_pds4_append_and_modify_table_character(): +def test_ogr_pds4_append_and_modify_table_character(tmp_vsimem): gdal.FileFromMemBuffer( - "/vsimem/ele_evt_12hr_orbit_2011-2012_truncated.xml", + tmp_vsimem / "ele_evt_12hr_orbit_2011-2012_truncated.xml", open("data/pds4/ele_evt_12hr_orbit_2011-2012_truncated.xml", "rb").read(), ) gdal.FileFromMemBuffer( - "/vsimem/ele_evt_12hr_orbit_2011-2012_truncated.tab", + tmp_vsimem / "ele_evt_12hr_orbit_2011-2012_truncated.tab", open("data/pds4/ele_evt_12hr_orbit_2011-2012_truncated.tab", "rb").read(), ) - ds = ogr.Open("/vsimem/ele_evt_12hr_orbit_2011-2012_truncated.xml", update=1) + ds = ogr.Open(tmp_vsimem / "ele_evt_12hr_orbit_2011-2012_truncated.xml", update=1) lyr = ds.GetLayer(0) assert lyr.TestCapability(ogr.OLCSequentialWrite) f = ogr.Feature(lyr.GetLayerDefn()) @@ -243,10 +243,10 @@ def test_ogr_pds4_append_and_modify_table_character(): assert not f.IsFieldSet("MET") ds = None - assert validate_xml("/vsimem/ele_evt_12hr_orbit_2011-2012_truncated.xml") + assert validate_xml(tmp_vsimem / "ele_evt_12hr_orbit_2011-2012_truncated.xml") # Re-open - ds = ogr.Open("/vsimem/ele_evt_12hr_orbit_2011-2012_truncated.xml", update=1) + ds = ogr.Open(tmp_vsimem / "ele_evt_12hr_orbit_2011-2012_truncated.xml", update=1) lyr = ds.GetLayer(0) assert lyr.GetFeatureCount() == 6 f = lyr.GetFeature(6) @@ -254,30 +254,26 @@ def test_ogr_pds4_append_and_modify_table_character(): assert f.GetGeometryRef() is None ds = None - ogr.GetDriverByName("PDS4").DeleteDataSource( - "/vsimem/ele_evt_12hr_orbit_2011-2012_truncated.xml" - ) - -def test_ogr_pds4_delete_from_table_character(): +def test_ogr_pds4_delete_from_table_character(tmp_vsimem): gdal.FileFromMemBuffer( - "/vsimem/ele_evt_12hr_orbit_2011-2012_truncated.xml", + tmp_vsimem / "ele_evt_12hr_orbit_2011-2012_truncated.xml", open("data/pds4/ele_evt_12hr_orbit_2011-2012_truncated.xml", "rb").read(), ) gdal.FileFromMemBuffer( - "/vsimem/ele_evt_12hr_orbit_2011-2012_truncated.tab", + tmp_vsimem / "ele_evt_12hr_orbit_2011-2012_truncated.tab", open("data/pds4/ele_evt_12hr_orbit_2011-2012_truncated.tab", "rb").read(), ) - ds = ogr.Open("/vsimem/ele_evt_12hr_orbit_2011-2012_truncated.xml", update=1) + ds = ogr.Open(tmp_vsimem / "ele_evt_12hr_orbit_2011-2012_truncated.xml", update=1) lyr = ds.GetLayer(0) assert lyr.TestCapability(ogr.OLCDeleteFeature) assert lyr.DeleteFeature(2) == 0 assert lyr.GetFeatureCount() == 4 ds = None - f = gdal.VSIFOpenL("/vsimem/ele_evt_12hr_orbit_2011-2012_truncated.xml", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "ele_evt_12hr_orbit_2011-2012_truncated.xml", "rb") data = gdal.VSIFReadL(1, 100000, f).decode("ascii") gdal.VSIFCloseL(f) @@ -286,10 +282,10 @@ def test_ogr_pds4_delete_from_table_character(): assert "<description>EE event number. 
The value is repeated for" in data assert "<Special_Constants>" in data - assert validate_xml("/vsimem/ele_evt_12hr_orbit_2011-2012_truncated.xml") + assert validate_xml(tmp_vsimem / "ele_evt_12hr_orbit_2011-2012_truncated.xml") # Re-open - ds = ogr.Open("/vsimem/ele_evt_12hr_orbit_2011-2012_truncated.xml", update=1) + ds = ogr.Open(tmp_vsimem / "ele_evt_12hr_orbit_2011-2012_truncated.xml", update=1) lyr = ds.GetLayer(0) assert lyr.GetFeatureCount() == 4 @@ -306,37 +302,31 @@ def test_ogr_pds4_delete_from_table_character(): assert f["BP_LOW"] == 102.9400024 ds = None - ogr.GetDriverByName("PDS4").DeleteDataSource( - "/vsimem/ele_evt_12hr_orbit_2011-2012_truncated.xml" - ) - -def test_ogr_pds4_read_write_table_character_test_ogrsf(): +def test_ogr_pds4_read_write_table_character_test_ogrsf(tmp_path): import test_cli_utilities if test_cli_utilities.get_test_ogrsf_path() is None: pytest.skip() - open("tmp/ele_evt_12hr_orbit_2011-2012_truncated.xml", "wb").write( + open(tmp_path / "ele_evt_12hr_orbit_2011-2012_truncated.xml", "wb").write( open("data/pds4/ele_evt_12hr_orbit_2011-2012_truncated.xml", "rb").read() ) - open("tmp/ele_evt_12hr_orbit_2011-2012_truncated.tab", "wb").write( + open(tmp_path / "ele_evt_12hr_orbit_2011-2012_truncated.tab", "wb").write( open("data/pds4/ele_evt_12hr_orbit_2011-2012_truncated.tab", "rb").read() ) ret = gdaltest.runexternal( test_cli_utilities.get_test_ogrsf_path() - + " tmp/ele_evt_12hr_orbit_2011-2012_truncated.xml" + + f" {tmp_path}/ele_evt_12hr_orbit_2011-2012_truncated.xml" ) - gdal.Unlink("tmp/ele_evt_12hr_orbit_2011-2012_truncated.xml") - gdal.Unlink("tmp/ele_evt_12hr_orbit_2011-2012_truncated.tab") assert "INFO" in ret and "ERROR" not in ret, ret @pytest.mark.parametrize("line_ending", [None, "CRLF", "LF", "error"]) -def test_ogr_pds4_create_table_character(line_ending): +def test_ogr_pds4_create_table_character(tmp_vsimem, line_ending): options = [ "VAR_LOGICAL_IDENTIFIER=urn:foo:bar:baz:logical_identifier", @@ -349,7 +339,7 @@ def test_ogr_pds4_create_table_character(line_ending): ] ds = ogr.GetDriverByName("PDS4").CreateDataSource( - "/vsimem/test.xml", options=options + tmp_vsimem / "test.xml", options=options ) layer_creation_options = ["TABLE_TYPE=CHARACTER"] @@ -382,7 +372,7 @@ def test_ogr_pds4_create_table_character(line_ending): lyr.CreateFeature(f) ds = None - f = gdal.VSIFOpenL("/vsimem/test.xml", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "test.xml", "rb") data = gdal.VSIFReadL(1, 100000, f).decode("ascii") gdal.VSIFCloseL(f) @@ -398,11 +388,11 @@ def test_ogr_pds4_create_table_character(line_ending): if line_ending is None: # Only do that check in that configuration for faster test execution - assert validate_xml("/vsimem/test.xml") + assert validate_xml(tmp_vsimem / "test.xml") - assert gdal.VSIStatL("/vsimem/test/0f_oo.dat") + assert gdal.VSIStatL(tmp_vsimem / "test/0f_oo.dat") - f = gdal.VSIFOpenL("/vsimem/test/0f_oo.dat", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "test/0f_oo.dat", "rb") data = gdal.VSIFReadL(1, 100000, f).decode("ascii") gdal.VSIFCloseL(f) if line_ending == "LF": @@ -411,7 +401,7 @@ def test_ogr_pds4_create_table_character(line_ending): else: assert "\r\n" in data - ds = ogr.Open("/vsimem/test.xml") + ds = ogr.Open(tmp_vsimem / "test.xml") lyr = ds.GetLayer(0) assert lyr.GetLayerDefn().GetFieldCount() == 8 f = lyr.GetNextFeature() @@ -429,7 +419,7 @@ def test_ogr_pds4_create_table_character(line_ending): # Only do that part in that configuration for faster test execution # Add new layer - ds = ogr.Open("/vsimem/test.xml", 
update=1) + ds = ogr.Open(tmp_vsimem / "test.xml", update=1) lyr = ds.CreateLayer("bar", options=["TABLE_TYPE=CHARACTER"]) lyr.CreateField(ogr.FieldDefn("int", ogr.OFTInteger)) f = ogr.Feature(lyr.GetLayerDefn()) @@ -437,9 +427,9 @@ def test_ogr_pds4_create_table_character(line_ending): lyr.CreateFeature(f) ds = None - assert validate_xml("/vsimem/test.xml") + assert validate_xml(tmp_vsimem / "test.xml") - ds = ogr.Open("/vsimem/test.xml") + ds = ogr.Open(tmp_vsimem / "test.xml") lyr = ds.GetLayerByName("bar") f = lyr.GetNextFeature() assert f["int"] == 123 @@ -450,11 +440,10 @@ def test_ogr_pds4_create_table_character(line_ending): ds = None - ogr.GetDriverByName("PDS4").DeleteDataSource("/vsimem/test.xml") - gdal.Rmdir("/vsimem/test") + ogr.GetDriverByName("PDS4").DeleteDataSource(tmp_vsimem / "test.xml") -def test_ogr_pds4_create_with_srs(): +def test_ogr_pds4_create_with_srs(tmp_vsimem): options = [ "VAR_LOGICAL_IDENTIFIER=urn:foo:bar:baz:logical_identifier", @@ -462,7 +451,7 @@ def test_ogr_pds4_create_with_srs(): ] ds = ogr.GetDriverByName("PDS4").CreateDataSource( - "/vsimem/test.xml", options=options + tmp_vsimem / "test.xml", options=options ) sr = osr.SpatialReference() sr.SetFromUserInput("WGS84") @@ -477,17 +466,17 @@ def test_ogr_pds4_create_with_srs(): lyr.CreateFeature(f) ds = None - assert validate_xml("/vsimem/test.xml") + assert validate_xml(tmp_vsimem / "test.xml") - assert gdal.VSIStatL("/vsimem/bar.dat") + assert gdal.VSIStatL(tmp_vsimem / "bar.dat") - f = gdal.VSIFOpenL("/vsimem/test.xml", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "test.xml", "rb") data = gdal.VSIFReadL(1, 100000, f).decode("ascii") gdal.VSIFCloseL(f) assert "<local_identifier_reference>bar</local_identifier_reference>" in data assert "<local_identifier>bar</local_identifier>" in data - ds = ogr.Open("/vsimem/test.xml") + ds = ogr.Open(tmp_vsimem / "test.xml") lyr = ds.GetLayerByName("bar") assert lyr.GetSpatialRef() assert lyr.GetSpatialRef().IsGeographic() @@ -495,10 +484,8 @@ def test_ogr_pds4_create_with_srs(): assert f.GetGeometryRef().ExportToIsoWkt() == "POINT Z (1 2 3)" ds = None - ogr.GetDriverByName("PDS4").DeleteDataSource("/vsimem/test.xml") - -def test_ogr_pds4_create_table_binary(): +def test_ogr_pds4_create_table_binary(tmp_vsimem): options = [ "VAR_LOGICAL_IDENTIFIER=urn:foo:bar:baz:logical_identifier", @@ -514,7 +501,7 @@ def test_ogr_pds4_create_table_binary(): for endianness in ["LSB", "MSB"]: ds = ogr.GetDriverByName("PDS4").CreateDataSource( - "/vsimem/test.xml", options=options + tmp_vsimem / "test.xml", options=options ) layername = endianness @@ -565,7 +552,7 @@ def test_ogr_pds4_create_table_binary(): ds = None - f = gdal.VSIFOpenL("/vsimem/test.xml", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "test.xml", "rb") data = gdal.VSIFReadL(1, 100000, f).decode("ascii") gdal.VSIFCloseL(f) @@ -587,9 +574,9 @@ def test_ogr_pds4_create_table_binary(): assert "Unsigned" in data, data assert "Signed" not in data, data - assert validate_xml("/vsimem/test.xml") + assert validate_xml(tmp_vsimem / "test.xml") - ds = ogr.Open("/vsimem/test.xml") + ds = ogr.Open(tmp_vsimem / "test.xml") layername = endianness lyr = ds.GetLayerByName(layername) assert lyr.GetLayerDefn().GetFieldCount() == 11 @@ -609,7 +596,7 @@ def test_ogr_pds4_create_table_binary(): ds = None # Add new layer - ds = ogr.Open("/vsimem/test.xml", update=1) + ds = ogr.Open(tmp_vsimem / "test.xml", update=1) sr = osr.SpatialReference() sr.SetFromUserInput("WGS84") lyr = ds.CreateLayer( @@ -620,21 +607,18 @@ def 
test_ogr_pds4_create_table_binary(): lyr.CreateFeature(f) ds = None - assert validate_xml("/vsimem/test.xml") + assert validate_xml(tmp_vsimem / "test.xml") - ds = ogr.Open("/vsimem/test.xml") + ds = ogr.Open(tmp_vsimem / "test.xml") lyr = ds.GetLayerByName("bar") f = lyr.GetNextFeature() assert f.GetGeometryRef().ExportToIsoWkt() == "POINT Z (1 2 3)" ds = None - ogr.GetDriverByName("PDS4").DeleteDataSource("/vsimem/test.xml") - gdal.Rmdir("/vsimem/test") - @pytest.mark.parametrize("line_ending", [None, "CRLF", "LF", "error"]) @pytest.mark.require_driver("CSV") -def test_ogr_pds4_create_table_delimited(line_ending): +def test_ogr_pds4_create_table_delimited(tmp_vsimem, line_ending): options = [ "VAR_LOGICAL_IDENTIFIER=urn:foo:bar:baz:logical_identifier", @@ -647,7 +631,7 @@ def test_ogr_pds4_create_table_delimited(line_ending): ] ds = ogr.GetDriverByName("PDS4").CreateDataSource( - "/vsimem/test.xml", options=options + tmp_vsimem / "test.xml", options=options ) layer_creation_options = [] @@ -682,7 +666,7 @@ def test_ogr_pds4_create_table_delimited(line_ending): lyr.CreateFeature(f) ds = None - f = gdal.VSIFOpenL("/vsimem/test.xml", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "test.xml", "rb") data = gdal.VSIFReadL(1, 100000, f).decode("ascii") gdal.VSIFCloseL(f) @@ -697,9 +681,9 @@ def test_ogr_pds4_create_table_delimited(line_ending): if line_ending is None: # Only do that check in that configuration for faster test execution - assert validate_xml("/vsimem/test.xml") + assert validate_xml(tmp_vsimem / "test.xml") - ds = gdal.OpenEx("/vsimem/test.xml") + ds = gdal.OpenEx(tmp_vsimem / "test.xml") assert ds assert ds.GetLayerCount() == 1 fl = ds.GetFileList() @@ -709,7 +693,7 @@ def test_ogr_pds4_create_table_delimited(line_ending): assert "foo.vrt" in fl[2] ds = None - f = gdal.VSIFOpenL("/vsimem/test/foo.csv", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "test/foo.csv", "rb") data = gdal.VSIFReadL(1, 100000, f).decode("ascii") gdal.VSIFCloseL(f) if line_ending == "LF": @@ -718,7 +702,7 @@ def test_ogr_pds4_create_table_delimited(line_ending): else: assert "\r\n" in data - for filename in ["/vsimem/test.xml", "/vsimem/test/foo.vrt"]: + for filename in [tmp_vsimem / "test.xml", tmp_vsimem / "test/foo.vrt"]: ds = ogr.Open(filename) lyr = ds.GetLayer(0) assert lyr.GetLayerDefn().GetFieldCount() == 8, filename @@ -738,7 +722,7 @@ def test_ogr_pds4_create_table_delimited(line_ending): # Only do that part in that configuration for faster test execution # Add new layer - ds = ogr.Open("/vsimem/test.xml", update=1) + ds = ogr.Open(tmp_vsimem / "test.xml", update=1) lyr = ds.CreateLayer( "no_geom", geom_type=ogr.wkbNone, options=["TABLE_TYPE=DELIMITED"] ) @@ -748,9 +732,9 @@ def test_ogr_pds4_create_table_delimited(line_ending): lyr.CreateFeature(f) ds = None - assert validate_xml("/vsimem/test.xml") + assert validate_xml(tmp_vsimem / "test.xml") - ds = ogr.Open("/vsimem/test.xml") + ds = ogr.Open(tmp_vsimem / "test.xml") lyr = ds.GetLayerByName("no_geom") f = lyr.GetNextFeature() assert f["int"] == 123 @@ -761,9 +745,6 @@ def test_ogr_pds4_create_table_delimited(line_ending): ds = None - ogr.GetDriverByName("PDS4").DeleteDataSource("/vsimem/test.xml") - gdal.Rmdir("/vsimem/test") - def test_ogr_pds4_read_table_binary_group_field(): @@ -777,7 +758,7 @@ def test_ogr_pds4_read_table_binary_group_field(): assert f["solar_mon_spectrum_23_253_5"] == 31259 -def test_ogr_pds4_create_table_delimited_with_srs_no_vrt(): +def test_ogr_pds4_create_table_delimited_with_srs_no_vrt(tmp_vsimem): options = [ 
"VAR_LOGICAL_IDENTIFIER=urn:foo:bar:baz:logical_identifier", @@ -790,7 +771,7 @@ def test_ogr_pds4_create_table_delimited_with_srs_no_vrt(): ] ds = ogr.GetDriverByName("PDS4").CreateDataSource( - "/vsimem/test.xml", options=options + tmp_vsimem / "test.xml", options=options ) srs = osr.SpatialReference() srs.SetFromUserInput("+proj=tmerc +datum=WGS84") @@ -800,9 +781,9 @@ def test_ogr_pds4_create_table_delimited_with_srs_no_vrt(): lyr.CreateFeature(f) ds = None - assert validate_xml("/vsimem/test.xml") + assert validate_xml(tmp_vsimem / "test.xml") - ds = ogr.Open("/vsimem/test.xml") + ds = ogr.Open(tmp_vsimem / "test.xml") lyr = ds.GetLayerByName("foo") wkt = lyr.GetSpatialRef().ExportToWkt() assert wkt.replace( @@ -813,31 +794,25 @@ def test_ogr_pds4_create_table_delimited_with_srs_no_vrt(): ds = None - ogr.GetDriverByName("PDS4").DeleteDataSource("/vsimem/test.xml") - gdal.Rmdir("/vsimem/test") - -def test_ogr_pds4_read_table_delimited_test_ogrsf(): +def test_ogr_pds4_read_table_delimited_test_ogrsf(tmp_path): import test_cli_utilities if test_cli_utilities.get_test_ogrsf_path() is None: pytest.skip() - open("tmp/poly_delimited.xml", "wb").write( + open(tmp_path / "poly_delimited.xml", "wb").write( open("data/pds4/poly_delimited.xml", "rb").read() ) - open("tmp/poly_delimited.csv", "wb").write( + open(tmp_path / "poly_delimited.csv", "wb").write( open("data/pds4/poly_delimited.csv", "rb").read() ) ret = gdaltest.runexternal( - test_cli_utilities.get_test_ogrsf_path() + " tmp/poly_delimited.xml" + test_cli_utilities.get_test_ogrsf_path() + f" {tmp_path}/poly_delimited.xml" ) - gdal.Unlink("tmp/poly_delimited.xml") - gdal.Unlink("tmp/poly_delimited.csv") - assert "INFO" in ret and "ERROR" not in ret From 6ff5f8a50bfc03b8305fba11de356c20ad6bacef Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Wed, 28 Jun 2023 13:28:45 -0400 Subject: [PATCH 057/230] autotest: Allow custom temp dir in GDALTest This allows a temporary directory generated by pytest to be passed to GDALTest, preventing collisions when running tests in parallel. 
--- autotest/gcore/hfa_write.py | 10 ++++++---- autotest/pymod/gdaltest.py | 38 +++++++++++++++++++++++++++++-------- 2 files changed, 36 insertions(+), 12 deletions(-) diff --git a/autotest/gcore/hfa_write.py b/autotest/gcore/hfa_write.py index 21ebd586cd7a..da0ea139c08a 100755 --- a/autotest/gcore/hfa_write.py +++ b/autotest/gcore/hfa_write.py @@ -397,8 +397,8 @@ def test_hfa_update_existing_aux_overviews(): ], ) @pytest.mark.require_driver("HFA") -def test_hfa_create_normal(filename, checksum, testfunction): - ut = gdaltest.GDALTest("HFA", filename, 1, checksum) +def test_hfa_create_normal(filename, checksum, testfunction, tmp_path): + ut = gdaltest.GDALTest("HFA", filename, 1, checksum, tmpdir=str(tmp_path)) getattr(ut, testfunction)() @@ -419,8 +419,10 @@ def test_hfa_create_normal(filename, checksum, testfunction): ], ) @pytest.mark.require_driver("HFA") -def test_hfa_create_spill(filename, checksum, testfunction): - ut = gdaltest.GDALTest("HFA", filename, 1, checksum, options=["USE_SPILL=YES"]) +def test_hfa_create_spill(filename, checksum, testfunction, tmp_path): + ut = gdaltest.GDALTest( + "HFA", filename, 1, checksum, options=["USE_SPILL=YES"], tmpdir=str(tmp_path) + ) getattr(ut, testfunction)() diff --git a/autotest/pymod/gdaltest.py b/autotest/pymod/gdaltest.py index 0df2d87773f1..509e7f87f52a 100755 --- a/autotest/pymod/gdaltest.py +++ b/autotest/pymod/gdaltest.py @@ -105,6 +105,7 @@ def __init__( filename_absolute=0, chksum_after_reopening=None, open_options=None, + tmpdir=None, ): self.driver = None self.drivername = drivername @@ -132,6 +133,11 @@ def __init__( self.options = [] if options is None else options self.open_options = open_options + if tmpdir is None: + self.tmpdir = "tmp/" + else: + self.tmpdir = tmpdir + def testDriver(self): if self.driver is None: self.driver = gdal.GetDriverByName(self.drivername) @@ -399,7 +405,9 @@ def testCreateCopy( if vsimem: new_filename = "/vsimem/" + os.path.basename(self.filename) + ".tst" else: - new_filename = "tmp/" + os.path.basename(self.filename) + ".tst" + new_filename = os.path.join( + self.tmpdir, os.path.basename(self.filename) + ".tst" + ) if quiet_error_handler: gdal.PushErrorHandler("CPLQuietErrorHandler") @@ -581,7 +589,9 @@ def testCreate( if vsimem: new_filename = "/vsimem/" + self.filename + ".tst" else: - new_filename = "tmp/" + os.path.basename(self.filename) + ".tst" + new_filename = os.path.join( + self.tmpdir, os.path.basename(self.filename) + ".tst" + ) new_ds = self.driver.Create( new_filename, @@ -662,7 +672,9 @@ def testSetGeoTransform(self): xsize = src_ds.RasterXSize ysize = src_ds.RasterYSize - new_filename = "tmp/" + os.path.basename(self.filename) + ".tst" + new_filename = os.path.join( + self.tmpdir, os.path.basename(self.filename) + ".tst" + ) new_ds = self.driver.Create( new_filename, xsize, @@ -718,7 +730,9 @@ def testSetProjection(self, prj=None, expected_prj=None): xsize = src_ds.RasterXSize ysize = src_ds.RasterYSize - new_filename = "tmp/" + os.path.basename(self.filename) + ".tst" + new_filename = os.path.join( + self.tmpdir, os.path.basename(self.filename) + ".tst" + ) new_ds = self.driver.Create( new_filename, xsize, @@ -782,7 +796,9 @@ def testSetMetadata(self): xsize = src_ds.RasterXSize ysize = src_ds.RasterYSize - new_filename = "tmp/" + os.path.basename(self.filename) + ".tst" + new_filename = os.path.join( + self.tmpdir, os.path.basename(self.filename) + ".tst" + ) new_ds = self.driver.Create( new_filename, xsize, @@ -831,7 +847,9 @@ def testSetNoDataValue(self, delete=False): xsize 
= src_ds.RasterXSize ysize = src_ds.RasterYSize - new_filename = "tmp/" + os.path.basename(self.filename) + ".tst" + new_filename = os.path.join( + self.tmpdir, os.path.basename(self.filename) + ".tst" + ) dt = src_ds.GetRasterBand(self.band).DataType new_ds = self.driver.Create( new_filename, @@ -901,7 +919,9 @@ def testSetDescription(self): xsize = src_ds.RasterXSize ysize = src_ds.RasterYSize - new_filename = "tmp/" + os.path.basename(self.filename) + ".tst" + new_filename = os.path.join( + self.tmpdir, os.path.basename(self.filename) + ".tst" + ) new_ds = self.driver.Create( new_filename, xsize, @@ -944,7 +964,9 @@ def testSetUnitType(self): xsize = src_ds.RasterXSize ysize = src_ds.RasterYSize - new_filename = "tmp/" + os.path.basename(self.filename) + ".tst" + new_filename = os.path.join( + self.tmpdir, os.path.basename(self.filename) + ".tst" + ) new_ds = self.driver.Create( new_filename, xsize, From e712b990679cf52f465f7236c551373944a1e640 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Tue, 2 Apr 2024 20:00:58 -0400 Subject: [PATCH 058/230] autotest tiff_read.py: use tmp_path, tmp_vsimem, parametrize --- autotest/gcore/tiff_read.py | 788 +++++++++++++++++------------------- 1 file changed, 382 insertions(+), 406 deletions(-) diff --git a/autotest/gcore/tiff_read.py b/autotest/gcore/tiff_read.py index 01e1d2b0a471..6e41defd2b1a 100755 --- a/autotest/gcore/tiff_read.py +++ b/autotest/gcore/tiff_read.py @@ -80,8 +80,8 @@ ids=[tup[0].split(".")[0] for tup in init_list], ) @pytest.mark.require_driver("GTiff") -def test_tiff_open(filename, band, checksum): - ut = gdaltest.GDALTest("GTiff", filename, band, checksum) +def test_tiff_open(filename, band, checksum, tmp_path): + ut = gdaltest.GDALTest("GTiff", filename, band, checksum, tmpdir=tmp_path) ut.testOpen() @@ -621,7 +621,7 @@ def test_tiff_multi_images(): # Test reading a tiff from a memory buffer (#2931) -def test_tiff_vsimem(): +def test_tiff_vsimem(tmp_vsimem): try: gdal.FileFromMemBuffer @@ -631,16 +631,16 @@ def test_tiff_vsimem(): content = open("data/byte.tif", mode="rb").read() # Create in-memory file - gdal.FileFromMemBuffer("/vsimem/tiffinmem", content) + gdal.FileFromMemBuffer(tmp_vsimem / "tiffinmem", content) - ds = gdal.Open("/vsimem/tiffinmem", gdal.GA_Update) + ds = gdal.Open(tmp_vsimem / "tiffinmem", gdal.GA_Update) assert ( ds.GetRasterBand(1).Checksum() == 4672 ), "Expected checksum = %d. Got = %d" % (4672, ds.GetRasterBand(1).Checksum()) ds.GetRasterBand(1).Fill(0) ds = None - ds = gdal.Open("/vsimem/tiffinmem") + ds = gdal.Open(tmp_vsimem / "tiffinmem") assert ds.GetRasterBand(1).Checksum() == 0, "Expected checksum = %d. Got = %d" % ( 0, ds.GetRasterBand(1).Checksum(), @@ -648,22 +648,19 @@ def test_tiff_vsimem(): ds = None # Also test with anti-slash - ds = gdal.Open("/vsimem\\tiffinmem") + ds = gdal.Open(f"{tmp_vsimem}\\tiffinmem") assert ds.GetRasterBand(1).Checksum() == 0, "Expected checksum = %d. Got = %d" % ( 0, ds.GetRasterBand(1).Checksum(), ) ds = None - # Release memory associated to the in-memory file - gdal.Unlink("/vsimem/tiffinmem") - ############################################################################### # Test reading a tiff from inside a zip in a memory buffer ! 
-def test_tiff_vsizip_and_mem(): +def test_tiff_vsizip_and_mem(tmp_vsimem): try: gdal.FileFromMemBuffer @@ -673,16 +670,13 @@ def test_tiff_vsizip_and_mem(): content = open("data/byte.tif.zip", mode="rb").read() # Create in-memory file - gdal.FileFromMemBuffer("/vsimem/tiffinmem.zip", content) + gdal.FileFromMemBuffer(tmp_vsimem / "tiffinmem.zip", content) - ds = gdal.Open("/vsizip/vsimem/tiffinmem.zip/byte.tif") + ds = gdal.Open(f"/vsizip/{tmp_vsimem}/tiffinmem.zip/byte.tif") assert ( ds.GetRasterBand(1).Checksum() == 4672 ), "Expected checksum = %d. Got = %d" % (4672, ds.GetRasterBand(1).Checksum()) - # Release memory associated to the in-memory file - gdal.Unlink("/vsimem/tiffinmem.zip") - ############################################################################### # Test reading a GeoTIFF with only ProjectedCSTypeGeoKey defined (ticket #3019) @@ -790,12 +784,12 @@ def test_tiff_read_stats_from_pam(): # Test extracting georeferencing from a .TAB file -def test_tiff_read_from_tab(): +def test_tiff_read_from_tab(tmp_path): - ds = gdal.GetDriverByName("GTiff").Create("tmp/tiff_read_from_tab.tif", 1, 1) + ds = gdal.GetDriverByName("GTiff").Create(tmp_path / "tiff_read_from_tab.tif", 1, 1) ds = None - f = open("tmp/tiff_read_from_tab.tab", "wt") + f = open(tmp_path / "tiff_read_from_tab.tab", "wt") f.write( """!table !version 300 @@ -814,14 +808,14 @@ def test_tiff_read_from_tab(): ) f.close() - ds = gdal.Open("tmp/tiff_read_from_tab.tif") + ds = gdal.Open(tmp_path / "tiff_read_from_tab.tif") gt = ds.GetGeoTransform() wkt = ds.GetProjectionRef() ds = None - gdal.GetDriverByName("GTiff").Delete("tmp/tiff_read_from_tab.tif") + gdal.GetDriverByName("GTiff").Delete(tmp_path / "tiff_read_from_tab.tif") - assert not os.path.exists("tmp/tiff_read_from_tab.tab") + assert not os.path.exists(tmp_path / "tiff_read_from_tab.tab") assert gt == ( 400000.0, @@ -976,15 +970,13 @@ def test_tiff_read_buggy_packbits(): # Test reading a GeoEye _rpc.txt (#3639) -def test_tiff_read_rpc_txt(): +def test_tiff_read_rpc_txt(tmp_path): - shutil.copy("data/byte.tif", "tmp/test.tif") - shutil.copy("data/test_rpc.txt", "tmp/test_rpc.txt") - ds = gdal.Open("tmp/test.tif") + shutil.copy("data/byte.tif", tmp_path / "test.tif") + shutil.copy("data/test_rpc.txt", tmp_path / "test_rpc.txt") + ds = gdal.Open(tmp_path / "test.tif") rpc_md = ds.GetMetadata("RPC") ds = None - os.remove("tmp/test.tif") - os.remove("tmp/test_rpc.txt") assert rpc_md["HEIGHT_OFF"] == "+0300.000 meters", ( 'HEIGHT_OFF wrong:"' + rpc_md["HEIGHT_OFF"] + '"' @@ -1024,22 +1016,19 @@ def test_tiff_read_rpc_tif(): # StripByteCounts (279) LONG (4) 1<1> -def test_tiff_small(): +def test_tiff_small(tmp_vsimem): content = "\x49\x49\x2A\x00\x08\x00\x00\x00\x04\x00\x00\x01\x03\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x01\x03\x00\x01\x00\x00\x00\x01\x00\x00\x00\x11\x01\x04\x00\x01\x00\x00\x00\x00\x00\x00\x00\x17\x01\x04\x00\x01\x00\x00\x00\x01\x00\x00\x00" # Create in-memory file - gdal.FileFromMemBuffer("/vsimem/small.tif", content) + gdal.FileFromMemBuffer(tmp_vsimem / "small.tif", content) - ds = gdal.Open("/vsimem/small.tif") + ds = gdal.Open(tmp_vsimem / "small.tif") assert ds.GetRasterBand(1).Checksum() == 0, "Expected checksum = %d. 
Got = %d" % ( 0, ds.GetRasterBand(1).Checksum(), ) - # Release memory associated to the in-memory file - gdal.Unlink("/vsimem/small.tif") - ############################################################################### # Test that we can workaround a DoS with @@ -1226,23 +1215,19 @@ def method(request): # Test reading a TIFF made of a single-strip that is more than 2GB (#5403) -def test_tiff_read_huge4GB(): +def test_tiff_read_huge4GB(tmp_path): - if not gdaltest.filesystem_supports_sparse_files("tmp"): + if not gdaltest.filesystem_supports_sparse_files(tmp_path): ds = gdal.Open("data/huge4GB.tif") assert ds is not None else: - shutil.copy("data/huge4GB.tif", "tmp/huge4GB.tif") - f = open("tmp/huge4GB.tif", "rb+") + shutil.copy("data/huge4GB.tif", tmp_path / "huge4GB.tif") + f = open(tmp_path / "huge4GB.tif", "rb+") f.seek(65535 * 65535 + 401) f.write(" ".encode("ascii")) f.close() - ds = gdal.Open("tmp/huge4GB.tif") - if ds is None: - os.remove("tmp/huge4GB.tif") - pytest.fail() - ds = None - os.remove("tmp/huge4GB.tif") + ds = gdal.Open(tmp_path / "huge4GB.tif") + assert ds is not None ############################################################################### @@ -2160,17 +2145,15 @@ def test_tiff_read_md1(): # See https://github.com/OSGeo/gdal/issues/4037 -def test_tiff_read_non_conformant_imd(): +def test_tiff_read_non_conformant_imd(tmp_vsimem): gdal.FileFromMemBuffer( - "/vsimem/test.imd", + tmp_vsimem / "test.imd", """BEGIN_GROUP = foo\n\tkey = value with space ' not quoted;\n\tkey2 = another one ;\r\nEND_GROUP\nEND\n""", ) - gdal.FileFromMemBuffer("/vsimem/test.tif", open("data/byte.tif", "rb").read()) - ds = gdal.Open("/vsimem/test.tif") + gdal.FileFromMemBuffer(tmp_vsimem / "test.tif", open("data/byte.tif", "rb").read()) + ds = gdal.Open(tmp_vsimem / "test.tif") md = ds.GetMetadata("IMD") - gdal.Unlink("/vsimem/test.imd") - gdal.Unlink("/vsimem/test.tif") assert md == {"foo.key": "value with space ' not quoted", "foo.key2": "another one"} @@ -2537,7 +2520,7 @@ def test_tiff_read_md11(): # Check read Dimap metadata format -def test_tiff_read_md12(): +def test_tiff_read_md12(tmp_path): ds = gdal.Open( "../gdrivers/data/dimap2/single_component/IMG_foo_R2C1.TIF", gdal.GA_ReadOnly @@ -2572,20 +2555,17 @@ def test_tiff_read_md12(): # Test not valid DIMAP product [https://github.com/OSGeo/gdal/issues/431] shutil.copy( "../gdrivers/data/dimap2/single_component/IMG_foo_R2C1.TIF", - "tmp/IMG_foo_temp.TIF", + tmp_path / "IMG_foo_temp.TIF", ) shutil.copy( - "../gdrivers/data/dimap2/single_component/DIM_foo.XML", "tmp/DIM_foo.XML" + "../gdrivers/data/dimap2/single_component/DIM_foo.XML", tmp_path / "DIM_foo.XML" ) shutil.copy( - "../gdrivers/data/dimap2/single_component/RPC_foo.XML", "tmp/RPC_foo.XML" + "../gdrivers/data/dimap2/single_component/RPC_foo.XML", tmp_path / "RPC_foo.XML" ) - ds = gdal.Open("tmp/IMG_foo_temp.TIF", gdal.GA_ReadOnly) + ds = gdal.Open(tmp_path / "IMG_foo_temp.TIF", gdal.GA_ReadOnly) filelist = ds.GetFileList() ds = None - gdal.Unlink("tmp/IMG_foo_temp.TIF") - gdal.Unlink("tmp/DIM_foo.XML") - gdal.Unlink("tmp/RPC_foo.XML") assert len(filelist) <= 1, "did not get expected file list." 
@@ -2695,82 +2675,86 @@ def test_tiff_read_logl_as_rgba(): # -def test_tiff_read_strip_separate_as_rgba(): +def test_tiff_read_strip_separate_as_rgba(tmp_vsimem): if not gdaltest.supports_force_rgba: pytest.skip() # 3 band gdal.Translate( - "/vsimem/tiff_read_strip_separate_as_rgba.tif", + tmp_vsimem / "tiff_read_strip_separate_as_rgba.tif", "data/rgbsmall.tif", options="-co INTERLEAVE=BAND", ) with gdal.config_option("GTIFF_FORCE_RGBA", "YES"): - ds = gdal.Open("/vsimem/tiff_read_strip_separate_as_rgba.tif") + ds = gdal.Open(tmp_vsimem / "tiff_read_strip_separate_as_rgba.tif") got_cs = [ds.GetRasterBand(i + 1).Checksum() for i in range(ds.RasterCount)] assert got_cs == [21212, 21053, 21349, 30658] ds = None - gdal.Unlink("/vsimem/tiff_read_strip_separate_as_rgba.tif") + +def test_tiff_read_strip_separate_as_rgba_bis(tmp_vsimem): + + if not gdaltest.supports_force_rgba: + pytest.skip() # 3 band with PHOTOMETRIC_MINISBLACK to trigger gtStripSeparate() to # use the single band code path gdal.Translate( - "/vsimem/tiff_read_strip_separate_as_rgba.tif", + tmp_vsimem / "tiff_read_strip_separate_as_rgba.tif", "data/rgbsmall.tif", options="-co INTERLEAVE=BAND -co PHOTOMETRIC=MINISBLACK", ) with gdal.config_option("GTIFF_FORCE_RGBA", "YES"): - ds = gdal.Open("/vsimem/tiff_read_strip_separate_as_rgba.tif") + ds = gdal.Open(tmp_vsimem / "tiff_read_strip_separate_as_rgba.tif") got_cs = [ds.GetRasterBand(i + 1).Checksum() for i in range(ds.RasterCount)] assert got_cs == [21212, 21212, 21212, 30658] ds = None - gdal.Unlink("/vsimem/tiff_read_strip_separate_as_rgba.tif") - ############################################################################### # -def test_tiff_read_tiled_separate_as_rgba(): +def test_tiff_read_tiled_separate_as_rgba(tmp_vsimem): if not gdaltest.supports_force_rgba: pytest.skip() # 3 band gdal.Translate( - "/vsimem/tiff_read_tiled_separate_as_rgba.tif", + tmp_vsimem / "tiff_read_tiled_separate_as_rgba.tif", "data/rgbsmall.tif", options="-co TILED=YES -co INTERLEAVE=BAND", ) with gdal.config_option("GTIFF_FORCE_RGBA", "YES"): - ds = gdal.Open("/vsimem/tiff_read_tiled_separate_as_rgba.tif") + ds = gdal.Open(tmp_vsimem / "tiff_read_tiled_separate_as_rgba.tif") got_cs = [ds.GetRasterBand(i + 1).Checksum() for i in range(ds.RasterCount)] assert got_cs == [21212, 21053, 21349, 30658] ds = None - gdal.Unlink("/vsimem/tiff_read_tiled_separate_as_rgba.tif") + +def test_tiff_read_tiled_separate_as_rgba_bis(tmp_vsimem): + + if not gdaltest.supports_force_rgba: + pytest.skip() # Single band gdal.Translate( - "/vsimem/tiff_read_tiled_separate_as_rgba.tif", + tmp_vsimem / "tiff_read_tiled_separate_as_rgba.tif", "data/byte.tif", options="-co TILED=YES -co INTERLEAVE=BAND", ) with gdal.config_option("GTIFF_FORCE_RGBA", "YES"): - ds = gdal.Open("/vsimem/tiff_read_tiled_separate_as_rgba.tif") + ds = gdal.Open(tmp_vsimem / "tiff_read_tiled_separate_as_rgba.tif") got_cs = [ds.GetRasterBand(i + 1).Checksum() for i in range(ds.RasterCount)] assert got_cs == [4672, 4672, 4672, 4873] ds = None - gdal.Unlink("/vsimem/tiff_read_tiled_separate_as_rgba.tif") - ############################################################################### # @@ -2815,9 +2799,9 @@ def test_tiff_read_one_strip_no_bytecount(): # Test GDAL_GEOREF_SOURCES -def test_tiff_read_nogeoref(): - - tests = [ +@pytest.mark.parametrize( + "config_option_value,copy_pam,copy_worldfile,copy_tabfile,expected_srs,expected_gt", + [ (None, True, True, False, 'LOCAL_CS["PAM"]', (1.0, 2.0, 3.0, 4.0, 5.0, 6.0)), (None, True, True, True, 
'LOCAL_CS["PAM"]', (1.0, 2.0, 3.0, 4.0, 5.0, 6.0)), ( @@ -2931,96 +2915,98 @@ def test_tiff_read_nogeoref(): (99.5, 1.0, 0.0, 200.5, 0.0, -1.0), ), ("NONE", True, True, False, "", (0.0, 1.0, 0.0, 0.0, 0.0, 1.0)), - ] + ], +) +def test_tiff_read_nogeoref( + tmp_vsimem, + config_option_value, + copy_pam, + copy_worldfile, + copy_tabfile, + expected_srs, + expected_gt, +): - for ( - config_option_value, - copy_pam, - copy_worldfile, - copy_tabfile, - expected_srs, - expected_gt, - ) in tests: - for iteration in range(2): - with gdal.config_option("GDAL_GEOREF_SOURCES", config_option_value): + for iteration in range(2): + with gdal.config_option("GDAL_GEOREF_SOURCES", config_option_value): + gdal.FileFromMemBuffer( + tmp_vsimem / "byte_nogeoref.tif", + open("data/byte_nogeoref.tif", "rb").read(), + ) + if copy_pam: gdal.FileFromMemBuffer( - "/vsimem/byte_nogeoref.tif", - open("data/byte_nogeoref.tif", "rb").read(), + tmp_vsimem / "byte_nogeoref.tif.aux.xml", + open("data/byte_nogeoref.tif.aux.xml", "rb").read(), ) - if copy_pam: - gdal.FileFromMemBuffer( - "/vsimem/byte_nogeoref.tif.aux.xml", - open("data/byte_nogeoref.tif.aux.xml", "rb").read(), - ) - if copy_worldfile: - gdal.FileFromMemBuffer( - "/vsimem/byte_nogeoref.tfw", - open("data/byte_nogeoref.tfw", "rb").read(), - ) - if copy_tabfile: - gdal.FileFromMemBuffer( - "/vsimem/byte_nogeoref.tab", - open("data/byte_nogeoref.tab", "rb").read(), - ) - - ds = gdal.Open("/vsimem/byte_nogeoref.tif") - if iteration == 0: - gt = ds.GetGeoTransform() - srs_wkt = ds.GetProjectionRef() - else: - srs_wkt = ds.GetProjectionRef() - gt = ds.GetGeoTransform() - ds = None - with gdal.ExceptionMgr(useExceptions=False): - gdal.Unlink("/vsimem/byte_nogeoref.tif") - gdal.Unlink("/vsimem/byte_nogeoref.tif.aux.xml") - gdal.Unlink("/vsimem/byte_nogeoref.tfw") - gdal.Unlink("/vsimem/byte_nogeoref.tab") - - if gt != expected_gt: - print("Got " + str(gt)) - print("Expected " + str(expected_gt)) - pytest.fail( - "Iteration %d, did not get expected gt for %s,copy_pam=%s,copy_worldfile=%s,copy_tabfile=%s" - % ( - iteration, - config_option_value, - str(copy_pam), - str(copy_worldfile), - str(copy_tabfile), - ) + if copy_worldfile: + gdal.FileFromMemBuffer( + tmp_vsimem / "byte_nogeoref.tfw", + open("data/byte_nogeoref.tfw", "rb").read(), + ) + if copy_tabfile: + gdal.FileFromMemBuffer( + tmp_vsimem / "byte_nogeoref.tab", + open("data/byte_nogeoref.tab", "rb").read(), ) - if ( - expected_srs == 'LOCAL_CS["PAM"]' - and srs_wkt - == 'LOCAL_CS["PAM",UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH]]' - ): - pass # ok - elif (expected_srs == "" and srs_wkt != "") or ( - expected_srs != "" and expected_srs not in srs_wkt - ): - print("Got " + srs_wkt) - print("Expected " + expected_srs) - pytest.fail( - "Iteration %d, did not get expected SRS for %s,copy_pam=%s,copy_worldfile=%s,copy_tabfile=%s" - % ( - iteration, - config_option_value, - str(copy_pam), - str(copy_worldfile), - str(copy_tabfile), - ) + ds = gdal.Open(tmp_vsimem / "byte_nogeoref.tif") + if iteration == 0: + gt = ds.GetGeoTransform() + srs_wkt = ds.GetProjectionRef() + else: + srs_wkt = ds.GetProjectionRef() + gt = ds.GetGeoTransform() + ds = None + with gdal.ExceptionMgr(useExceptions=False): + gdal.Unlink(tmp_vsimem / "byte_nogeoref.tif") + gdal.Unlink(tmp_vsimem / "byte_nogeoref.tif.aux.xml") + gdal.Unlink(tmp_vsimem / "byte_nogeoref.tfw") + gdal.Unlink(tmp_vsimem / "byte_nogeoref.tab") + + if gt != expected_gt: + print("Got " + str(gt)) + print("Expected " + 
str(expected_gt)) + pytest.fail( + "Iteration %d, did not get expected gt for %s,copy_pam=%s,copy_worldfile=%s,copy_tabfile=%s" + % ( + iteration, + config_option_value, + str(copy_pam), + str(copy_worldfile), + str(copy_tabfile), ) + ) + + if ( + expected_srs == 'LOCAL_CS["PAM"]' + and srs_wkt + == 'LOCAL_CS["PAM",UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH]]' + ): + pass # ok + elif (expected_srs == "" and srs_wkt != "") or ( + expected_srs != "" and expected_srs not in srs_wkt + ): + print("Got " + srs_wkt) + print("Expected " + expected_srs) + pytest.fail( + "Iteration %d, did not get expected SRS for %s,copy_pam=%s,copy_worldfile=%s,copy_tabfile=%s" + % ( + iteration, + config_option_value, + str(copy_pam), + str(copy_worldfile), + str(copy_tabfile), + ) + ) ############################################################################### # Test GDAL_GEOREF_SOURCES -def test_tiff_read_inconsistent_georef(): - - tests = [ +@pytest.mark.parametrize( + "config_option_value,copy_pam,copy_worldfile,copy_tabfile,expected_srs,expected_gt", + [ (None, True, True, True, 'LOCAL_CS["PAM"]', (1.0, 2.0, 3.0, 4.0, 5.0, 6.0)), ( None, @@ -3092,95 +3078,97 @@ def test_tiff_read_inconsistent_georef(): "_1936", (400000.0, 25.0, 0.0, 1300000.0, 0.0, -25.0), ), - ] + ], +) +def test_tiff_read_inconsistent_georef( + tmp_vsimem, + config_option_value, + copy_pam, + copy_worldfile, + copy_tabfile, + expected_srs, + expected_gt, +): - for ( - config_option_value, - copy_pam, - copy_worldfile, - copy_tabfile, - expected_srs, - expected_gt, - ) in tests: - for iteration in range(2): - with gdal.config_option("GDAL_GEOREF_SOURCES", config_option_value): + for iteration in range(2): + with gdal.config_option("GDAL_GEOREF_SOURCES", config_option_value): + gdal.FileFromMemBuffer( + tmp_vsimem / "byte_inconsistent_georef.tif", + open("data/byte_inconsistent_georef.tif", "rb").read(), + ) + if copy_pam: gdal.FileFromMemBuffer( - "/vsimem/byte_inconsistent_georef.tif", - open("data/byte_inconsistent_georef.tif", "rb").read(), + tmp_vsimem / "byte_inconsistent_georef.tif.aux.xml", + open("data/byte_inconsistent_georef.tif.aux.xml", "rb").read(), ) - if copy_pam: - gdal.FileFromMemBuffer( - "/vsimem/byte_inconsistent_georef.tif.aux.xml", - open("data/byte_inconsistent_georef.tif.aux.xml", "rb").read(), - ) - if copy_worldfile: - gdal.FileFromMemBuffer( - "/vsimem/byte_inconsistent_georef.tfw", - open("data/byte_inconsistent_georef.tfw", "rb").read(), - ) - if copy_tabfile: - gdal.FileFromMemBuffer( - "/vsimem/byte_inconsistent_georef.tab", - open("data/byte_inconsistent_georef.tab", "rb").read(), - ) - ds = gdal.Open("/vsimem/byte_inconsistent_georef.tif") - if iteration == 0: - gt = ds.GetGeoTransform() - srs_wkt = ds.GetProjectionRef() - else: - srs_wkt = ds.GetProjectionRef() - gt = ds.GetGeoTransform() - ds = None - with gdal.ExceptionMgr(useExceptions=False): - gdal.Unlink("/vsimem/byte_inconsistent_georef.tif") - gdal.Unlink("/vsimem/byte_inconsistent_georef.tif.aux.xml") - gdal.Unlink("/vsimem/byte_inconsistent_georef.tfw") - gdal.Unlink("/vsimem/byte_inconsistent_georef.tab") - - if gt != expected_gt: - print("Got " + str(gt)) - print("Expected " + str(expected_gt)) - pytest.fail( - "Iteration %d, did not get expected gt for %s,copy_pam=%s,copy_worldfile=%s,copy_tabfile=%s" - % ( - iteration, - config_option_value, - str(copy_pam), - str(copy_worldfile), - str(copy_tabfile), - ) + if copy_worldfile: + gdal.FileFromMemBuffer( + tmp_vsimem / "byte_inconsistent_georef.tfw", + 
open("data/byte_inconsistent_georef.tfw", "rb").read(), + ) + if copy_tabfile: + gdal.FileFromMemBuffer( + tmp_vsimem / "byte_inconsistent_georef.tab", + open("data/byte_inconsistent_georef.tab", "rb").read(), + ) + ds = gdal.Open(tmp_vsimem / "byte_inconsistent_georef.tif") + if iteration == 0: + gt = ds.GetGeoTransform() + srs_wkt = ds.GetProjectionRef() + else: + srs_wkt = ds.GetProjectionRef() + gt = ds.GetGeoTransform() + ds = None + with gdal.ExceptionMgr(useExceptions=False): + gdal.Unlink(tmp_vsimem / "byte_inconsistent_georef.tif") + gdal.Unlink(tmp_vsimem / "byte_inconsistent_georef.tif.aux.xml") + gdal.Unlink(tmp_vsimem / "byte_inconsistent_georef.tfw") + gdal.Unlink(tmp_vsimem / "byte_inconsistent_georef.tab") + + if gt != expected_gt: + print("Got " + str(gt)) + print("Expected " + str(expected_gt)) + pytest.fail( + "Iteration %d, did not get expected gt for %s,copy_pam=%s,copy_worldfile=%s,copy_tabfile=%s" + % ( + iteration, + config_option_value, + str(copy_pam), + str(copy_worldfile), + str(copy_tabfile), ) + ) - if ( - expected_srs == 'LOCAL_CS["PAM"]' - and srs_wkt - == 'LOCAL_CS["PAM",UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH]]' - ): - pass # ok - elif (expected_srs == "" and srs_wkt != "") or ( - expected_srs != "" and expected_srs not in srs_wkt - ): - print("Got " + srs_wkt) - print("Expected " + expected_srs) - pytest.fail( - "Iteration %d, did not get expected SRS for %s,copy_pam=%s,copy_worldfile=%s,copy_tabfile=%s" - % ( - iteration, - config_option_value, - str(copy_pam), - str(copy_worldfile), - str(copy_tabfile), - ) + if ( + expected_srs == 'LOCAL_CS["PAM"]' + and srs_wkt + == 'LOCAL_CS["PAM",UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH]]' + ): + pass # ok + elif (expected_srs == "" and srs_wkt != "") or ( + expected_srs != "" and expected_srs not in srs_wkt + ): + print("Got " + srs_wkt) + print("Expected " + expected_srs) + pytest.fail( + "Iteration %d, did not get expected SRS for %s,copy_pam=%s,copy_worldfile=%s,copy_tabfile=%s" + % ( + iteration, + config_option_value, + str(copy_pam), + str(copy_worldfile), + str(copy_tabfile), ) + ) ############################################################################### # Test GDAL_GEOREF_SOURCES -def test_tiff_read_gcp_internal_and_auxxml(): - - tests = [ +@pytest.mark.parametrize( + "config_option_value,copy_pam,expected_srs,expected_gcp_count", + [ (None, True, 'LOCAL_CS["PAM"]', 1), (None, False, "4326", 2), ("INTERNAL", True, "4326", 2), @@ -3191,56 +3179,59 @@ def test_tiff_read_gcp_internal_and_auxxml(): ("PAM", False, "", 0), ("PAM,INTERNAL", True, 'LOCAL_CS["PAM"]', 1), ("PAM,INTERNAL", False, "4326", 2), - ] + ], +) +def test_tiff_read_gcp_internal_and_auxxml( + tmp_vsimem, config_option_value, copy_pam, expected_srs, expected_gcp_count +): - for (config_option_value, copy_pam, expected_srs, expected_gcp_count) in tests: - for iteration in range(2): + for iteration in range(2): + gdal.FileFromMemBuffer( + tmp_vsimem / "byte_gcp.tif", open("data/byte_gcp.tif", "rb").read() + ) + if copy_pam: gdal.FileFromMemBuffer( - "/vsimem/byte_gcp.tif", open("data/byte_gcp.tif", "rb").read() + tmp_vsimem / "byte_gcp.tif.aux.xml", + open("data/byte_gcp.tif.aux.xml", "rb").read(), + ) + open_options = [] + if config_option_value is not None: + open_options += ["GEOREF_SOURCES=" + config_option_value] + ds = gdal.OpenEx(tmp_vsimem / "byte_gcp.tif", open_options=open_options) + if iteration == 0: + gcp_count = ds.GetGCPCount() + srs_wkt = 
ds.GetGCPProjection() + else: + srs_wkt = ds.GetGCPProjection() + gcp_count = ds.GetGCPCount() + ds = None + with gdal.ExceptionMgr(useExceptions=False): + gdal.Unlink(tmp_vsimem / "byte_gcp.tif") + gdal.Unlink(tmp_vsimem / "byte_gcp.tif.aux.xml") + + if gcp_count != expected_gcp_count: + print("Got " + str(gcp_count)) + print("Expected " + str(expected_gcp_count)) + pytest.fail( + "Iteration %d, did not get expected gcp count for %s,copy_pam=%s" + % (iteration, config_option_value, str(copy_pam)) ) - if copy_pam: - gdal.FileFromMemBuffer( - "/vsimem/byte_gcp.tif.aux.xml", - open("data/byte_gcp.tif.aux.xml", "rb").read(), - ) - open_options = [] - if config_option_value is not None: - open_options += ["GEOREF_SOURCES=" + config_option_value] - ds = gdal.OpenEx("/vsimem/byte_gcp.tif", open_options=open_options) - if iteration == 0: - gcp_count = ds.GetGCPCount() - srs_wkt = ds.GetGCPProjection() - else: - srs_wkt = ds.GetGCPProjection() - gcp_count = ds.GetGCPCount() - ds = None - with gdal.ExceptionMgr(useExceptions=False): - gdal.Unlink("/vsimem/byte_gcp.tif") - gdal.Unlink("/vsimem/byte_gcp.tif.aux.xml") - - if gcp_count != expected_gcp_count: - print("Got " + str(gcp_count)) - print("Expected " + str(expected_gcp_count)) - pytest.fail( - "Iteration %d, did not get expected gcp count for %s,copy_pam=%s" - % (iteration, config_option_value, str(copy_pam)) - ) - if ( - expected_srs == 'LOCAL_CS["PAM"]' - and srs_wkt - == 'LOCAL_CS["PAM",UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH]]' - ): - pass # ok - elif (expected_srs == "" and srs_wkt != "") or ( - expected_srs != "" and expected_srs not in srs_wkt - ): - print("Got " + srs_wkt) - print("Expected " + expected_srs) - pytest.fail( - "Iteration %d, did not get expected SRS for %s,copy_pam=%s" - % (iteration, config_option_value, str(copy_pam)) - ) + if ( + expected_srs == 'LOCAL_CS["PAM"]' + and srs_wkt + == 'LOCAL_CS["PAM",UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH]]' + ): + pass # ok + elif (expected_srs == "" and srs_wkt != "") or ( + expected_srs != "" and expected_srs not in srs_wkt + ): + print("Got " + srs_wkt) + print("Expected " + expected_srs) + pytest.fail( + "Iteration %d, did not get expected SRS for %s,copy_pam=%s" + % (iteration, config_option_value, str(copy_pam)) + ) ############################################################################### @@ -3270,25 +3261,22 @@ def test_tiff_read_aux(): ) -def test_tiff_read_one_band_from_two_bands(): +def test_tiff_read_one_band_from_two_bands(tmp_vsimem): gdal.Translate( - "/vsimem/tiff_read_one_band_from_two_bands.tif", + tmp_vsimem / "tiff_read_one_band_from_two_bands.tif", "data/byte.tif", options="-b 1 -b 1", ) gdal.Translate( - "/vsimem/tiff_read_one_band_from_two_bands_dst.tif", - "/vsimem/tiff_read_one_band_from_two_bands.tif", + tmp_vsimem / "tiff_read_one_band_from_two_bands_dst.tif", + tmp_vsimem / "tiff_read_one_band_from_two_bands.tif", options="-b 1", ) - ds = gdal.Open("/vsimem/tiff_read_one_band_from_two_bands_dst.tif") + ds = gdal.Open(tmp_vsimem / "tiff_read_one_band_from_two_bands_dst.tif") assert ds.GetRasterBand(1).Checksum() == 4672 ds = None - gdal.Unlink("/vsimem/tiff_read_one_band_from_two_bands.tif") - gdal.Unlink("/vsimem/tiff_read_one_band_from_two_bands.tif.aux.xml") - gdal.Unlink("/vsimem/tiff_read_one_band_from_two_bands_dst.tif") @pytest.mark.require_creation_option("GTiff", "JPEG") @@ -3327,9 +3315,9 @@ def test_tiff_read_corrupted_jpeg_cloud_optimized(): # Test 
reading YCbCr images with LZW compression -def test_tiff_read_ycbcr_lzw(): - - tests = [ +@pytest.mark.parametrize( + "filename,cs1,cs2,cs3", + [ ("ycbcr_11_lzw.tif", 13459, 12939, 12414), ("ycbcr_12_lzw.tif", 13565, 13105, 12660), ("ycbcr_14_lzw.tif", -1, -1, -1), # not supported @@ -3341,27 +3329,28 @@ def test_tiff_read_ycbcr_lzw(): ("ycbcr_42_lzw_optimized.tif", 19918, 20120, 19087), ("ycbcr_44_lzw.tif", 12994, 13229, 12149), ("ycbcr_44_lzw_optimized.tif", 19666, 19860, 18836), - ] + ], +) +def test_tiff_read_ycbcr_lzw(filename, cs1, cs2, cs3): - for (filename, cs1, cs2, cs3) in tests: - ds = gdal.Open("data/" + filename) - if cs1 == -1: - with pytest.raises(Exception): - ds.GetRasterBand(1).Checksum() - with pytest.raises(Exception): - ds.GetRasterBand(2).Checksum() - with pytest.raises(Exception): - ds.GetRasterBand(3).Checksum() - else: - got_cs1 = ds.GetRasterBand(1).Checksum() - got_cs2 = ds.GetRasterBand(2).Checksum() - got_cs3 = ds.GetRasterBand(3).Checksum() - assert got_cs1 == cs1 and got_cs2 == cs2 and got_cs3 == cs3, ( - filename, - got_cs1, - got_cs2, - got_cs3, - ) + ds = gdal.Open("data/" + filename) + if cs1 == -1: + with pytest.raises(Exception): + ds.GetRasterBand(1).Checksum() + with pytest.raises(Exception): + ds.GetRasterBand(2).Checksum() + with pytest.raises(Exception): + ds.GetRasterBand(3).Checksum() + else: + got_cs1 = ds.GetRasterBand(1).Checksum() + got_cs2 = ds.GetRasterBand(2).Checksum() + got_cs3 = ds.GetRasterBand(3).Checksum() + assert got_cs1 == cs1 and got_cs2 == cs2 and got_cs3 == cs3, ( + filename, + got_cs1, + got_cs2, + got_cs3, + ) ############################################################################### @@ -3379,9 +3368,9 @@ def test_tiff_read_ycbcr_int12(): # Test reading band unit from VERT_CS unit (#6675) -def test_tiff_read_unit_from_srs(): +def test_tiff_read_unit_from_srs(tmp_vsimem): - filename = "/vsimem/tiff_read_unit_from_srs.tif" + filename = tmp_vsimem / "tiff_read_unit_from_srs.tif" ds = gdal.GetDriverByName("GTiff").Create(filename, 1, 1) sr = osr.SpatialReference() sr.SetFromUserInput("EPSG:4326+3855") @@ -3393,8 +3382,6 @@ def test_tiff_read_unit_from_srs(): assert unit == "metre" ds = None - gdal.Unlink(filename) - ############################################################################### # Test reading ArcGIS 9.3 .aux.xml @@ -3595,61 +3582,58 @@ def test_tiff_read_excessive_memory_TIFFFillTile(): ############################################################################### -def test_tiff_read_big_strip(): +def test_tiff_read_big_strip(tmp_vsimem): if not check_libtiff_internal_or_at_least(4, 0, 8): pytest.skip() gdal.Translate( - "/vsimem/test.tif", + tmp_vsimem / "test.tif", "data/byte.tif", options="-co compress=lzw -outsize 10000 2000 -co blockysize=2000 -r bilinear -ot float32", ) if gdal.GetLastErrorMsg().find("cannot allocate") >= 0: pytest.skip() - ds = gdal.Open("/vsimem/test.tif") + ds = gdal.Open(tmp_vsimem / "test.tif") assert ds.GetRasterBand(1).Checksum() == 2676 ds = None - gdal.Unlink("/vsimem/test.tif") ############################################################################### # (Potentially) test libtiff CHUNKY_STRIP_READ_SUPPORT -def test_tiff_read_big_strip_chunky_way(): +def test_tiff_read_big_strip_chunky_way(tmp_vsimem): gdal.Translate( - "/vsimem/test.tif", + tmp_vsimem / "test.tif", "data/byte.tif", options="-co compress=lzw -outsize 1000 2001 -co blockysize=2001 -r bilinear", ) - ds = gdal.Open("/vsimem/test.tif") + ds = gdal.Open(tmp_vsimem / "test.tif") cs = 
ds.GetRasterBand(1).Checksum() assert cs == 38441 ds = None - gdal.Unlink("/vsimem/test.tif") ############################################################################### -def test_tiff_read_big_tile(): +def test_tiff_read_big_tile(tmp_vsimem): if not check_libtiff_internal_or_at_least(4, 0, 8): pytest.skip() gdal.Translate( - "/vsimem/test.tif", + tmp_vsimem / "test.tif", "data/byte.tif", options="-co compress=lzw -outsize 10000 2000 -co tiled=yes -co blockxsize=10000 -co blockysize=2000 -r bilinear -ot float32", ) if gdal.GetLastErrorMsg().find("cannot allocate") >= 0: pytest.skip() - ds = gdal.Open("/vsimem/test.tif") + ds = gdal.Open(tmp_vsimem / "test.tif") assert ds.GetRasterBand(1).Checksum() == 2676 ds = None - gdal.Unlink("/vsimem/test.tif") ############################################################################### @@ -3692,20 +3676,19 @@ def test_tiff_read_huge_implied_number_strips(): ############################################################################### -def test_tiff_read_many_blocks(): +def test_tiff_read_many_blocks(tmp_vsimem): md = gdal.GetDriverByName("GTiff").GetMetadata() if md["LIBTIFF"] != "INTERNAL": pytest.skip() ds = gdal.GetDriverByName("GTiff").Create( - "/vsimem/test.tif", 1, 2000000, options=["BLOCKYSIZE=1"] + tmp_vsimem / "test.tif", 1, 2000000, options=["BLOCKYSIZE=1"] ) ds = None - ds = gdal.Open("/vsimem/test.tif") + ds = gdal.Open(tmp_vsimem / "test.tif") assert ds.GetRasterBand(1).Checksum() == 0 ds = None - gdal.Unlink("/vsimem/test.tif") ############################################################################### @@ -3806,9 +3789,9 @@ def test_tiff_read_size_of_stripbytecount_lower_than_stripcount(): # Test different datatypes for StripOffsets tag with little/big, classic/bigtiff -def test_tiff_read_stripoffset_types(): - - tests = [ +@pytest.mark.parametrize( + "filename,expected_offsets", + [ ("data/classictiff_one_block_byte.tif", []), # unsupported ("data/classictiff_one_block_long.tif", [158]), ("data/classictiff_one_block_be_long.tif", [158]), @@ -3830,29 +3813,27 @@ def test_tiff_read_stripoffset_types(): ("data/bigtiff_two_strip_be_long.tif", [284, 285]), ("data/bigtiff_two_strip_long8.tif", [284, 285]), ("data/bigtiff_two_strip_be_long8.tif", [284, 285]), - ] - - for (filename, expected_offsets) in tests: + ], +) +def test_tiff_read_stripoffset_types(filename, expected_offsets): - # Only when built against internal libtiff we reject byte datatype - if ( - not expected_offsets - and gdal.GetDriverByName("GTiff").GetMetadataItem("LIBTIFF") != "INTERNAL" - ): - continue + # Only when built against internal libtiff we reject byte datatype + if ( + not expected_offsets + and gdal.GetDriverByName("GTiff").GetMetadataItem("LIBTIFF") != "INTERNAL" + ): + pytest.skip() - ds = gdal.Open(filename) - offsets = [] - for row in range(4): - with gdal.quiet_errors(): - mdi = ds.GetRasterBand(1).GetMetadataItem( - "BLOCK_OFFSET_0_%d" % row, "TIFF" - ) - if mdi is None: - break - offsets.append(int(mdi)) - if offsets != expected_offsets: - print(filename, expected_offsets, offsets) + ds = gdal.Open(filename) + offsets = [] + for row in range(4): + with gdal.quiet_errors(): + mdi = ds.GetRasterBand(1).GetMetadataItem("BLOCK_OFFSET_0_%d" % row, "TIFF") + if mdi is None: + break + offsets.append(int(mdi)) + if offsets != expected_offsets: + print(filename, expected_offsets, offsets) ############################################################################### @@ -3912,28 +3893,30 @@ def test_tiff_read_old_style_lzw(): # mmap emulation) -def 
test_tiff_read_mmap_interface(): +@pytest.mark.parametrize( + "options", [[], ["TILED=YES"], ["COMPRESS=LZW"], ["COMPRESS=LZW", "TILED=YES"]] +) +def test_tiff_read_mmap_interface(tmp_vsimem, options): src_ds = gdal.Open("data/byte.tif") - tmpfile = "/vsimem/tiff_read_mmap_interface.tif" - for options in [[], ["TILED=YES"], ["COMPRESS=LZW"], ["COMPRESS=LZW", "TILED=YES"]]: - gdal.GetDriverByName("GTiff").CreateCopy(tmpfile, src_ds, options=options) - with gdal.config_option("GTIFF_USE_MMAP", "YES"): - ds = gdal.Open(tmpfile) - cs = ds.GetRasterBand(1).Checksum() - assert cs == 4672, (options, cs) - - f = gdal.VSIFOpenL(tmpfile, "rb") - data = gdal.VSIFReadL(1, gdal.VSIStatL(tmpfile).size - 1, f) - gdal.VSIFCloseL(f) - f = gdal.VSIFOpenL(tmpfile, "wb") - gdal.VSIFWriteL(data, 1, len(data), f) - gdal.VSIFCloseL(f) - with gdal.config_option("GTIFF_USE_MMAP", "YES"): - ds = gdal.Open(tmpfile) - with pytest.raises(Exception): - ds.GetRasterBand(1).Checksum() - gdal.Unlink(tmpfile) + tmpfile = tmp_vsimem / "tiff_read_mmap_interface.tif" + + gdal.GetDriverByName("GTiff").CreateCopy(tmpfile, src_ds, options=options) + with gdal.config_option("GTIFF_USE_MMAP", "YES"): + ds = gdal.Open(tmpfile) + cs = ds.GetRasterBand(1).Checksum() + assert cs == 4672, (options, cs) + + f = gdal.VSIFOpenL(tmpfile, "rb") + data = gdal.VSIFReadL(1, gdal.VSIStatL(tmpfile).size - 1, f) + gdal.VSIFCloseL(f) + f = gdal.VSIFOpenL(tmpfile, "wb") + gdal.VSIFWriteL(data, 1, len(data), f) + gdal.VSIFCloseL(f) + with gdal.config_option("GTIFF_USE_MMAP", "YES"): + ds = gdal.Open(tmpfile) + with pytest.raises(Exception): + ds.GetRasterBand(1).Checksum() ############################################################################### @@ -3983,9 +3966,9 @@ def test_tiff_read_negative_scaley(): @pytest.mark.require_creation_option("GTiff", "ZSTD") -def test_tiff_read_zstd(): +def test_tiff_read_zstd(tmp_path): - ut = gdaltest.GDALTest("GTiff", "byte_zstd.tif", 1, 4672) + ut = gdaltest.GDALTest("GTiff", "byte_zstd.tif", 1, 4672, tmpdir=tmp_path) ut.testOpen() @@ -3994,9 +3977,9 @@ def test_tiff_read_zstd(): @pytest.mark.require_creation_option("GTiff", "ZSTD") -def test_tiff_read_zstd_corrupted(): +def test_tiff_read_zstd_corrupted(tmp_path): - ut = gdaltest.GDALTest("GTiff", "byte_zstd_corrupted.tif", 1, -1) + ut = gdaltest.GDALTest("GTiff", "byte_zstd_corrupted.tif", 1, -1, tmpdir=tmp_path) with pytest.raises(Exception): ut.testOpen() @@ -4006,9 +3989,9 @@ def test_tiff_read_zstd_corrupted(): @pytest.mark.require_creation_option("GTiff", "ZSTD") -def test_tiff_read_zstd_corrupted2(): +def test_tiff_read_zstd_corrupted2(tmp_path): - ut = gdaltest.GDALTest("GTiff", "byte_zstd_corrupted2.tif", 1, -1) + ut = gdaltest.GDALTest("GTiff", "byte_zstd_corrupted2.tif", 1, -1, tmpdir=tmp_path) with pytest.raises(Exception): ut.testOpen() @@ -4018,10 +4001,10 @@ def test_tiff_read_zstd_corrupted2(): @pytest.mark.require_creation_option("GTiff", "WEBP") -def test_tiff_read_webp(): +def test_tiff_read_webp(tmp_path): stats = (0, 215, 66.38, 47.186) - ut = gdaltest.GDALTest("GTiff", "tif_webp.tif", 1, None) + ut = gdaltest.GDALTest("GTiff", "tif_webp.tif", 1, None, tmpdir=tmp_path) ut.testOpen(check_approx_stat=stats, stat_epsilon=1) gdal.Unlink("data/tif_webp.tif.aux.xml") @@ -4051,24 +4034,24 @@ def test_tiff_read_1bit_2bands(): @pytest.mark.require_creation_option("GTiff", "LERC") -def test_tiff_read_lerc(): +def test_tiff_read_lerc(tmp_path): - ut = gdaltest.GDALTest("GTiff", "byte_lerc.tif", 1, 4672) + ut = gdaltest.GDALTest("GTiff", 
"byte_lerc.tif", 1, 4672, tmpdir=tmp_path) ut.testOpen() ############################################################################### -def test_tiff_read_overview_of_external_mask(): +def test_tiff_read_overview_of_external_mask(tmp_vsimem): - filename = "/vsimem/tiff_read_overview_of_external_mask.tif" + filename = tmp_vsimem / "tiff_read_overview_of_external_mask.tif" with gdal.config_option("GDAL_TIFF_INTERNAL_MASK", "NO"): gdal.Translate(filename, "data/byte.tif", options="-b 1 -mask 1") ds = gdal.Open(filename, gdal.GA_Update) ds.BuildOverviews("CUBIC", overviewlist=[2]) ds = None - ds = gdal.Open(filename + ".msk", gdal.GA_Update) + ds = gdal.Open(f"{filename}.msk", gdal.GA_Update) ds.BuildOverviews("NEAREST", overviewlist=[2]) ds = None ds = gdal.Open(filename) @@ -4077,9 +4060,6 @@ def test_tiff_read_overview_of_external_mask(): flags1 = ds.GetRasterBand(1).GetOverview(0).GetMaskFlags() ds = None - gdal.Unlink(filename) - gdal.Unlink(filename + ".msk") - assert cs1 == cs2 assert flags1 == gdal.GMF_PER_DATASET @@ -4177,7 +4157,7 @@ def test_tiff_read_cog_strile_arrays_zeroified_when_possible(): not check_libtiff_internal_or_at_least(4, 0, 11), reason="libtiff >= 4.0.11 required", ) -def test_tiff_read_cog_vsicurl(): +def test_tiff_read_cog_vsicurl(tmp_path): gdal.VSICurlClearCache() @@ -4190,8 +4170,8 @@ def test_tiff_read_cog_vsicurl(): if webserver_port == 0: pytest.skip() - in_filename = "tmp/test_tiff_read_cog_vsicurl_in.tif" - cog_filename = "tmp/test_tiff_read_cog_vsicurl_out.tif" + in_filename = tmp_path / "test_tiff_read_cog_vsicurl_in.tif" + cog_filename = tmp_path / "test_tiff_read_cog_vsicurl_out.tif" try: src_ds = gdal.GetDriverByName("GTIFF").Create( @@ -4316,7 +4296,7 @@ def method(request): os.environ.get("BUILD_NAME", "") == "s390x", reason="Fails on that platform", ) -def test_tiff_read_cog_with_mask_vsicurl(): +def test_tiff_read_cog_with_mask_vsicurl(tmp_path): gdal.VSICurlClearCache() @@ -4329,8 +4309,8 @@ def test_tiff_read_cog_with_mask_vsicurl(): if webserver_port == 0: pytest.skip() - in_filename = "tmp/test_tiff_read_cog_with_mask_vsicurl_in.tif" - cog_filename = "tmp/test_tiff_read_cog_with_mask_vsicurl_out.tif" + in_filename = tmp_path / "test_tiff_read_cog_with_mask_vsicurl_in.tif" + cog_filename = tmp_path / "test_tiff_read_cog_with_mask_vsicurl_out.tif" try: src_ds = gdal.GetDriverByName("GTIFF").Create( @@ -4505,30 +4485,28 @@ def test_tiff_read_geodetic_tiff_grid(): # related to precomposed vs decomposed UTF-8 filenames on MacOSX -def test_tiff_read_utf8_encoding_issue_2903(): +def test_tiff_read_utf8_encoding_issue_2903(tmp_path): if gdaltest.is_travis_branch("mingw_w64"): pytest.skip() precomposed_utf8 = b"\xc3\xa4".decode("utf-8") - tmp_tif_filename = "tmp/%s.tif" % precomposed_utf8 - tmp_tfw_filename = "tmp/%s.tfw" % precomposed_utf8 + tmp_tif_filename = tmp_path / ("%s.tif" % precomposed_utf8) + tmp_tfw_filename = tmp_path / ("%s.tfw" % precomposed_utf8) open(tmp_tif_filename, "wb").write(open("data/byte_nogeoref.tif", "rb").read()) open(tmp_tfw_filename, "wb").write(open("data/byte_nogeoref.tfw", "rb").read()) ds = gdal.Open(tmp_tif_filename) assert ds.GetGeoTransform()[0] != 0 ds = None - os.unlink(tmp_tif_filename) - os.unlink(tmp_tfw_filename) ############################################################################### # Check over precision issue with nodata and Float32 (#3791) -def test_tiff_read_overprecision_nodata_float32(): +def test_tiff_read_overprecision_nodata_float32(tmp_vsimem): - filename = 
"/vsimem/test_tiff_read_overprecision_nodata_float32.tif" + filename = tmp_vsimem / "test_tiff_read_overprecision_nodata_float32.tif" ds = gdal.GetDriverByName("GTiff").Create(filename, 1, 1, 1, gdal.GDT_Float32) ds.GetRasterBand(1).SetNoDataValue(-3.4e38) ds.GetRasterBand(1).Fill(-3.4e38) @@ -4543,7 +4521,6 @@ def test_tiff_read_overprecision_nodata_float32(): == ds.GetRasterBand(1).GetNoDataValue() ) ds = None - gdal.Unlink(filename) ############################################################################### @@ -4753,7 +4730,7 @@ def test_tiff_jxl_read_for_files_created_before_6393(): ], ) def test_tiff_read_multi_threaded( - reopen, write_after_reopen, xsize, ysize, nbands, dtype, creation_options + tmp_path, reopen, write_after_reopen, xsize, ysize, nbands, dtype, creation_options ): assert creation_options[0].startswith("COMPRESS=") @@ -4774,7 +4751,7 @@ def test_tiff_read_multi_threaded( 0, 0, ref_ds.RasterXSize, ref_ds.RasterYSize, buf, buf_type=gdal.GDT_Byte ) - tmpfile = "tmp/test_tiff_read_multi_threaded.tif" + tmpfile = tmp_path / "test_tiff_read_multi_threaded.tif" if not reopen: creation_options += ["NUM_THREADS=ALL_CPUS"] ds = gdal.GetDriverByName("GTiff").Create( @@ -4871,7 +4848,6 @@ def test_tiff_read_multi_threaded( ) ds = None - gdal.Unlink(tmpfile) ############################################################################### From 60e7c2da53a740f4a80c04097a659eaac97c3c10 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Tue, 2 Apr 2024 20:03:20 -0400 Subject: [PATCH 059/230] autotest ogr_fgdb.py: mark as order-dependent, add missing argument --- autotest/ogr/ogr_fgdb.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/autotest/ogr/ogr_fgdb.py b/autotest/ogr/ogr_fgdb.py index 6b335d5b269d..c43f9db3c9f0 100755 --- a/autotest/ogr/ogr_fgdb.py +++ b/autotest/ogr/ogr_fgdb.py @@ -39,7 +39,10 @@ from osgeo import gdal, ogr, osr -pytestmark = pytest.mark.require_driver("FileGDB") +pytestmark = [ + pytest.mark.require_driver("FileGDB"), + pytest.mark.random_order(disabled=True), +] ############################################################################### @pytest.fixture(autouse=True, scope="module") @@ -1697,7 +1700,7 @@ def test_ogr_fgdb_19bis(openfilegdb_drv, fgdb_drv, test_gdb): pytest.skip() with gdal.config_option("FGDB_PER_LAYER_COPYING_TRANSACTION", "FALSE"): - test_ogr_fgdb_19(openfilegdb_drv, fgdb_drv) + test_ogr_fgdb_19(openfilegdb_drv, fgdb_drv, test_gdb) ############################################################################### From 8a2e9e50e716126f19b80d54f3a1bc116167de72 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Tue, 2 Apr 2024 22:08:17 -0400 Subject: [PATCH 060/230] autotest vsiaz.py: use tmp_vsimem --- autotest/gcore/vsiaz.py | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/autotest/gcore/vsiaz.py b/autotest/gcore/vsiaz.py index 047fa403ff4f..688a6d498d24 100755 --- a/autotest/gcore/vsiaz.py +++ b/autotest/gcore/vsiaz.py @@ -1714,15 +1714,15 @@ def test_vsiaz_rmdirrecursive_empty_dir(): @pytest.mark.skipif( gdaltest.is_travis_branch("macos_build"), reason="randomly fails on macos" ) -def test_vsiaz_fake_sync_multithreaded_upload_chunk_size(): +def test_vsiaz_fake_sync_multithreaded_upload_chunk_size(tmp_vsimem): if gdaltest.webserver_port == 0: pytest.skip() gdal.VSICurlClearCache() - gdal.Mkdir("/vsimem/test", 0) - gdal.FileFromMemBuffer("/vsimem/test/foo", "foo\n") + gdal.Mkdir(tmp_vsimem / "test", 0) + 
gdal.FileFromMemBuffer(tmp_vsimem / "test/foo", "foo\n") tab = [-1] handler = webserver.SequentialHandler() @@ -1863,7 +1863,7 @@ def cbk(pct, _, tab): with gdaltest.config_option("VSIS3_SIMULATE_THREADING", "YES", thread_local=False): with webserver.install_http_handler(handler): assert gdal.Sync( - "/vsimem/test", + tmp_vsimem / "test", "/vsiaz/test_bucket", options=["NUM_THREADS=1", "CHUNK_SIZE=3"], callback=cbk, @@ -1871,22 +1871,20 @@ def cbk(pct, _, tab): ) assert tab[0] == 1.0 - gdal.RmdirRecursive("/vsimem/test") - ############################################################################### # Test Sync() and multithreaded download of a single file -def test_vsiaz_fake_sync_multithreaded_upload_single_file(): +def test_vsiaz_fake_sync_multithreaded_upload_single_file(tmp_vsimem): if gdaltest.webserver_port == 0: pytest.skip() gdal.VSICurlClearCache() - gdal.Mkdir("/vsimem/test", 0) - gdal.FileFromMemBuffer("/vsimem/test/foo", "foo\n") + gdal.Mkdir(tmp_vsimem / "test", 0) + gdal.FileFromMemBuffer(tmp_vsimem / "test/foo", "foo\n") handler = webserver.SequentialHandler() handler.add( @@ -1963,13 +1961,11 @@ def method(request): with gdaltest.config_option("VSIS3_SIMULATE_THREADING", "YES", thread_local=False): with webserver.install_http_handler(handler): assert gdal.Sync( - "/vsimem/test/foo", + tmp_vsimem / "test/foo", "/vsiaz/test_bucket", options=["NUM_THREADS=1", "CHUNK_SIZE=3"], ) - gdal.RmdirRecursive("/vsimem/test") - ############################################################################### # Read credentials from simulated Azure VM From 0525c92a37b0668366ccf3ade392dd6c82aed166 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Wed, 3 Apr 2024 09:09:08 -0400 Subject: [PATCH 061/230] autotest vsiadls.py: use tmp_vsimem --- autotest/gcore/vsiadls.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/autotest/gcore/vsiadls.py b/autotest/gcore/vsiadls.py index 8ba8d2080a42..d816378ac009 100755 --- a/autotest/gcore/vsiadls.py +++ b/autotest/gcore/vsiadls.py @@ -53,7 +53,7 @@ def open_for_read(uri): ############################################################################### -@pytest.fixture(autouse=True, scope="module") +@pytest.fixture(autouse=True, scope="function") def startup_and_cleanup(): with gdaltest.config_option("CPL_AZURE_VM_API_ROOT_URL", "disabled"): @@ -1009,15 +1009,15 @@ def test_vsiadls_fake_sync_error_case(): # Test Sync() and multithreaded download of a single file -def test_vsiadls_fake_sync_multithreaded_upload_single_file(): +def test_vsiadls_fake_sync_multithreaded_upload_single_file(tmp_vsimem): if gdaltest.webserver_port == 0: pytest.skip() gdal.VSICurlClearCache() - gdal.Mkdir("/vsimem/test", 0) - gdal.FileFromMemBuffer("/vsimem/test/foo", "foo\n") + gdal.Mkdir(tmp_vsimem / "test", 0) + gdal.FileFromMemBuffer(tmp_vsimem / "test/foo", "foo\n") handler = webserver.SequentialHandler() handler.add("HEAD", "/azure/blob/myaccount/test_bucket?resource=filesystem", 200) @@ -1044,13 +1044,11 @@ def test_vsiadls_fake_sync_multithreaded_upload_single_file(): with gdaltest.config_option("VSIS3_SIMULATE_THREADING", "YES"): with webserver.install_http_handler(handler): assert gdal.Sync( - "/vsimem/test/foo", + tmp_vsimem / "test/foo", "/vsiadls/test_bucket", options=["NUM_THREADS=1", "CHUNK_SIZE=3"], ) - gdal.RmdirRecursive("/vsimem/test") - ############################################################################### # Test GetFileMetadata () / SetFileMetadata() From 26e8748ed807618b283bf18c59ce1d0d4fbf13ef 
Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Wed, 3 Apr 2024 09:10:13 -0400 Subject: [PATCH 062/230] autotest: ogr_pgdump.py: avoid writing to gdaltest module --- autotest/ogr/ogr_pgdump.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/autotest/ogr/ogr_pgdump.py b/autotest/ogr/ogr_pgdump.py index f72db7351c8a..29de4208452a 100755 --- a/autotest/ogr/ogr_pgdump.py +++ b/autotest/ogr/ogr_pgdump.py @@ -76,12 +76,9 @@ def test_ogr_pgdump_1(tmp_path): shp_ds = ogr.Open("data/poly.shp") shp_lyr = shp_ds.GetLayer(0) feat = shp_lyr.GetNextFeature() - gdaltest.poly_feat = [] while feat is not None: - gdaltest.poly_feat.append(feat) - dst_feat.SetFrom(feat) lyr.CreateFeature(dst_feat) @@ -166,12 +163,9 @@ def test_ogr_pgdump_2(tmp_path): shp_ds = ogr.Open("data/poly.shp") shp_lyr = shp_ds.GetLayer(0) feat = shp_lyr.GetNextFeature() - gdaltest.poly_feat = [] while feat is not None: - gdaltest.poly_feat.append(feat) - dst_feat.SetFrom(feat) lyr.CreateFeature(dst_feat) @@ -265,14 +259,11 @@ def test_ogr_pgdump_3(tmp_path): shp_ds = ogr.Open("data/poly.shp") shp_lyr = shp_ds.GetLayer(0) feat = shp_lyr.GetNextFeature() - gdaltest.poly_feat = [] i = 0 while feat is not None: - gdaltest.poly_feat.append(feat) - dst_feat.SetFrom(feat) if i == 0: # Be perverse and test the case where a feature has a geometry From 6715ed1dc19fc91f9249bcfc32cf76815579b907 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Wed, 3 Apr 2024 12:02:19 -0400 Subject: [PATCH 063/230] autotest ogr_georss.py: combine two interdependent tests --- autotest/ogr/ogr_georss.py | 54 +++++++++++++++----------------------- 1 file changed, 21 insertions(+), 33 deletions(-) diff --git a/autotest/ogr/ogr_georss.py b/autotest/ogr/ogr_georss.py index 354eae2d788f..4019e22144eb 100755 --- a/autotest/ogr/ogr_georss.py +++ b/autotest/ogr/ogr_georss.py @@ -144,42 +144,30 @@ def test_ogr_georss_1_atom_ns(): # Test writing a Atom 1.0 document (doesn't need read support) -def test_ogr_georss_1bis(): - - try: - os.remove("tmp/test_atom.xml") - except OSError: - pass - - ds = ogr.GetDriverByName("GeoRSS").CreateDataSource( - "tmp/test_atom.xml", options=["FORMAT=ATOM"] - ) - lyr = ds.CreateLayer("georss") - - for field_value in gdaltest.atom_field_values: - lyr.CreateField(ogr.FieldDefn(field_value[0], field_value[2])) - lyr.CreateField(ogr.FieldDefn("content", ogr.OFTString)) - - dst_feat = ogr.Feature(feature_def=lyr.GetLayerDefn()) - for field_value in gdaltest.atom_field_values: - dst_feat.SetField(field_value[0], field_value[1]) - dst_feat.SetField( - "content", - '<div xmlns="http://www.w3.org/1999/xhtml"><p><i>[Update: The Atom draft is finished.]</i></p></div>', - ) - - assert lyr.CreateFeature(dst_feat) == 0, "CreateFeature failed." 
- - ds = None - - -############################################################################### -# Test reading document created at previous step +def test_ogr_georss_1bis(tmp_path): + + with ogr.GetDriverByName("GeoRSS").CreateDataSource( + tmp_path / "test_atom.xml", options=["FORMAT=ATOM"] + ) as ds: + lyr = ds.CreateLayer("georss") + + for field_value in gdaltest.atom_field_values: + lyr.CreateField(ogr.FieldDefn(field_value[0], field_value[2])) + lyr.CreateField(ogr.FieldDefn("content", ogr.OFTString)) + + dst_feat = ogr.Feature(feature_def=lyr.GetLayerDefn()) + for field_value in gdaltest.atom_field_values: + dst_feat.SetField(field_value[0], field_value[1]) + dst_feat.SetField( + "content", + '<div xmlns="http://www.w3.org/1999/xhtml"><p><i>[Update: The Atom draft is finished.]</i></p></div>', + ) + assert lyr.CreateFeature(dst_feat) == 0, "CreateFeature failed." -def test_ogr_georss_1ter(): + # Test reading document created at previous step - return ogr_georss_test_atom("tmp/test_atom.xml") + ogr_georss_test_atom(tmp_path / "test_atom.xml") ############################################################################### From 7c31a54b94a4f3b71333d4b9248fc1cdfe386792 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Thu, 4 Apr 2024 08:27:39 -0400 Subject: [PATCH 064/230] autotest vsis3.py: avoid setting custom AWS_S3_ENDPOINT in tests that should not use it --- autotest/gcore/vsis3.py | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/autotest/gcore/vsis3.py b/autotest/gcore/vsis3.py index 50e61f5824fd..7d680847b011 100755 --- a/autotest/gcore/vsis3.py +++ b/autotest/gcore/vsis3.py @@ -96,12 +96,24 @@ def aws_test_config(): yield +# Launch a single webserver in a module-scoped fixture. +# Provide the port in a function-scoped fixture so that we only +# set AWS_S3_ENDPOINT for tests that are using it. 
@pytest.fixture(scope="module") -def webserver_port(): +def webserver_launch(): + + process, port = webserver.launch(handler=webserver.DispatcherHttpHandler) + + yield process, port + + webserver.server_stop(process, port) + + +@pytest.fixture(scope="function") +def webserver_port(webserver_launch): + + webserver_process, webserver_port = webserver_launch - webserver_process, webserver_port = webserver.launch( - handler=webserver.DispatcherHttpHandler - ) try: if webserver_port == 0: pytest.skip() @@ -112,13 +124,12 @@ def webserver_port(): finally: gdal.VSICurlClearCache() - webserver.server_stop(webserver_process, webserver_port) - ############################################################################### def test_vsis3_init(aws_test_config): + options = { "AWS_SECRET_ACCESS_KEY": "", "AWS_ACCESS_KEY_ID": "", From b4af5008c494ee4a3da33d631f150a826dedbee6 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Thu, 4 Apr 2024 11:04:02 -0400 Subject: [PATCH 065/230] autotest vsis3.py: use tmp_vsimem fixture --- autotest/gcore/vsis3.py | 242 ++++++++++++++++++---------------------- 1 file changed, 111 insertions(+), 131 deletions(-) diff --git a/autotest/gcore/vsis3.py b/autotest/gcore/vsis3.py index 7d680847b011..1be6a17ffef4 100755 --- a/autotest/gcore/vsis3.py +++ b/autotest/gcore/vsis3.py @@ -182,6 +182,7 @@ def test_vsis3_no_sign_request(aws_test_config_as_config_options_or_credentials) def test_vsis3_sync_multithreaded_download( + tmp_vsimem, aws_test_config_as_config_options_or_credentials, ): def cbk(pct, _, tab): @@ -203,28 +204,31 @@ def cbk(pct, _, tab): ): assert gdal.Sync( "/vsis3/cdn.proj.org/test_dummy", - "/vsimem/test_vsis3_no_sign_request_sync", + tmp_vsimem / "test_vsis3_no_sign_request_sync", options=["NUM_THREADS=2"], callback=cbk, callback_data=tab, ) assert tab[0] == 1.0 assert ( - gdal.VSIStatL("/vsimem/test_vsis3_no_sign_request_sync/test_dummy/foo").size + gdal.VSIStatL( + tmp_vsimem / "test_vsis3_no_sign_request_sync/test_dummy/foo" + ).size == 4 ) assert ( - gdal.VSIStatL("/vsimem/test_vsis3_no_sign_request_sync/test_dummy/bar").size + gdal.VSIStatL( + tmp_vsimem / "test_vsis3_no_sign_request_sync/test_dummy/bar" + ).size == 4 ) - gdal.RmdirRecursive("/vsimem/test_vsis3_no_sign_request_sync") ############################################################################### # Test Sync() and multithreaded download and CHUNK_SIZE -def test_vsis3_sync_multithreaded_download_chunk_size(aws_test_config): +def test_vsis3_sync_multithreaded_download_chunk_size(tmp_vsimem, aws_test_config): def cbk(pct, _, tab): assert pct >= tab[0] tab[0] = pct @@ -240,23 +244,25 @@ def cbk(pct, _, tab): with gdaltest.config_options(options, thread_local=False): assert gdal.Sync( "/vsis3/cdn.proj.org/test_dummy", - "/vsimem/test_vsis3_no_sign_request_sync", + tmp_vsimem / "test_vsis3_no_sign_request_sync", options=["NUM_THREADS=2", "CHUNK_SIZE=3"], callback=cbk, callback_data=tab, ) assert tab[0] == 1.0 assert ( - gdal.VSIStatL("/vsimem/test_vsis3_no_sign_request_sync/test_dummy/foo").size + gdal.VSIStatL( + tmp_vsimem / "test_vsis3_no_sign_request_sync/test_dummy/foo" + ).size == 4 ) assert ( - gdal.VSIStatL("/vsimem/test_vsis3_no_sign_request_sync/test_dummy/bar").size + gdal.VSIStatL( + tmp_vsimem / "test_vsis3_no_sign_request_sync/test_dummy/bar" + ).size == 4 ) - gdal.RmdirRecursive("/vsimem/test_vsis3_no_sign_request_sync") - ############################################################################### # Error cases @@ -3302,7 +3308,7 @@ def 
test_vsis3_8(aws_test_config, webserver_port): # Test vsisync() with SYNC_STRATEGY=ETAG -def test_vsis3_sync_etag(aws_test_config, webserver_port): +def test_vsis3_sync_etag(tmp_vsimem, aws_test_config, webserver_port): gdal.VSICurlClearCache() @@ -3335,7 +3341,7 @@ def test_vsis3_sync_etag(aws_test_config, webserver_port): expected_headers={"Content-Length": "3", "x-amz-storage-class": "GLACIER"}, ) - gdal.FileFromMemBuffer("/vsimem/testsync.txt", "foo") + gdal.FileFromMemBuffer(tmp_vsimem / "testsync.txt", "foo") def cbk(pct, _, tab): assert pct > tab[0] @@ -3345,7 +3351,7 @@ def cbk(pct, _, tab): tab = [0] with webserver.install_http_handler(handler): assert gdal.Sync( - "/vsimem/testsync.txt", + tmp_vsimem / "testsync.txt", "/vsis3/out", options=options + ["x-amz-storage-class=GLACIER"], callback=cbk, @@ -3356,9 +3362,9 @@ def cbk(pct, _, tab): # Re-try with cached ETag. Should generate no network access handler = webserver.SequentialHandler() with webserver.install_http_handler(handler): - assert gdal.Sync("/vsimem/testsync.txt", "/vsis3/out", options=options) + assert gdal.Sync(tmp_vsimem / "testsync.txt", "/vsis3/out", options=options) assert gdal.Sync( - "/vsimem/testsync.txt", "/vsis3/out/testsync.txt", options=options + tmp_vsimem / "testsync.txt", "/vsis3/out/testsync.txt", options=options ) gdal.VSICurlClearCache() @@ -3377,17 +3383,17 @@ def cbk(pct, _, tab): "foo", ) with webserver.install_http_handler(handler): - assert gdal.Sync("/vsis3/out/testsync.txt", "/vsimem/", options=options) + assert gdal.Sync("/vsis3/out/testsync.txt", tmp_vsimem, options=options) # Shouldn't do any copy, but hard to verify with webserver.install_http_handler(webserver.SequentialHandler()): - assert gdal.Sync("/vsis3/out/testsync.txt", "/vsimem/", options=options) + assert gdal.Sync("/vsis3/out/testsync.txt", tmp_vsimem, options=options) assert gdal.Sync( - "/vsis3/out/testsync.txt", "/vsimem/testsync.txt", options=options + "/vsis3/out/testsync.txt", tmp_vsimem / "testsync.txt", options=options ) # Modify target file, and redo synchronization - gdal.FileFromMemBuffer("/vsimem/testsync.txt", "bar") + gdal.FileFromMemBuffer(tmp_vsimem / "testsync.txt", "bar") handler = webserver.SequentialHandler() handler.add( @@ -3398,9 +3404,9 @@ def cbk(pct, _, tab): "foo", ) with webserver.install_http_handler(handler): - assert gdal.Sync("/vsis3/out/testsync.txt", "/vsimem/", options=options) + assert gdal.Sync("/vsis3/out/testsync.txt", tmp_vsimem, options=options) - f = gdal.VSIFOpenL("/vsimem/testsync.txt", "rb") + f = gdal.VSIFOpenL(tmp_vsimem / "testsync.txt", "rb") data = gdal.VSIFReadL(1, 3, f).decode("ascii") gdal.VSIFCloseL(f) assert data == "foo" @@ -3422,15 +3428,15 @@ def cbk(pct, _, tab): "foo", ) with webserver.install_http_handler(handler): - assert gdal.Sync("/vsimem/testsync.txt", "/vsis3/out", options=options) + assert gdal.Sync(tmp_vsimem / "testsync.txt", "/vsis3/out", options=options) - gdal.Unlink("/vsimem/testsync.txt") + gdal.Unlink(tmp_vsimem / "testsync.txt") # Directory copying gdal.VSICurlClearCache() - gdal.Mkdir("/vsimem/subdir", 0) - gdal.FileFromMemBuffer("/vsimem/subdir/testsync.txt", "foo") + gdal.Mkdir(tmp_vsimem / "subdir", 0) + gdal.FileFromMemBuffer(tmp_vsimem / "subdir/testsync.txt", "foo") handler = webserver.SequentialHandler() handler.add( "GET", @@ -3452,19 +3458,18 @@ def cbk(pct, _, tab): """, ) with webserver.install_http_handler(handler): - assert gdal.Sync("/vsimem/subdir/", "/vsis3/out", options=options) - gdal.RmdirRecursive("/vsimem/subdir") + assert 
gdal.Sync(f"{tmp_vsimem}/subdir/", "/vsis3/out", options=options) ############################################################################### # Test vsisync() with SYNC_STRATEGY=TIMESTAMP -def test_vsis3_sync_timestamp(aws_test_config, webserver_port): +def test_vsis3_sync_timestamp(tmp_vsimem, aws_test_config, webserver_port): options = ["SYNC_STRATEGY=TIMESTAMP"] - gdal.FileFromMemBuffer("/vsimem/testsync.txt", "foo") + gdal.FileFromMemBuffer(tmp_vsimem / "testsync.txt", "foo") # S3 to local: S3 file is older -> download gdal.VSICurlClearCache() @@ -3488,7 +3493,7 @@ def test_vsis3_sync_timestamp(aws_test_config, webserver_port): "foo", ) with webserver.install_http_handler(handler): - assert gdal.Sync("/vsis3/out/testsync.txt", "/vsimem/", options=options) + assert gdal.Sync("/vsis3/out/testsync.txt", tmp_vsimem, options=options) # S3 to local: S3 file is newer -> do nothing gdal.VSICurlClearCache() @@ -3506,7 +3511,7 @@ def test_vsis3_sync_timestamp(aws_test_config, webserver_port): "foo", ) with webserver.install_http_handler(handler): - assert gdal.Sync("/vsis3/out/testsync.txt", "/vsimem/", options=options) + assert gdal.Sync("/vsis3/out/testsync.txt", tmp_vsimem, options=options) # Local to S3: S3 file is older -> upload gdal.VSICurlClearCache() @@ -3525,7 +3530,7 @@ def test_vsis3_sync_timestamp(aws_test_config, webserver_port): handler.add("PUT", "/out/testsync.txt", 200) with webserver.install_http_handler(handler): assert gdal.Sync( - "/vsimem/testsync.txt", "/vsis3/out/testsync.txt", options=options + tmp_vsimem / "testsync.txt", "/vsis3/out/testsync.txt", options=options ) # Local to S3: S3 file is newer -> do nothing @@ -3544,11 +3549,9 @@ def test_vsis3_sync_timestamp(aws_test_config, webserver_port): ) with webserver.install_http_handler(handler): assert gdal.Sync( - "/vsimem/testsync.txt", "/vsis3/out/testsync.txt", options=options + tmp_vsimem / "testsync.txt", "/vsis3/out/testsync.txt", options=options ) - gdal.Unlink("/vsimem/testsync.txt") - ############################################################################### # Test vsisync() failure @@ -3559,9 +3562,9 @@ def test_vsis3_sync_timestamp(aws_test_config, webserver_port): gdaltest.is_ci(), reason="test skipped on CI due to it not being reliable (also fails randomly when run locally)", ) -def test_vsis3_sync_failed(aws_test_config, webserver_port): +def test_vsis3_sync_failed(tmp_vsimem, aws_test_config, webserver_port): - gdal.FileFromMemBuffer("/vsimem/testsync.txt", "foo") + gdal.FileFromMemBuffer(tmp_vsimem / "testsync.txt", "foo") # S3 to local: S3 file is older -> download gdal.VSICurlClearCache() @@ -3603,20 +3606,20 @@ def test_vsis3_sync_failed(aws_test_config, webserver_port): with webserver.install_http_handler(handler): with pytest.raises( Exception, - match="Copying of /vsis3/out/testsync.txt to /vsimem/testsync.txt failed: 2 bytes were copied whereas 3 were expected", + match=f"Copying of /vsis3/out/testsync.txt to {tmp_vsimem}/testsync.txt failed: 2 bytes were copied whereas 3 were expected", ): - gdal.Sync("/vsis3/out/testsync.txt", "/vsimem/") + gdal.Sync("/vsis3/out/testsync.txt", tmp_vsimem) ############################################################################### # Test vsisync() with SYNC_STRATEGY=OVERWRITE -def test_vsis3_sync_overwrite(aws_test_config, webserver_port): +def test_vsis3_sync_overwrite(tmp_vsimem, aws_test_config, webserver_port): options = ["SYNC_STRATEGY=OVERWRITE"] - gdal.FileFromMemBuffer("/vsimem/testsync.txt", "foo") + gdal.FileFromMemBuffer(tmp_vsimem / 
"testsync.txt", "foo") # S3 to local: S3 file is newer gdal.VSICurlClearCache() @@ -3640,7 +3643,7 @@ def test_vsis3_sync_overwrite(aws_test_config, webserver_port): "foo", ) with webserver.install_http_handler(handler): - assert gdal.Sync("/vsis3/out/testsync.txt", "/vsimem/", options=options) + assert gdal.Sync("/vsis3/out/testsync.txt", tmp_vsimem, options=options) # Local to S3: S3 file is newer gdal.VSICurlClearCache() @@ -3659,17 +3662,15 @@ def test_vsis3_sync_overwrite(aws_test_config, webserver_port): handler.add("PUT", "/out/testsync.txt", 200) with webserver.install_http_handler(handler): assert gdal.Sync( - "/vsimem/testsync.txt", "/vsis3/out/testsync.txt", options=options + tmp_vsimem / "testsync.txt", "/vsis3/out/testsync.txt", options=options ) - gdal.Unlink("/vsimem/testsync.txt") - ############################################################################### # Test vsisync() with source in /vsis3 with implicit directories -def test_vsis3_sync_implicit_directories(aws_test_config, webserver_port): +def test_vsis3_sync_implicit_directories(tmp_path, aws_test_config, webserver_port): gdal.VSICurlClearCache() @@ -3714,15 +3715,13 @@ def test_vsis3_sync_implicit_directories(aws_test_config, webserver_port): """, ) handler.add("GET", "/mybucket/subdir/implicit_subdir/testsync.txt", 200, {}, b"abc") - tmpdirname = "tmp/test_vsis3_sync_implicit_directories" + tmpdirname = f"{tmp_path}/test_vsis3_sync_implicit_directories" gdal.Mkdir(tmpdirname, 0o755) - try: - with webserver.install_http_handler(handler): - assert gdal.Sync("/vsis3/mybucket/subdir/", tmpdirname + "/") - assert os.path.exists(tmpdirname + "/implicit_subdir") - assert os.path.exists(tmpdirname + "/implicit_subdir/testsync.txt") - finally: - gdal.RmdirRecursive(tmpdirname) + + with webserver.install_http_handler(handler): + assert gdal.Sync("/vsis3/mybucket/subdir/", tmpdirname + "/") + assert os.path.exists(tmpdirname + "/implicit_subdir") + assert os.path.exists(tmpdirname + "/implicit_subdir/testsync.txt") ############################################################################### @@ -4047,7 +4046,7 @@ def test_vsis3_fake_rename_on_existing_dir(aws_test_config, webserver_port): def test_vsis3_fake_sync_multithreaded_upload_chunk_size( - aws_test_config, webserver_port + tmp_vsimem, aws_test_config, webserver_port ): gdal.VSICurlClearCache() @@ -4057,8 +4056,8 @@ def cbk(pct, _, tab): tab[0] = pct return True - gdal.Mkdir("/vsimem/test", 0) - gdal.FileFromMemBuffer("/vsimem/test/foo", "foo\n") + gdal.Mkdir(tmp_vsimem / "test", 0) + gdal.FileFromMemBuffer(tmp_vsimem / "test/foo", "foo\n") tab = [-1] handler = webserver.SequentialHandler() @@ -4153,7 +4152,7 @@ def method(request): with gdaltest.config_option("VSIS3_SIMULATE_THREADING", "YES", thread_local=False): with webserver.install_http_handler(handler): assert gdal.Sync( - "/vsimem/test", + tmp_vsimem / "test", "/vsis3/test_bucket", options=[ "NUM_THREADS=1", @@ -4165,17 +4164,15 @@ def method(request): ) assert tab[0] == 1.0 - gdal.RmdirRecursive("/vsimem/test") - def test_vsis3_fake_sync_multithreaded_upload_chunk_size_failure( - aws_test_config, webserver_port + tmp_vsimem, aws_test_config, webserver_port ): gdal.VSICurlClearCache() - gdal.Mkdir("/vsimem/test", 0) - gdal.FileFromMemBuffer("/vsimem/test/foo", "foo\n") + gdal.Mkdir(tmp_vsimem / "test", 0) + gdal.FileFromMemBuffer(tmp_vsimem / "test/foo", "foo\n") handler = webserver.SequentialHandler() handler.add("GET", "/test_bucket/?prefix=test%2F", 200) @@ -4210,13 +4207,11 @@ def 
test_vsis3_fake_sync_multithreaded_upload_chunk_size_failure( with webserver.install_http_handler(handler): with gdal.quiet_errors(): assert not gdal.Sync( - "/vsimem/test", + tmp_vsimem / "test", "/vsis3/test_bucket", options=["NUM_THREADS=1", "CHUNK_SIZE=3"], ) - gdal.RmdirRecursive("/vsimem/test") - ############################################################################### # Test reading/writing metadata @@ -4516,18 +4511,18 @@ def test_vsis3_random_write_on_existing_file_that_does_not_exist( # Read credentials from simulated ~/.aws/credentials -def test_vsis3_read_credentials_file(aws_test_config, webserver_port): +def test_vsis3_read_credentials_file(tmp_vsimem, aws_test_config, webserver_port): options = { "AWS_SECRET_ACCESS_KEY": "", "AWS_ACCESS_KEY_ID": "", - "CPL_AWS_CREDENTIALS_FILE": "/vsimem/aws_credentials", + "CPL_AWS_CREDENTIALS_FILE": f"{tmp_vsimem}/aws_credentials", } gdal.VSICurlClearCache() gdal.FileFromMemBuffer( - "/vsimem/aws_credentials", + tmp_vsimem / "aws_credentials", """ [unrelated] aws_access_key_id = foo @@ -4556,24 +4551,22 @@ def test_vsis3_read_credentials_file(aws_test_config, webserver_port): assert data == "foo" - gdal.Unlink("/vsimem/aws_credentials") - ############################################################################### # Read credentials from simulated ~/.aws/config -def test_vsis3_read_config_file(aws_test_config, webserver_port): +def test_vsis3_read_config_file(tmp_vsimem, aws_test_config, webserver_port): options = { "AWS_SECRET_ACCESS_KEY": "", "AWS_ACCESS_KEY_ID": "", - "AWS_CONFIG_FILE": "/vsimem/aws_config", + "AWS_CONFIG_FILE": f"{tmp_vsimem}/aws_config", } gdal.VSICurlClearCache() gdal.FileFromMemBuffer( - "/vsimem/aws_config", + tmp_vsimem / "aws_config", """ [unrelated] aws_access_key_id = foo @@ -4603,25 +4596,25 @@ def test_vsis3_read_config_file(aws_test_config, webserver_port): assert data == "foo" - gdal.Unlink("/vsimem/aws_config") - ############################################################################### # Read credentials from simulated ~/.aws/credentials and ~/.aws/config -def test_vsis3_read_credentials_config_file(aws_test_config, webserver_port): +def test_vsis3_read_credentials_config_file( + tmp_vsimem, aws_test_config, webserver_port +): options = { "AWS_SECRET_ACCESS_KEY": "", "AWS_ACCESS_KEY_ID": "", - "CPL_AWS_CREDENTIALS_FILE": "/vsimem/aws_credentials", - "AWS_CONFIG_FILE": "/vsimem/aws_config", + "CPL_AWS_CREDENTIALS_FILE": f"{tmp_vsimem}/aws_credentials", + "AWS_CONFIG_FILE": f"{tmp_vsimem}/aws_config", } gdal.VSICurlClearCache() gdal.FileFromMemBuffer( - "/vsimem/aws_credentials", + tmp_vsimem / "aws_credentials", """ [unrelated] aws_access_key_id = foo @@ -4636,7 +4629,7 @@ def test_vsis3_read_credentials_config_file(aws_test_config, webserver_port): ) gdal.FileFromMemBuffer( - "/vsimem/aws_config", + tmp_vsimem / "aws_config", """ [unrelated] aws_access_key_id = foo @@ -4666,9 +4659,6 @@ def test_vsis3_read_credentials_config_file(aws_test_config, webserver_port): assert data == "foo" - gdal.Unlink("/vsimem/aws_credentials") - gdal.Unlink("/vsimem/aws_config") - ############################################################################### # Read credentials from simulated ~/.aws/credentials and ~/.aws/config with @@ -4743,20 +4733,20 @@ def test_vsis3_read_credentials_config_file_non_default_profile( def test_vsis3_read_credentials_config_file_inconsistent( - aws_test_config, webserver_port + tmp_vsimem, aws_test_config, webserver_port ): options = { "AWS_SECRET_ACCESS_KEY": "", 
"AWS_ACCESS_KEY_ID": "", - "CPL_AWS_CREDENTIALS_FILE": "/vsimem/aws_credentials", - "AWS_CONFIG_FILE": "/vsimem/aws_config", + "CPL_AWS_CREDENTIALS_FILE": f"{tmp_vsimem}/aws_credentials", + "AWS_CONFIG_FILE": f"{tmp_vsimem}/aws_config", } gdal.VSICurlClearCache() gdal.FileFromMemBuffer( - "/vsimem/aws_credentials", + tmp_vsimem / "aws_credentials", """ [unrelated] aws_access_key_id = foo @@ -4771,7 +4761,7 @@ def test_vsis3_read_credentials_config_file_inconsistent( ) gdal.FileFromMemBuffer( - "/vsimem/aws_config", + tmp_vsimem / "aws_config", """ [unrelated] aws_access_key_id = foo @@ -4804,9 +4794,6 @@ def test_vsis3_read_credentials_config_file_inconsistent( assert data == "foo" - gdal.Unlink("/vsimem/aws_credentials") - gdal.Unlink("/vsimem/aws_config") - ############################################################################### # Read credentials from sts AssumeRoleWithWebIdentity @@ -5290,7 +5277,9 @@ def test_vsis3_read_credentials_AWS_CONTAINER_CREDENTIALS_FULL_URI( # Read credentials from an assumed role -def test_vsis3_read_credentials_assumed_role(aws_test_config, webserver_port): +def test_vsis3_read_credentials_assumed_role( + tmp_vsimem, aws_test_config, webserver_port +): if webserver_port == 8080: expected_signature1 = ( @@ -5337,8 +5326,8 @@ def test_vsis3_read_credentials_assumed_role(aws_test_config, webserver_port): options = { "AWS_SECRET_ACCESS_KEY": "", "AWS_ACCESS_KEY_ID": "", - "CPL_AWS_CREDENTIALS_FILE": "/vsimem/aws_credentials", - "AWS_CONFIG_FILE": "/vsimem/aws_config", + "CPL_AWS_CREDENTIALS_FILE": f"{tmp_vsimem}/aws_credentials", + "AWS_CONFIG_FILE": f"{tmp_vsimem}/aws_config", "AWS_PROFILE": "my_profile", "AWS_STS_ENDPOINT": "localhost:%d" % webserver_port, } @@ -5346,7 +5335,7 @@ def test_vsis3_read_credentials_assumed_role(aws_test_config, webserver_port): gdal.VSICurlClearCache() gdal.FileFromMemBuffer( - "/vsimem/aws_credentials", + tmp_vsimem / "aws_credentials", """ [foo] aws_access_key_id = AWS_ACCESS_KEY_ID @@ -5355,7 +5344,7 @@ def test_vsis3_read_credentials_assumed_role(aws_test_config, webserver_port): ) gdal.FileFromMemBuffer( - "/vsimem/aws_config", + tmp_vsimem / "aws_config", """ [profile my_profile] role_arn = arn:aws:iam::557268267719:role/role @@ -5474,14 +5463,11 @@ def test_vsis3_read_credentials_assumed_role(aws_test_config, webserver_port): gdal.VSIFCloseL(f) assert data == "foo" - gdal.Unlink("/vsimem/aws_credentials") - gdal.Unlink("/vsimem/aws_config") - ############################################################################### # Read credentials from sts AssumeRoleWithWebIdentity def test_vsis3_read_credentials_sts_assume_role_with_web_identity_from_config_file( - aws_test_config, webserver_port + tmp_vsimem, aws_test_config, webserver_port ): if webserver_port == 8080: @@ -5520,8 +5506,8 @@ def test_vsis3_read_credentials_sts_assume_role_with_web_identity_from_config_fi options = { "AWS_SECRET_ACCESS_KEY": "", "AWS_ACCESS_KEY_ID": "", - "CPL_AWS_CREDENTIALS_FILE": "/vsimem/aws_credentials", - "AWS_CONFIG_FILE": "/vsimem/aws_config", + "CPL_AWS_CREDENTIALS_FILE": f"{tmp_vsimem}/aws_credentials", + "AWS_CONFIG_FILE": f"{tmp_vsimem}/aws_config", "AWS_PROFILE": "my_profile", "AWS_STS_ENDPOINT": "localhost:%d" % webserver_port, "CPL_AWS_STS_ROOT_URL": "http://localhost:%d" % webserver_port, @@ -5529,16 +5515,16 @@ def test_vsis3_read_credentials_sts_assume_role_with_web_identity_from_config_fi gdal.VSICurlClearCache() - gdal.FileFromMemBuffer("/vsimem/web_identity_token_file", "token\n") + gdal.FileFromMemBuffer(tmp_vsimem / 
"web_identity_token_file", "token\n") - gdal.FileFromMemBuffer("/vsimem/aws_credentials", "") + gdal.FileFromMemBuffer(tmp_vsimem / "aws_credentials", "") gdal.FileFromMemBuffer( - "/vsimem/aws_config", - """ + tmp_vsimem / "aws_config", + f""" [profile foo] role_arn = foo_role_arn -web_identity_token_file = /vsimem/web_identity_token_file +web_identity_token_file = {tmp_vsimem}/web_identity_token_file [profile my_profile] role_arn = my_profile_role_arn source_profile = foo @@ -5651,33 +5637,27 @@ def test_vsis3_read_credentials_sts_assume_role_with_web_identity_from_config_fi }, ) - try: - with webserver.install_http_handler(handler): - with gdaltest.config_options(options, thread_local=False): - f = open_for_read("/vsis3/s3_fake_bucket/resource") + with webserver.install_http_handler(handler): + with gdaltest.config_options(options, thread_local=False): + f = open_for_read("/vsis3/s3_fake_bucket/resource") + assert f is not None + data = gdal.VSIFReadL(1, 4, f).decode("ascii") + gdal.VSIFCloseL(f) + assert data == "foo" + + with webserver.install_http_handler(handler2): + with gdaltest.config_options(options, thread_local=False): + f = open_for_read("/vsis3/s3_fake_bucket/resource2") assert f is not None data = gdal.VSIFReadL(1, 4, f).decode("ascii") gdal.VSIFCloseL(f) - assert data == "foo" - - with webserver.install_http_handler(handler2): - with gdaltest.config_options(options, thread_local=False): - f = open_for_read("/vsis3/s3_fake_bucket/resource2") - assert f is not None - data = gdal.VSIFReadL(1, 4, f).decode("ascii") - gdal.VSIFCloseL(f) - assert data == "foo" - - f = open_for_read("/vsis3/s3_fake_bucket/resource3") - assert f is not None - data = gdal.VSIFReadL(1, 4, f).decode("ascii") - gdal.VSIFCloseL(f) - assert data == "foo" + assert data == "foo" - finally: - gdal.Unlink("/vsimem/web_identity_token_file") - gdal.Unlink("/vsimem/aws_credentials") - gdal.Unlink("/vsimem/aws_config") + f = open_for_read("/vsis3/s3_fake_bucket/resource3") + assert f is not None + data = gdal.VSIFReadL(1, 4, f).decode("ascii") + gdal.VSIFCloseL(f) + assert data == "foo" ############################################################################### From 735b1a09e52bbaafea6559d560c84fcd6306709e Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Fri, 5 Apr 2024 15:37:55 -0400 Subject: [PATCH 066/230] autotest ogr_hana.py: skip tests if connection string not set --- autotest/ogr/ogr_hana.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autotest/ogr/ogr_hana.py b/autotest/ogr/ogr_hana.py index bcc63687f328..d6a6266f9c28 100644 --- a/autotest/ogr/ogr_hana.py +++ b/autotest/ogr/ogr_hana.py @@ -1250,7 +1250,7 @@ def get_connection_str(): if uri is not None: conn_str = uri + ";ENCRYPT=YES;SSL_VALIDATE_CERTIFICATE=false;CHAR_AS_UTF8=1" else: - conn_str = "HANA:autotest" + pytest.skip("OGR_HANA_CONNECTION_STRING not set") return conn_str From 260a1aa71d6e6eb4974cf205533e753aa7b319b6 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Fri, 5 Apr 2024 17:33:26 -0400 Subject: [PATCH 067/230] autotest ogr_db2_hack.py: combine interdependent tests --- autotest/ogr/ogr_db2_hack.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/autotest/ogr/ogr_db2_hack.py b/autotest/ogr/ogr_db2_hack.py index 67999500b1da..9878ad6d6be4 100755 --- a/autotest/ogr/ogr_db2_hack.py +++ b/autotest/ogr/ogr_db2_hack.py @@ -55,12 +55,8 @@ def test_ogr_db2_hack_1(): assert wkb[0] == "1", "WKB wkbNDR point geometry has wrong byte order" - 
-############################################################################### -# Verify that we can turn DB2 V7.2 mode back off! - - -def test_ogr_db2_hack_2(): + ############################################################################### + # Verify that we can turn DB2 V7.2 mode back off! assert ( ogr.SetGenerate_DB2_V72_BYTE_ORDER(0) == 0 From e1d1993780b51408e78c7d7e5692e381883cbc54 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Fri, 5 Apr 2024 21:56:40 -0400 Subject: [PATCH 068/230] autotest ogr_openfilegdb.py: use tmp_path, tmp_vsimem --- autotest/ogr/ogr_openfilegdb.py | 124 +- autotest/ogr/ogr_openfilegdb_write.py | 5095 ++++++++++++------------- 2 files changed, 2521 insertions(+), 2698 deletions(-) diff --git a/autotest/ogr/ogr_openfilegdb.py b/autotest/ogr/ogr_openfilegdb.py index b58f56196532..d61b3a3b4dd9 100755 --- a/autotest/ogr/ogr_openfilegdb.py +++ b/autotest/ogr/ogr_openfilegdb.py @@ -828,22 +828,25 @@ def test_ogr_openfilegdb_str_indexed_truncated(): # Test opening an unzipped dataset -def test_ogr_openfilegdb_5(): +@pytest.fixture() +def testopenfilegdb(tmp_path): try: - shutil.rmtree("tmp/testopenfilegdb.gdb") - except OSError: - pass - try: - gdaltest.unzip("tmp/", "data/filegdb/testopenfilegdb.gdb.zip") + gdaltest.unzip(tmp_path, "data/filegdb/testopenfilegdb.gdb.zip") except OSError: pytest.skip() + try: - os.stat("tmp/testopenfilegdb.gdb") + os.stat(tmp_path / "testopenfilegdb.gdb") except OSError: pytest.skip() - ds = ogr.Open("tmp/testopenfilegdb.gdb") + return tmp_path / "testopenfilegdb.gdb" + + +def test_ogr_openfilegdb_5(testopenfilegdb): + + ds = ogr.Open(testopenfilegdb) assert ds is not None @@ -1041,16 +1044,11 @@ def test_ogr_openfilegdb_8(): # Test reading a .gdbtable outside a .gdb -def test_ogr_openfilegdb_9(): - - try: - os.stat("tmp/testopenfilegdb.gdb") - except OSError: - pytest.skip() +def test_ogr_openfilegdb_9(tmp_path, testopenfilegdb): - shutil.copy("tmp/testopenfilegdb.gdb/a00000009.gdbtable", "tmp/a00000009.gdbtable") - shutil.copy("tmp/testopenfilegdb.gdb/a00000009.gdbtablx", "tmp/a00000009.gdbtablx") - ds = ogr.Open("tmp/a00000009.gdbtable") + shutil.copy(testopenfilegdb / "a00000009.gdbtable", tmp_path / "a00000009.gdbtable") + shutil.copy(testopenfilegdb / "a00000009.gdbtablx", tmp_path / "a00000009.gdbtablx") + ds = ogr.Open(tmp_path / "a00000009.gdbtable") assert ds is not None lyr = ds.GetLayer(0) feat = lyr.GetNextFeature() @@ -1078,19 +1076,14 @@ def unfuzz(backup): @gdaltest.disable_exceptions() -def test_ogr_openfilegdb_10(): +def test_ogr_openfilegdb_10(testopenfilegdb, tmp_path): - try: - os.stat("tmp/testopenfilegdb.gdb") - except OSError: - pytest.skip() - - shutil.copytree("tmp/testopenfilegdb.gdb", "tmp/testopenfilegdb_fuzzed.gdb") + shutil.copytree(testopenfilegdb, tmp_path / "testopenfilegdb_fuzzed.gdb") if False: # pylint: disable=using-constant-test for filename in [ - "tmp/testopenfilegdb_fuzzed.gdb/a00000001.gdbtable", - "tmp/testopenfilegdb_fuzzed.gdb/a00000001.gdbtablx", + tmp_path / "testopenfilegdb_fuzzed.gdb/a00000001.gdbtable", + tmp_path / "testopenfilegdb_fuzzed.gdb/a00000001.gdbtablx", ]: errors = set() offsets = [] @@ -1101,7 +1094,7 @@ def test_ogr_openfilegdb_10(): backup = fuzz(filename, offset) gdal.ErrorReset() # print(offset) - ds = ogr.Open("tmp/testopenfilegdb_fuzzed.gdb") + ds = ogr.Open(tmp_path / "testopenfilegdb_fuzzed.gdb") error_msg = gdal.GetLastErrorMsg() feat = None if ds is not None: @@ -1128,8 +1121,9 @@ def test_ogr_openfilegdb_10(): print(offsets) for 
filename in [ - "tmp/testopenfilegdb_fuzzed.gdb/a00000004.gdbindexes", - "tmp/testopenfilegdb_fuzzed.gdb/a00000004.CatItemsByPhysicalName.atx", + tmp_path / "testopenfilegdb_fuzzed.gdb/a00000004.gdbindexes", + tmp_path + / "testopenfilegdb_fuzzed.gdb/a00000004.CatItemsByPhysicalName.atx", ]: errors = set() offsets = [] @@ -1140,7 +1134,7 @@ def test_ogr_openfilegdb_10(): backup = fuzz(filename, offset) gdal.ErrorReset() # print(offset) - ds = ogr.Open("tmp/testopenfilegdb_fuzzed.gdb") + ds = ogr.Open(tmp_path / "testopenfilegdb_fuzzed.gdb") error_msg = gdal.GetLastErrorMsg() feat = None if ds is not None: @@ -1171,7 +1165,7 @@ def test_ogr_openfilegdb_10(): for (filename, offsets) in [ ( - "tmp/testopenfilegdb_fuzzed.gdb/a00000001.gdbtable", + tmp_path / "testopenfilegdb_fuzzed.gdb/a00000001.gdbtable", [ 4, 5, @@ -1202,7 +1196,7 @@ def test_ogr_openfilegdb_10(): ], ), ( - "tmp/testopenfilegdb_fuzzed.gdb/a00000001.gdbtablx", + tmp_path / "testopenfilegdb_fuzzed.gdb/a00000001.gdbtablx", [4, 7, 11, 12, 16, 31, 5136, 5140, 5142, 5144], ), ]: @@ -1210,7 +1204,7 @@ def test_ogr_openfilegdb_10(): backup = fuzz(filename, offset) with gdal.quiet_errors(): gdal.ErrorReset() - ds = ogr.Open("tmp/testopenfilegdb_fuzzed.gdb") + ds = ogr.Open(tmp_path / "testopenfilegdb_fuzzed.gdb") error_msg = gdal.GetLastErrorMsg() feat = None if ds is not None: @@ -1233,7 +1227,7 @@ def test_ogr_openfilegdb_10(): for (filename, offsets) in [ ( - "tmp/testopenfilegdb_fuzzed.gdb/a00000004.gdbindexes", + tmp_path / "testopenfilegdb_fuzzed.gdb/a00000004.gdbindexes", [ 0, 4, @@ -1260,7 +1254,8 @@ def test_ogr_openfilegdb_10(): ], ), ( - "tmp/testopenfilegdb_fuzzed.gdb/a00000004.CatItemsByPhysicalName.atx", + tmp_path + / "testopenfilegdb_fuzzed.gdb/a00000004.CatItemsByPhysicalName.atx", [4, 12, 8196, 8300, 8460, 8620, 8780, 8940, 9100, 12290, 12294, 12298], ), ]: @@ -1269,7 +1264,7 @@ def test_ogr_openfilegdb_10(): backup = fuzz(filename, offset) with gdal.quiet_errors(): gdal.ErrorReset() - ds = ogr.Open("tmp/testopenfilegdb_fuzzed.gdb") + ds = ogr.Open(tmp_path / "testopenfilegdb_fuzzed.gdb") error_msg = gdal.GetLastErrorMsg() feat = None if ds is not None: @@ -1983,13 +1978,9 @@ def test_ogr_openfilegdb_read_domains(): # Test writing field domains -def test_ogr_openfilegdb_write_domains_from_other_gdb(): +def test_ogr_openfilegdb_write_domains_from_other_gdb(tmp_path): - out_dir = "tmp/test_ogr_fgdb_write_domains.gdb" - try: - shutil.rmtree(out_dir) - except OSError: - pass + out_dir = tmp_path / "test_ogr_fgdb_write_domains.gdb" ds = gdal.VectorTranslate( out_dir, "data/filegdb/Domains.gdb", options="-f OpenFileGDB" @@ -2472,28 +2463,28 @@ def test_ogr_openfilegdb_read_relationships(): @pytest.mark.skipif(sys.platform != "linux", reason="Incorrect platform") -def test_ogr_openfilegdb_read_readonly_in_update_mode(): +def test_ogr_openfilegdb_read_readonly_in_update_mode(tmp_path): if os.getuid() == 0: pytest.skip("running as root... 
skipping") - shutil.copytree("data/filegdb/Domains.gdb", "tmp/testreadonly.gdb") - os.chmod("tmp/testreadonly.gdb", 0o555) - for f in os.listdir("tmp/testreadonly.gdb"): - os.chmod("tmp/testreadonly.gdb/" + f, 0o555) + shutil.copytree("data/filegdb/Domains.gdb", tmp_path / "testreadonly.gdb") + os.chmod(tmp_path / "testreadonly.gdb", 0o555) + for f in os.listdir(tmp_path / "testreadonly.gdb"): + os.chmod(f"{tmp_path}/testreadonly.gdb/{f}", 0o555) try: with pytest.raises(Exception): - ogr.Open("tmp/testreadonly.gdb", update=1) + ogr.Open(tmp_path / "testreadonly.gdb", update=1) - assert ogr.Open("tmp/testreadonly.gdb") + assert ogr.Open(tmp_path / "testreadonly.gdb") # Only turn on a few system tables in read-write mode, but not the # layer of interest - for f in os.listdir("tmp/testreadonly.gdb"): + for f in os.listdir(tmp_path / "testreadonly.gdb"): if f.startswith("a00000001.") or f.startswith("a00000004."): - os.chmod("tmp/testreadonly.gdb/" + f, 0o755) - ds = ogr.Open("tmp/testreadonly.gdb", update=1) + os.chmod(f"{tmp_path}/testreadonly.gdb/{f}", 0o755) + ds = ogr.Open(tmp_path / "testreadonly.gdb", update=1) lyr = ds.GetLayer(0) with pytest.raises( Exception, match="Cannot open Roads in update mode, but only in read-only" @@ -2502,10 +2493,10 @@ def test_ogr_openfilegdb_read_readonly_in_update_mode(): assert lyr.TestCapability(ogr.OLCSequentialWrite) == 0 finally: - os.chmod("tmp/testreadonly.gdb", 0o755) - for f in os.listdir("tmp/testreadonly.gdb"): - os.chmod("tmp/testreadonly.gdb/" + f, 0o755) - shutil.rmtree("tmp/testreadonly.gdb") + os.chmod(tmp_path / "testreadonly.gdb", 0o755) + for f in os.listdir(tmp_path / "testreadonly.gdb"): + os.chmod(f"{tmp_path}/testreadonly.gdb/{f}", 0o755) + shutil.rmtree(tmp_path / "testreadonly.gdb") ############################################################################### @@ -2704,24 +2695,3 @@ def test_ogr_openfilegdb_get_extent_getextent3d(): 0.0, ) ) - - -############################################################################### -# Cleanup - - -def test_ogr_openfilegdb_cleanup(): - - try: - shutil.rmtree("tmp/testopenfilegdb.gdb") - except OSError: - pass - try: - os.remove("tmp/a00000009.gdbtable") - os.remove("tmp/a00000009.gdbtablx") - except OSError: - pass - try: - shutil.rmtree("tmp/testopenfilegdb_fuzzed.gdb") - except OSError: - pass diff --git a/autotest/ogr/ogr_openfilegdb_write.py b/autotest/ogr/ogr_openfilegdb_write.py index f49478e73332..628f4b75a74c 100755 --- a/autotest/ogr/ogr_openfilegdb_write.py +++ b/autotest/ogr/ogr_openfilegdb_write.py @@ -67,11 +67,11 @@ def setup_driver(): ############################################################################### -def test_ogr_openfilegdb_invalid_filename(): +def test_ogr_openfilegdb_invalid_filename(tmp_vsimem): with gdal.quiet_errors(): ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource( - "/vsimem/bad.extension" + tmp_vsimem / "bad.extension" ) assert ds is None @@ -85,9 +85,9 @@ def test_ogr_openfilegdb_invalid_filename(): ############################################################################### -def test_ogr_openfilegdb_write_empty(): +def test_ogr_openfilegdb_write_empty(tmp_vsimem): - dirname = "/vsimem/out.gdb" + dirname = tmp_vsimem / "out.gdb" ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) assert ds is not None ds = None @@ -104,9 +104,9 @@ def test_ogr_openfilegdb_write_empty(): @pytest.mark.parametrize("use_synctodisk", [False, True]) -def test_ogr_openfilegdb_write_field_types(use_synctodisk): +def 
test_ogr_openfilegdb_write_field_types(tmp_vsimem, use_synctodisk): - dirname = "/vsimem/out.gdb" + dirname = tmp_vsimem / "out.gdb" try: ds = gdal.GetDriverByName("OpenFileGDB").Create( dirname, 0, 0, 0, gdal.GDT_Unknown @@ -568,65 +568,65 @@ def test_ogr_openfilegdb_write_field_types(use_synctodisk): @pytest.mark.parametrize("geom_type,read_geom_type,wkt,expected_wkt", testdata) -def test_ogr_openfilegdb_write_all_geoms(geom_type, read_geom_type, wkt, expected_wkt): +def test_ogr_openfilegdb_write_all_geoms( + tmp_vsimem, geom_type, read_geom_type, wkt, expected_wkt +): - dirname = "/vsimem/out.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - assert ds is not None - options = [ - "XORIGIN=-1000", - "YORIGIN=-2000", - "XYSCALE=10000", - "XYTOLERANCE=0.001", - ] - lyr = ds.CreateLayer("test", geom_type=geom_type, options=options) - assert lyr is not None - f = ogr.Feature(lyr.GetLayerDefn()) - if wkt: - ref_geom = ogr.CreateGeometryFromWkt(wkt) - assert ref_geom is not None - else: - ref_geom = None - f.SetGeometry(ref_geom) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - ds = None + dirname = tmp_vsimem / "out.gdb" - ds = ogr.Open(dirname) - assert ds is not None - lyr = ds.GetLayer(0) - assert lyr.GetGeomType() == read_geom_type - f = lyr.GetNextFeature() - got_geom = f.GetGeometryRef() - if ref_geom is None or ref_geom.IsEmpty(): - assert got_geom is None + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + assert ds is not None + options = [ + "XORIGIN=-1000", + "YORIGIN=-2000", + "XYSCALE=10000", + "XYTOLERANCE=0.001", + ] + lyr = ds.CreateLayer("test", geom_type=geom_type, options=options) + assert lyr is not None + f = ogr.Feature(lyr.GetLayerDefn()) + if wkt: + ref_geom = ogr.CreateGeometryFromWkt(wkt) + assert ref_geom is not None + else: + ref_geom = None + f.SetGeometry(ref_geom) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + ds = None + + ds = ogr.Open(dirname) + assert ds is not None + lyr = ds.GetLayer(0) + assert lyr.GetGeomType() == read_geom_type + f = lyr.GetNextFeature() + got_geom = f.GetGeometryRef() + if ref_geom is None or ref_geom.IsEmpty(): + assert got_geom is None + else: + if expected_wkt: + expected_geom = ogr.CreateGeometryFromWkt(expected_wkt) else: - if expected_wkt: - expected_geom = ogr.CreateGeometryFromWkt(expected_wkt) - else: - expected_geom = ogr.ForceTo(ref_geom, read_geom_type) - ogrtest.check_feature_geometry(got_geom, expected_geom) - - # Test presence of a spatial index - if ( - ref_geom is not None - and not ref_geom.IsEmpty() - and ogr.GT_Flatten(geom_type) != ogr.wkbPoint - and ( - ogr.GT_Flatten(geom_type) != ogr.wkbMultiPoint - or ref_geom.GetPointCount() > 1 - ) - and geom_type != ogr.wkbGeometryCollection25D - ): - assert gdal.VSIStatL(dirname + "/a00000009.spx") is not None - minx, maxx, miny, maxy = ref_geom.GetEnvelope() - lyr.SetSpatialFilterRect(minx, miny, maxx, maxy) - lyr.ResetReading() - assert lyr.GetNextFeature() is not None + expected_geom = ogr.ForceTo(ref_geom, read_geom_type) + ogrtest.check_feature_geometry(got_geom, expected_geom) + + # Test presence of a spatial index + if ( + ref_geom is not None + and not ref_geom.IsEmpty() + and ogr.GT_Flatten(geom_type) != ogr.wkbPoint + and ( + ogr.GT_Flatten(geom_type) != ogr.wkbMultiPoint + or ref_geom.GetPointCount() > 1 + ) + and geom_type != ogr.wkbGeometryCollection25D + ): + assert gdal.VSIStatL(f"{dirname}/a00000009.spx") is not None + minx, maxx, miny, maxy = ref_geom.GetEnvelope() + lyr.SetSpatialFilterRect(minx, 
miny, maxx, maxy) + lyr.ResetReading() + assert lyr.GetNextFeature() is not None - ds = None - finally: - gdal.RmdirRecursive(dirname) + ds = None ############################################################################### @@ -641,55 +641,50 @@ def test_ogr_openfilegdb_write_all_geoms(geom_type, read_geom_type, wkt, expecte (ogr.wkbTINZ, "LINESTRING (0 0,1 1)"), ], ) -def test_ogr_openfilegdb_write_bad_geoms(geom_type, wkt): +def test_ogr_openfilegdb_write_bad_geoms(tmp_vsimem, geom_type, wkt): - dirname = "/vsimem/out.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - assert ds is not None - lyr = ds.CreateLayer("test", geom_type=geom_type) - assert lyr is not None - f = ogr.Feature(lyr.GetLayerDefn()) - ref_geom = ogr.CreateGeometryFromWkt(wkt) - f.SetGeometry(ref_geom) - with gdal.quiet_errors(): - assert lyr.CreateFeature(f) != ogr.OGRERR_NONE - ds = None - finally: - gdal.RmdirRecursive(dirname) + dirname = tmp_vsimem / "out.gdb" + + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + assert ds is not None + lyr = ds.CreateLayer("test", geom_type=geom_type) + assert lyr is not None + f = ogr.Feature(lyr.GetLayerDefn()) + ref_geom = ogr.CreateGeometryFromWkt(wkt) + f.SetGeometry(ref_geom) + with gdal.quiet_errors(): + assert lyr.CreateFeature(f) != ogr.OGRERR_NONE + ds = None ############################################################################### -def test_ogr_openfilegdb_write_text_utf16(): +def test_ogr_openfilegdb_write_text_utf16(tmp_vsimem): - dirname = "/vsimem/out.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - lyr = ds.CreateLayer( - "test", geom_type=ogr.wkbNone, options=["CONFIGURATION_KEYWORD=TEXT_UTF16"] - ) - assert lyr is not None - fld_defn = ogr.FieldDefn("str", ogr.OFTString) - fld_defn.SetDefault("'éven'") - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "évenéven") - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - ds = None + dirname = tmp_vsimem / "out.gdb" - ds = ogr.Open(dirname) - assert ds is not None - lyr = ds.GetLayer(0) - fld_defn = lyr.GetLayerDefn().GetFieldDefn(0) - assert fld_defn.GetDefault() == "'éven'" - f = lyr.GetNextFeature() - assert f["str"] == "évenéven" - ds = None + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + lyr = ds.CreateLayer( + "test", geom_type=ogr.wkbNone, options=["CONFIGURATION_KEYWORD=TEXT_UTF16"] + ) + assert lyr is not None + fld_defn = ogr.FieldDefn("str", ogr.OFTString) + fld_defn.SetDefault("'éven'") + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "évenéven") + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + ds = None - finally: - gdal.RmdirRecursive(dirname) + ds = ogr.Open(dirname) + assert ds is not None + lyr = ds.GetLayer(0) + fld_defn = lyr.GetLayerDefn().GetFieldDefn(0) + assert fld_defn.GetDefault() == "'éven'" + f = lyr.GetNextFeature() + assert f["str"] == "évenéven" + ds = None ############################################################################### @@ -746,271 +741,259 @@ def read_uint32(f): ], ) @pytest.mark.parametrize("sync", [True, False]) -def test_ogr_openfilegdb_write_create_feature_with_id_set(has_bitmap, ids, sync): +def test_ogr_openfilegdb_write_create_feature_with_id_set( + tmp_vsimem, has_bitmap, ids, sync +): - dirname = "/vsimem/out.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - assert ds is not None - lyr = 
ds.CreateLayer("test", geom_type=ogr.wkbNone) - lyr.CreateField(ogr.FieldDefn("int", ogr.OFTInteger)) - for id in ids: - if isinstance(id, tuple): - id, ok = id - else: - ok = True - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetFID(id) - if id < (1 << 31): - f.SetField(0, id) - if ok: - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - else: - with gdal.quiet_errors(): - assert lyr.CreateFeature(f) != ogr.OGRERR_NONE - if sync: - lyr.SyncToDisk() - ds = None + dirname = tmp_vsimem / "out.gdb" - if has_bitmap: - assert gdbtablx_has_bitmap(dirname + "/a00000009.gdbtablx") + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + assert ds is not None + lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone) + lyr.CreateField(ogr.FieldDefn("int", ogr.OFTInteger)) + for id in ids: + if isinstance(id, tuple): + id, ok = id + else: + ok = True + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetFID(id) + if id < (1 << 31): + f.SetField(0, id) + if ok: + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE else: - assert not gdbtablx_has_bitmap(dirname + "/a00000009.gdbtablx") + with gdal.quiet_errors(): + assert lyr.CreateFeature(f) != ogr.OGRERR_NONE + if sync: + lyr.SyncToDisk() + ds = None - # Check that everything has been written correctly - ds = ogr.Open(dirname) - lyr = ds.GetLayer(0) - ids_only = [] - for id in ids: - if isinstance(id, tuple): - id, ok = id - if ok: - ids_only.append(id) - else: + if has_bitmap: + assert gdbtablx_has_bitmap(f"{dirname}/a00000009.gdbtablx") + else: + assert not gdbtablx_has_bitmap(f"{dirname}/a00000009.gdbtablx") + + # Check that everything has been written correctly + ds = ogr.Open(dirname) + lyr = ds.GetLayer(0) + ids_only = [] + for id in ids: + if isinstance(id, tuple): + id, ok = id + if ok: ids_only.append(id) - for id in sorted(ids_only): - gdal.ErrorReset() - f = lyr.GetNextFeature() - assert gdal.GetLastErrorMsg() == "" - assert f.GetFID() == id - assert f[0] == id - assert lyr.GetNextFeature() is None - ds = None - finally: - gdal.RmdirRecursive(dirname) + else: + ids_only.append(id) + for id in sorted(ids_only): + gdal.ErrorReset() + f = lyr.GetNextFeature() + assert gdal.GetLastErrorMsg() == "" + assert f.GetFID() == id + assert f[0] == id + assert lyr.GetNextFeature() is None + ds = None ############################################################################### -def test_ogr_openfilegdb_write_delete_feature(): +def test_ogr_openfilegdb_write_delete_feature(tmp_vsimem): - dirname = "/vsimem/out.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) - assert lyr.CreateFeature(ogr.Feature(lyr.GetLayerDefn())) == ogr.OGRERR_NONE - assert lyr.CreateFeature(ogr.Feature(lyr.GetLayerDefn())) == ogr.OGRERR_NONE - assert lyr.DeleteFeature(1) == ogr.OGRERR_NONE - assert lyr.DeleteFeature(0) == ogr.OGRERR_NON_EXISTING_FEATURE - assert lyr.DeleteFeature(1) == ogr.OGRERR_NON_EXISTING_FEATURE - assert lyr.DeleteFeature(3) == ogr.OGRERR_NON_EXISTING_FEATURE - assert lyr.DeleteFeature(-1) == ogr.OGRERR_NON_EXISTING_FEATURE - ds = None + dirname = tmp_vsimem / "out.gdb" - ds = ogr.Open(dirname) - lyr = ds.GetLayer(0) - assert lyr.GetFeatureCount() == 1 - f = lyr.GetNextFeature() - assert f.GetFID() == 2 - ds = None + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) + assert lyr.CreateFeature(ogr.Feature(lyr.GetLayerDefn())) == ogr.OGRERR_NONE + assert lyr.CreateFeature(ogr.Feature(lyr.GetLayerDefn())) == 
ogr.OGRERR_NONE + assert lyr.DeleteFeature(1) == ogr.OGRERR_NONE + assert lyr.DeleteFeature(0) == ogr.OGRERR_NON_EXISTING_FEATURE + assert lyr.DeleteFeature(1) == ogr.OGRERR_NON_EXISTING_FEATURE + assert lyr.DeleteFeature(3) == ogr.OGRERR_NON_EXISTING_FEATURE + assert lyr.DeleteFeature(-1) == ogr.OGRERR_NON_EXISTING_FEATURE + ds = None - finally: - gdal.RmdirRecursive(dirname) + ds = ogr.Open(dirname) + lyr = ds.GetLayer(0) + assert lyr.GetFeatureCount() == 1 + f = lyr.GetNextFeature() + assert f.GetFID() == 2 + ds = None ############################################################################### -def test_ogr_openfilegdb_write_update_feature(): +def test_ogr_openfilegdb_write_update_feature(tmp_vsimem): - dirname = "/vsimem/out.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) - lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) + dirname = tmp_vsimem / "out.gdb" - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "one") - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) + lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetFID(3) - f.SetField("str", "three") - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "one") + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetFID(4) - f.SetField("str", "four") - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetFID(3) + f.SetField("str", "three") + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetFID(0) - assert lyr.SetFeature(f) == ogr.OGRERR_NON_EXISTING_FEATURE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetFID(4) + f.SetField("str", "four") + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetFID(5) - assert lyr.SetFeature(f) == ogr.OGRERR_NON_EXISTING_FEATURE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetFID(0) + assert lyr.SetFeature(f) == ogr.OGRERR_NON_EXISTING_FEATURE - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetFID(2) - assert lyr.SetFeature(f) == ogr.OGRERR_NON_EXISTING_FEATURE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetFID(5) + assert lyr.SetFeature(f) == ogr.OGRERR_NON_EXISTING_FEATURE - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetFID(1) - # rewrite same size - f.SetField("str", "ONE") - assert lyr.SetFeature(f) == ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetFID(2) + assert lyr.SetFeature(f) == ogr.OGRERR_NON_EXISTING_FEATURE - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetFID(4) - # larger feature - f.SetField("str", "four4") - assert lyr.SetFeature(f) == ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetFID(1) + # rewrite same size + f.SetField("str", "ONE") + assert lyr.SetFeature(f) == ogr.OGRERR_NONE - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetFID(3) - # smaller feature - f.SetField("str", "3") - assert lyr.SetFeature(f) == ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetFID(4) + # larger feature + f.SetField("str", "four4") + assert lyr.SetFeature(f) == ogr.OGRERR_NONE - ds = None + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetFID(3) + # smaller feature + f.SetField("str", "3") + assert lyr.SetFeature(f) == ogr.OGRERR_NONE - ds = ogr.Open(dirname) - lyr = ds.GetLayer(0) - assert lyr.GetFeatureCount() == 3 + ds = 
None - f = lyr.GetNextFeature() - assert f["str"] == "ONE" + ds = ogr.Open(dirname) + lyr = ds.GetLayer(0) + assert lyr.GetFeatureCount() == 3 - f = lyr.GetNextFeature() - assert f["str"] == "3" + f = lyr.GetNextFeature() + assert f["str"] == "ONE" - f = lyr.GetNextFeature() - assert f["str"] == "four4" - ds = None + f = lyr.GetNextFeature() + assert f["str"] == "3" - finally: - gdal.RmdirRecursive(dirname) + f = lyr.GetNextFeature() + assert f["str"] == "four4" + ds = None ############################################################################### -def test_ogr_openfilegdb_write_add_field_to_non_empty_table(): +def test_ogr_openfilegdb_write_add_field_to_non_empty_table(tmp_vsimem): - dirname = "/vsimem/out.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) - lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) + dirname = tmp_vsimem / "out.gdb" - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "one") - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = None + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) + lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "two") - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = None + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "one") + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = None - fld_defn = ogr.FieldDefn( - "cannot_add_non_nullable_field_without_default_val", ogr.OFTString - ) - fld_defn.SetNullable(False) - with gdal.quiet_errors(): - assert lyr.CreateField(fld_defn) != ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "two") + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = None - # No need to rewrite the file - assert lyr.CreateField(ogr.FieldDefn("str2", ogr.OFTString)) == ogr.OGRERR_NONE - assert lyr.CreateField(ogr.FieldDefn("str3", ogr.OFTString)) == ogr.OGRERR_NONE - assert lyr.CreateField(ogr.FieldDefn("str4", ogr.OFTString)) == ogr.OGRERR_NONE - assert lyr.CreateField(ogr.FieldDefn("str5", ogr.OFTString)) == ogr.OGRERR_NONE - assert lyr.CreateField(ogr.FieldDefn("str6", ogr.OFTString)) == ogr.OGRERR_NONE - assert lyr.CreateField(ogr.FieldDefn("str7", ogr.OFTString)) == ogr.OGRERR_NONE + fld_defn = ogr.FieldDefn( + "cannot_add_non_nullable_field_without_default_val", ogr.OFTString + ) + fld_defn.SetNullable(False) + with gdal.quiet_errors(): + assert lyr.CreateField(fld_defn) != ogr.OGRERR_NONE - assert lyr.SyncToDisk() == ogr.OGRERR_NONE + # No need to rewrite the file + assert lyr.CreateField(ogr.FieldDefn("str2", ogr.OFTString)) == ogr.OGRERR_NONE + assert lyr.CreateField(ogr.FieldDefn("str3", ogr.OFTString)) == ogr.OGRERR_NONE + assert lyr.CreateField(ogr.FieldDefn("str4", ogr.OFTString)) == ogr.OGRERR_NONE + assert lyr.CreateField(ogr.FieldDefn("str5", ogr.OFTString)) == ogr.OGRERR_NONE + assert lyr.CreateField(ogr.FieldDefn("str6", ogr.OFTString)) == ogr.OGRERR_NONE + assert lyr.CreateField(ogr.FieldDefn("str7", ogr.OFTString)) == ogr.OGRERR_NONE - ds = None + assert lyr.SyncToDisk() == ogr.OGRERR_NONE - ds = ogr.Open(dirname) - lyr = ds.GetLayer(0) - f = lyr.GetNextFeature() - assert f["str"] == "one" - assert f["str2"] is None - assert f["str7"] is None - f = lyr.GetNextFeature() - assert f["str"] == "two" - assert f["str2"] is None - assert f["str7"] is None - ds = None + ds = None - finally: - gdal.RmdirRecursive(dirname) + ds = 
ogr.Open(dirname) + lyr = ds.GetLayer(0) + f = lyr.GetNextFeature() + assert f["str"] == "one" + assert f["str2"] is None + assert f["str7"] is None + f = lyr.GetNextFeature() + assert f["str"] == "two" + assert f["str2"] is None + assert f["str7"] is None + ds = None ############################################################################### -def test_ogr_openfilegdb_write_add_field_to_non_empty_table_extra_nullable(): +def test_ogr_openfilegdb_write_add_field_to_non_empty_table_extra_nullable(tmp_vsimem): - dirname = "/vsimem/out.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) - lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) + dirname = tmp_vsimem / "out.gdb" - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "one") - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = None + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) + lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "two") - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = None + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "one") + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = None - assert lyr.CreateField(ogr.FieldDefn("str2", ogr.OFTString)) == ogr.OGRERR_NONE - assert lyr.CreateField(ogr.FieldDefn("str3", ogr.OFTString)) == ogr.OGRERR_NONE - assert lyr.CreateField(ogr.FieldDefn("str4", ogr.OFTString)) == ogr.OGRERR_NONE - assert lyr.CreateField(ogr.FieldDefn("str5", ogr.OFTString)) == ogr.OGRERR_NONE - assert lyr.CreateField(ogr.FieldDefn("str6", ogr.OFTString)) == ogr.OGRERR_NONE - assert lyr.CreateField(ogr.FieldDefn("str7", ogr.OFTString)) == ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "two") + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = None - # Will trigger a table rewrite - assert lyr.CreateField(ogr.FieldDefn("str8", ogr.OFTString)) == ogr.OGRERR_NONE + assert lyr.CreateField(ogr.FieldDefn("str2", ogr.OFTString)) == ogr.OGRERR_NONE + assert lyr.CreateField(ogr.FieldDefn("str3", ogr.OFTString)) == ogr.OGRERR_NONE + assert lyr.CreateField(ogr.FieldDefn("str4", ogr.OFTString)) == ogr.OGRERR_NONE + assert lyr.CreateField(ogr.FieldDefn("str5", ogr.OFTString)) == ogr.OGRERR_NONE + assert lyr.CreateField(ogr.FieldDefn("str6", ogr.OFTString)) == ogr.OGRERR_NONE + assert lyr.CreateField(ogr.FieldDefn("str7", ogr.OFTString)) == ogr.OGRERR_NONE - assert lyr.SyncToDisk() == ogr.OGRERR_NONE + # Will trigger a table rewrite + assert lyr.CreateField(ogr.FieldDefn("str8", ogr.OFTString)) == ogr.OGRERR_NONE - ds = None + assert lyr.SyncToDisk() == ogr.OGRERR_NONE - ds = ogr.Open(dirname) - lyr = ds.GetLayer(0) - f = lyr.GetNextFeature() - assert f["str"] == "one" - assert f["str2"] is None - assert f["str7"] is None - assert f["str8"] is None - f = lyr.GetNextFeature() - assert f["str"] == "two" - assert f["str2"] is None - assert f["str7"] is None - assert f["str8"] is None - ds = None + ds = None - finally: - gdal.RmdirRecursive(dirname) + ds = ogr.Open(dirname) + lyr = ds.GetLayer(0) + f = lyr.GetNextFeature() + assert f["str"] == "one" + assert f["str2"] is None + assert f["str7"] is None + assert f["str8"] is None + f = lyr.GetNextFeature() + assert f["str"] == "two" + assert f["str2"] is None + assert f["str7"] is None + assert f["str8"] is None + ds = None ############################################################################### 
@@ -1026,187 +1009,129 @@ def test_ogr_openfilegdb_write_add_field_to_non_empty_table_extra_nullable(): @pytest.mark.parametrize("options", modify_inplace_options) -@pytest.mark.parametrize( - "dirname", - ["/vsimem/out.gdb", "tmp/add_field_to_non_empty_table_extra_non_nullable.gdb"], -) +@pytest.mark.parametrize("location", ["vsimem", "disk"]) def test_ogr_openfilegdb_write_add_field_to_non_empty_table_extra_non_nullable( - options, dirname + options, location, tmp_path, tmp_vsimem ): - with gdaltest.config_options(options): - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) - lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) + if location == "vsimem": + dirname = tmp_vsimem / "out.gdb" + else: + dirname = tmp_path / "out.gdb" - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "one") - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = None + with gdaltest.config_options(options): + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) + lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "two") - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = None + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "one") + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = None - fld_defn = ogr.FieldDefn("str2", ogr.OFTString) - fld_defn.SetNullable(False) - fld_defn.SetDefault("'default val'") - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "two") + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = None - fld_defn = ogr.FieldDefn("int16", ogr.OFTInteger) - fld_defn.SetSubType(ogr.OFSTInt16) - fld_defn.SetNullable(False) - fld_defn.SetDefault("-32768") - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + fld_defn = ogr.FieldDefn("str2", ogr.OFTString) + fld_defn.SetNullable(False) + fld_defn.SetDefault("'default val'") + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - fld_defn = ogr.FieldDefn("int32", ogr.OFTInteger) - fld_defn.SetNullable(False) - fld_defn.SetDefault("123456789") - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + fld_defn = ogr.FieldDefn("int16", ogr.OFTInteger) + fld_defn.SetSubType(ogr.OFSTInt16) + fld_defn.SetNullable(False) + fld_defn.SetDefault("-32768") + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - fld_defn = ogr.FieldDefn("float32", ogr.OFTReal) - fld_defn.SetSubType(ogr.OFSTFloat32) - fld_defn.SetNullable(False) - fld_defn.SetDefault("1.25") - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + fld_defn = ogr.FieldDefn("int32", ogr.OFTInteger) + fld_defn.SetNullable(False) + fld_defn.SetDefault("123456789") + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - fld_defn = ogr.FieldDefn("float64", ogr.OFTReal) - fld_defn.SetNullable(False) - fld_defn.SetDefault("1.23456789") - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + fld_defn = ogr.FieldDefn("float32", ogr.OFTReal) + fld_defn.SetSubType(ogr.OFSTFloat32) + fld_defn.SetNullable(False) + fld_defn.SetDefault("1.25") + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - fld_defn = ogr.FieldDefn("dt", ogr.OFTDateTime) - fld_defn.SetNullable(False) - fld_defn.SetDefault("'2022-11-04T12:34:56+02:00'") - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + fld_defn = ogr.FieldDefn("float64", ogr.OFTReal) + fld_defn.SetNullable(False) + fld_defn.SetDefault("1.23456789") + assert lyr.CreateField(fld_defn) 
== ogr.OGRERR_NONE - fld_defn = ogr.FieldDefn("dt_invalid_default", ogr.OFTDateTime) - fld_defn.SetDefault("'foo'") - with gdal.quiet_errors(): - assert lyr.CreateField(fld_defn, False) == ogr.OGRERR_FAILURE - assert gdal.GetLastErrorMsg() == "Cannot parse foo as a date time" + fld_defn = ogr.FieldDefn("dt", ogr.OFTDateTime) + fld_defn.SetNullable(False) + fld_defn.SetDefault("'2022-11-04T12:34:56+02:00'") + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - fld_defn = ogr.FieldDefn("dt_CURRENT_TIMESTAMP", ogr.OFTDateTime) - fld_defn.SetDefault("CURRENT_TIMESTAMP") - with gdal.quiet_errors(): - assert lyr.CreateField(fld_defn, False) == ogr.OGRERR_FAILURE - assert ( - gdal.GetLastErrorMsg() - == "CURRENT_TIMESTAMP is not supported as a default value in File Geodatabase" - ) - - fld_defn = ogr.FieldDefn("dt_CURRENT_TIMESTAMP_2", ogr.OFTDateTime) - fld_defn.SetDefault("CURRENT_TIMESTAMP") - with gdal.quiet_errors(): - assert lyr.CreateField(fld_defn, True) == ogr.OGRERR_NONE - assert ( - gdal.GetLastErrorMsg() - == "CURRENT_TIMESTAMP is not supported as a default value in File Geodatabase" - ) + fld_defn = ogr.FieldDefn("dt_invalid_default", ogr.OFTDateTime) + fld_defn.SetDefault("'foo'") + with gdal.quiet_errors(): + assert lyr.CreateField(fld_defn, False) == ogr.OGRERR_FAILURE + assert gdal.GetLastErrorMsg() == "Cannot parse foo as a date time" - assert lyr.SyncToDisk() == ogr.OGRERR_NONE + fld_defn = ogr.FieldDefn("dt_CURRENT_TIMESTAMP", ogr.OFTDateTime) + fld_defn.SetDefault("CURRENT_TIMESTAMP") + with gdal.quiet_errors(): + assert lyr.CreateField(fld_defn, False) == ogr.OGRERR_FAILURE + assert ( + gdal.GetLastErrorMsg() + == "CURRENT_TIMESTAMP is not supported as a default value in File Geodatabase" + ) - ds = None + fld_defn = ogr.FieldDefn("dt_CURRENT_TIMESTAMP_2", ogr.OFTDateTime) + fld_defn.SetDefault("CURRENT_TIMESTAMP") + with gdal.quiet_errors(): + assert lyr.CreateField(fld_defn, True) == ogr.OGRERR_NONE + assert ( + gdal.GetLastErrorMsg() + == "CURRENT_TIMESTAMP is not supported as a default value in File Geodatabase" + ) - assert gdal.VSIStatL(dirname + "/a00000009.gdbtable.backup") is None - assert gdal.VSIStatL(dirname + "/a00000009.gdbtablx.backup") is None - assert gdal.VSIStatL(dirname + "/a00000009.gdbtable.compress") is None - assert gdal.VSIStatL(dirname + "/a00000009.gdbtablx.compress") is None + assert lyr.SyncToDisk() == ogr.OGRERR_NONE - ds = ogr.Open(dirname) - lyr = ds.GetLayer(0) - f = lyr.GetNextFeature() - assert f["str"] == "one" - assert f["str2"] == "default val" - assert f["int16"] == -32768 - assert f["int32"] == 123456789 - assert f["float32"] == 1.25 - assert f["float64"] == 1.23456789 - assert f["dt"] == "2022/11/04 10:34:56+00" - assert f.IsFieldNull("dt_CURRENT_TIMESTAMP_2") - f = lyr.GetNextFeature() - assert f["str"] == "two" - assert f["str2"] == "default val" - ds = None + ds = None + + assert gdal.VSIStatL(dirname / "a00000009.gdbtable.backup") is None + assert gdal.VSIStatL(dirname / "a00000009.gdbtablx.backup") is None + assert gdal.VSIStatL(dirname / "a00000009.gdbtable.compress") is None + assert gdal.VSIStatL(dirname / "a00000009.gdbtablx.compress") is None - finally: - gdal.RmdirRecursive(dirname) + ds = ogr.Open(dirname) + lyr = ds.GetLayer(0) + f = lyr.GetNextFeature() + assert f["str"] == "one" + assert f["str2"] == "default val" + assert f["int16"] == -32768 + assert f["int32"] == 123456789 + assert f["float32"] == 1.25 + assert f["float64"] == 1.23456789 + assert f["dt"] == "2022/11/04 10:34:56+00" + assert 
f.IsFieldNull("dt_CURRENT_TIMESTAMP_2") + f = lyr.GetNextFeature() + assert f["str"] == "two" + assert f["str2"] == "default val" + ds = None ############################################################################### @pytest.mark.parametrize("options", modify_inplace_options) -@pytest.mark.parametrize( - "dirname", - [ - "/vsimem/out.gdb", - "tmp/add_field_to_non_empty_table_extra_non_nullable_simul_error.gdb", - ], -) +@pytest.mark.parametrize("location", ["vsimem", "disk"]) def test_ogr_openfilegdb_write_add_field_to_non_empty_table_extra_non_nullable_simul_error( - options, dirname + location, options, tmp_path, tmp_vsimem ): - with gdaltest.config_options(options): - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) - lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) - - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "one") - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = None - - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "two") - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = None - - fld_defn = ogr.FieldDefn("str2", ogr.OFTString) - fld_defn.SetNullable(False) - fld_defn.SetDefault("'default val'") - with gdal.quiet_errors(): - with gdaltest.config_option( - "OPENFILEGDB_SIMUL_ERROR_IN_RewriteTableToAddLastAddedField", "TRUE" - ): - assert lyr.CreateField(fld_defn) != ogr.OGRERR_NONE - - ds = None - - assert gdal.VSIStatL(dirname + "/a00000009.gdbtable.backup") is None - assert gdal.VSIStatL(dirname + "/a00000009.gdbtablx.backup") is None - assert gdal.VSIStatL(dirname + "/a00000009.gdbtable.compress") is None - assert gdal.VSIStatL(dirname + "/a00000009.gdbtablx.compress") is None - - ds = ogr.Open(dirname) - lyr = ds.GetLayer(0) - assert lyr.GetLayerDefn().GetFieldCount() == 1 - f = lyr.GetNextFeature() - assert f["str"] == "one" - f = lyr.GetNextFeature() - assert f["str"] == "two" - ds = None - - finally: - gdal.RmdirRecursive(dirname) - - -############################################################################### - + if location == "vsimem": + dirname = tmp_vsimem / "out.gdb" + else: + dirname = tmp_path / "out.gdb" -def test_ogr_openfilegdb_write_add_field_after_reopening(): - - dirname = "/vsimem/out.gdb" - try: + with gdaltest.config_options(options): ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - assert ds is not None - ds = None - - ds = ogr.Open(dirname, update=1) lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) @@ -1220,36 +1145,84 @@ def test_ogr_openfilegdb_write_add_field_after_reopening(): assert lyr.CreateFeature(f) == ogr.OGRERR_NONE f = None - ds = None + fld_defn = ogr.FieldDefn("str2", ogr.OFTString) + fld_defn.SetNullable(False) + fld_defn.SetDefault("'default val'") + with gdal.quiet_errors(): + with gdaltest.config_option( + "OPENFILEGDB_SIMUL_ERROR_IN_RewriteTableToAddLastAddedField", "TRUE" + ): + assert lyr.CreateField(fld_defn) != ogr.OGRERR_NONE - ds = ogr.Open(dirname, update=1) - lyr = ds.GetLayer(0) - assert lyr.CreateField(ogr.FieldDefn("str2", ogr.OFTString)) == ogr.OGRERR_NONE ds = None + assert gdal.VSIStatL(dirname / "a00000009.gdbtable.backup") is None + assert gdal.VSIStatL(dirname / "a00000009.gdbtablx.backup") is None + assert gdal.VSIStatL(dirname / "a00000009.gdbtable.compress") is None + assert gdal.VSIStatL(dirname / "a00000009.gdbtablx.compress") is None + ds = ogr.Open(dirname) lyr = ds.GetLayer(0) - assert 
lyr.GetLayerDefn().GetFieldCount() == 2 + assert lyr.GetLayerDefn().GetFieldCount() == 1 f = lyr.GetNextFeature() assert f["str"] == "one" - assert f["str2"] is None f = lyr.GetNextFeature() assert f["str"] == "two" + ds = None - sql_lyr = ds.ExecuteSQL("GetLayerDefinition test") - assert sql_lyr - f = sql_lyr.GetNextFeature() - xml = f.GetField(0) - f = None - ds.ReleaseResultSet(sql_lyr) - assert "<Name>str</Name>" in xml - assert "<Name>str2</Name>" in xml +############################################################################### - ds = None - finally: - gdal.RmdirRecursive(dirname) +def test_ogr_openfilegdb_write_add_field_after_reopening(tmp_vsimem): + + dirname = tmp_vsimem / "out.gdb" + + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + assert ds is not None + ds = None + + ds = ogr.Open(dirname, update=1) + lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) + lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) + + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "one") + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = None + + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "two") + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = None + + ds = None + + ds = ogr.Open(dirname, update=1) + lyr = ds.GetLayer(0) + assert lyr.CreateField(ogr.FieldDefn("str2", ogr.OFTString)) == ogr.OGRERR_NONE + ds = None + + ds = ogr.Open(dirname) + lyr = ds.GetLayer(0) + assert lyr.GetLayerDefn().GetFieldCount() == 2 + f = lyr.GetNextFeature() + assert f["str"] == "one" + assert f["str2"] is None + f = lyr.GetNextFeature() + assert f["str"] == "two" + + sql_lyr = ds.ExecuteSQL("GetLayerDefinition test") + assert sql_lyr + f = sql_lyr.GetNextFeature() + xml = f.GetField(0) + f = None + ds.ReleaseResultSet(sql_lyr) + + assert "<Name>str</Name>" in xml + assert "<Name>str2</Name>" in xml + + ds = None ############################################################################### @@ -1257,246 +1230,234 @@ def test_ogr_openfilegdb_write_add_field_after_reopening(): @pytest.mark.parametrize("use_synctodisk", [False, True]) @pytest.mark.parametrize("field_to_delete", [0, 1]) -def test_ogr_openfilegdb_write_delete_field(use_synctodisk, field_to_delete): +def test_ogr_openfilegdb_write_delete_field( + tmp_vsimem, use_synctodisk, field_to_delete +): - dirname = "/vsimem/out.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) + dirname = tmp_vsimem / "out.gdb" - assert lyr.CreateField(ogr.FieldDefn("str1", ogr.OFTString)) == ogr.OGRERR_NONE + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) - fld_defn = ogr.FieldDefn("str2", ogr.OFTString) - fld_defn.SetNullable(False) - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + assert lyr.CreateField(ogr.FieldDefn("str1", ogr.OFTString)) == ogr.OGRERR_NONE - assert lyr.CreateField(ogr.FieldDefn("str3", ogr.OFTString)) == ogr.OGRERR_NONE + fld_defn = ogr.FieldDefn("str2", ogr.OFTString) + fld_defn.SetNullable(False) + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str1", "str1_1") - f.SetField("str2", "str2_1") - f.SetField("str3", "str3_1") - f.SetGeometry(ogr.CreateGeometryFromWkt("POINT(1 2)")) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = None + assert lyr.CreateField(ogr.FieldDefn("str3", ogr.OFTString)) == ogr.OGRERR_NONE - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str1", "str1_2") - 
f.SetField("str2", "str2_2") - f.SetField("str3", "str3_2") - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = None + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str1", "str1_1") + f.SetField("str2", "str2_1") + f.SetField("str3", "str3_1") + f.SetGeometry(ogr.CreateGeometryFromWkt("POINT(1 2)")) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = None - if use_synctodisk: - assert lyr.SyncToDisk() == ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str1", "str1_2") + f.SetField("str2", "str2_2") + f.SetField("str3", "str3_2") + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = None - assert lyr.DeleteField(field_to_delete) == ogr.OGRERR_NONE - assert lyr.GetLayerDefn().GetFieldCount() == 2 + if use_synctodisk: + assert lyr.SyncToDisk() == ogr.OGRERR_NONE - if field_to_delete == 0: - other_field = "str2" - else: - other_field = "str1" + assert lyr.DeleteField(field_to_delete) == ogr.OGRERR_NONE + assert lyr.GetLayerDefn().GetFieldCount() == 2 - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField(other_field, "str2_3") - f.SetField("str3", "str3_3") - f.SetGeometry(ogr.CreateGeometryFromWkt("POINT(2 3)")) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = None + if field_to_delete == 0: + other_field = "str2" + else: + other_field = "str1" - def check_values(lyr): - f = lyr.GetNextFeature() - assert f[other_field].endswith("_1") - assert f["str3"] == "str3_1" - assert f.GetGeometryRef() is not None - f = None + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField(other_field, "str2_3") + f.SetField("str3", "str3_3") + f.SetGeometry(ogr.CreateGeometryFromWkt("POINT(2 3)")) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = None - f = lyr.GetNextFeature() - assert f[other_field].endswith("_2") - assert f["str3"] == "str3_2" - assert f.GetGeometryRef() is None - f = None + def check_values(lyr): + f = lyr.GetNextFeature() + assert f[other_field].endswith("_1") + assert f["str3"] == "str3_1" + assert f.GetGeometryRef() is not None + f = None - f = lyr.GetNextFeature() - assert f[other_field].endswith("_3") - assert f["str3"] == "str3_3" - assert f.GetGeometryRef() is not None + f = lyr.GetNextFeature() + assert f[other_field].endswith("_2") + assert f["str3"] == "str3_2" + assert f.GetGeometryRef() is None + f = None - check_values(lyr) + f = lyr.GetNextFeature() + assert f[other_field].endswith("_3") + assert f["str3"] == "str3_3" + assert f.GetGeometryRef() is not None - assert lyr.SyncToDisk() == ogr.OGRERR_NONE + check_values(lyr) - lyr.ResetReading() - check_values(lyr) + assert lyr.SyncToDisk() == ogr.OGRERR_NONE - ds = None + lyr.ResetReading() + check_values(lyr) - ds = ogr.Open(dirname) - lyr = ds.GetLayer(0) + ds = None - check_values(lyr) + ds = ogr.Open(dirname) + lyr = ds.GetLayer(0) - sql_lyr = ds.ExecuteSQL("GetLayerDefinition test") - assert sql_lyr - f = sql_lyr.GetNextFeature() - xml = f.GetField(0) - f = None - ds.ReleaseResultSet(sql_lyr) + check_values(lyr) - if field_to_delete == 0: - assert "<Name>str1</Name>" not in xml - assert "<Name>str2</Name>" in xml - else: - assert "<Name>str1</Name>" in xml - assert "<Name>str2</Name>" not in xml - assert "<Name>str3</Name>" in xml + sql_lyr = ds.ExecuteSQL("GetLayerDefinition test") + assert sql_lyr + f = sql_lyr.GetNextFeature() + xml = f.GetField(0) + f = None + ds.ReleaseResultSet(sql_lyr) - ds = None + if field_to_delete == 0: + assert "<Name>str1</Name>" not in xml + assert "<Name>str2</Name>" in xml + else: + assert "<Name>str1</Name>" in xml + assert "<Name>str2</Name>" not in xml + 
assert "<Name>str3</Name>" in xml - finally: - gdal.RmdirRecursive(dirname) + ds = None ############################################################################### -def test_ogr_openfilegdb_write_delete_field_before_geom(): +def test_ogr_openfilegdb_write_delete_field_before_geom(tmp_vsimem): - dirname = "/vsimem/out.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + dirname = tmp_vsimem / "out.gdb" - with gdaltest.config_option("OPENFILEGDB_CREATE_FIELD_BEFORE_GEOMETRY", "YES"): - lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) - - assert lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) == ogr.OGRERR_NONE + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("field_before_geom", "to be deleted") - f.SetField("str", "foo") - f.SetGeometry(ogr.CreateGeometryFromWkt("POINT(1 2)")) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = None + with gdaltest.config_option("OPENFILEGDB_CREATE_FIELD_BEFORE_GEOMETRY", "YES"): + lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) - assert ( - lyr.DeleteField(lyr.GetLayerDefn().GetFieldIndex("field_before_geom")) - == ogr.OGRERR_NONE - ) + assert lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) == ogr.OGRERR_NONE - lyr.ResetReading() - f = lyr.GetNextFeature() - assert f.GetField("str") == "foo" - assert f.GetGeometryRef() is not None + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("field_before_geom", "to be deleted") + f.SetField("str", "foo") + f.SetGeometry(ogr.CreateGeometryFromWkt("POINT(1 2)")) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = None + + assert ( + lyr.DeleteField(lyr.GetLayerDefn().GetFieldIndex("field_before_geom")) + == ogr.OGRERR_NONE + ) - ds = None + lyr.ResetReading() + f = lyr.GetNextFeature() + assert f.GetField("str") == "foo" + assert f.GetGeometryRef() is not None - finally: - gdal.RmdirRecursive(dirname) + ds = None ############################################################################### -def test_ogr_openfilegdb_write_feature_dataset_no_crs(): +def test_ogr_openfilegdb_write_feature_dataset_no_crs(tmp_vsimem): - dirname = "/vsimem/out.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - assert ds is not None - ds = ogr.Open(dirname, update=1) - lyr = ds.CreateLayer( - "test", - geom_type=ogr.wkbPoint, - options=["FEATURE_DATASET=my_feature_dataset"], - ) - assert lyr is not None - lyr = ds.CreateLayer( - "test2", - geom_type=ogr.wkbPoint, - options=["FEATURE_DATASET=my_feature_dataset"], - ) - assert lyr is not None - ds = None + dirname = tmp_vsimem / "out.gdb" - ds = gdal.OpenEx(dirname) - rg = ds.GetRootGroup() + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + assert ds is not None + ds = ogr.Open(dirname, update=1) + lyr = ds.CreateLayer( + "test", + geom_type=ogr.wkbPoint, + options=["FEATURE_DATASET=my_feature_dataset"], + ) + assert lyr is not None + lyr = ds.CreateLayer( + "test2", + geom_type=ogr.wkbPoint, + options=["FEATURE_DATASET=my_feature_dataset"], + ) + assert lyr is not None + ds = None - assert rg.GetGroupNames() == ["my_feature_dataset"] + ds = gdal.OpenEx(dirname) + rg = ds.GetRootGroup() - fd = rg.OpenGroup("my_feature_dataset") - assert fd is not None - assert fd.GetVectorLayerNames() == ["test", "test2"] + assert rg.GetGroupNames() == ["my_feature_dataset"] - lyr = ds.GetLayerByName("GDB_Items") - assert ( - lyr.GetFeatureCount() == 5 - ) # == root, workspace, feature dataset, 2 layers + fd = 
rg.OpenGroup("my_feature_dataset") + assert fd is not None + assert fd.GetVectorLayerNames() == ["test", "test2"] - lyr = ds.GetLayerByName("GDB_ItemRelationships") - assert lyr.GetFeatureCount() == 3 # == feature dataset, 2 layers + lyr = ds.GetLayerByName("GDB_Items") + assert lyr.GetFeatureCount() == 5 # == root, workspace, feature dataset, 2 layers - finally: - gdal.RmdirRecursive(dirname) + lyr = ds.GetLayerByName("GDB_ItemRelationships") + assert lyr.GetFeatureCount() == 3 # == feature dataset, 2 layers ############################################################################### -def test_ogr_openfilegdb_write_feature_dataset_crs(): +def test_ogr_openfilegdb_write_feature_dataset_crs(tmp_vsimem): - dirname = "/vsimem/out.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + dirname = tmp_vsimem / "out.gdb" - srs = osr.SpatialReference() - srs.ImportFromEPSG(4326) + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - lyr = ds.CreateLayer( - "test", - geom_type=ogr.wkbPoint, - srs=srs, - options=["FEATURE_DATASET=my_feature_dataset"], - ) - assert lyr is not None + srs = osr.SpatialReference() + srs.ImportFromEPSG(4326) - lyr = ds.CreateLayer( - "test2", - geom_type=ogr.wkbPoint, - srs=srs, - options=["FEATURE_DATASET=my_feature_dataset"], - ) - assert lyr is not None + lyr = ds.CreateLayer( + "test", + geom_type=ogr.wkbPoint, + srs=srs, + options=["FEATURE_DATASET=my_feature_dataset"], + ) + assert lyr is not None + + lyr = ds.CreateLayer( + "test2", + geom_type=ogr.wkbPoint, + srs=srs, + options=["FEATURE_DATASET=my_feature_dataset"], + ) + assert lyr is not None + + lyr = ds.CreateLayer( + "inherited_srs", + geom_type=ogr.wkbPoint, + options=["FEATURE_DATASET=my_feature_dataset"], + ) + assert lyr is not None + + other_srs = osr.SpatialReference() + other_srs.ImportFromEPSG(4269) + with gdal.quiet_errors(): lyr = ds.CreateLayer( - "inherited_srs", + "other_srs", geom_type=ogr.wkbPoint, + srs=other_srs, options=["FEATURE_DATASET=my_feature_dataset"], ) - assert lyr is not None - - other_srs = osr.SpatialReference() - other_srs.ImportFromEPSG(4269) - - with gdal.quiet_errors(): - lyr = ds.CreateLayer( - "other_srs", - geom_type=ogr.wkbPoint, - srs=other_srs, - options=["FEATURE_DATASET=my_feature_dataset"], - ) - assert lyr is None - - ds = None + assert lyr is None - ds = gdal.OpenEx(dirname) - lyr = ds.GetLayerByName("inherited_srs") - srs = lyr.GetSpatialRef() - assert srs is not None - assert srs.GetAuthorityCode(None) == "4326" + ds = None - finally: - gdal.RmdirRecursive(dirname) + ds = gdal.OpenEx(dirname) + lyr = ds.GetLayerByName("inherited_srs") + srs = lyr.GetSpatialRef() + assert srs is not None + assert srs.GetAuthorityCode(None) == "4326" ############################################################################### @@ -1531,344 +1492,334 @@ def test_ogr_openfilegdb_write_feature_dataset_crs(): # (340*341, None), # depth 2 # a bit too slow for unit tests ], ) -def test_ogr_openfilegdb_write_spatial_index(numPoints, maxFeaturesPerSpxPage): +def test_ogr_openfilegdb_write_spatial_index( + tmp_vsimem, numPoints, maxFeaturesPerSpxPage +): - dirname = "/vsimem/out.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - lyr = ds.CreateLayer("points", geom_type=ogr.wkbPoint) - for j in range(numPoints): - feat = ogr.Feature(lyr.GetLayerDefn()) - geom = ogr.CreateGeometryFromWkt("POINT(%d %d)" % (j, j)) - feat.SetGeometry(geom) - lyr.CreateFeature(feat) - with gdaltest.config_option( - 
"OPENFILEGDB_MAX_FEATURES_PER_SPX_PAGE", - str(maxFeaturesPerSpxPage) if maxFeaturesPerSpxPage else None, - ): - if maxFeaturesPerSpxPage == 2 and numPoints > 30: - with gdal.quiet_errors(): - gdal.ErrorReset() - lyr.SyncToDisk() - assert gdal.GetLastErrorMsg() != "" - else: - gdal.ErrorReset() - lyr.SyncToDisk() - assert gdal.GetLastErrorMsg() == "" - ds = None + dirname = tmp_vsimem / "out.gdb" - ds = ogr.Open(dirname) - lyr = ds.GetLayer(0) - if numPoints > 1000: - j = 0 - lyr.SetSpatialFilterRect(j - 0.1, j - 0.1, j + 0.1, j + 0.1) - lyr.ResetReading() - f = lyr.GetNextFeature() - assert f is not None + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + lyr = ds.CreateLayer("points", geom_type=ogr.wkbPoint) + for j in range(numPoints): + feat = ogr.Feature(lyr.GetLayerDefn()) + geom = ogr.CreateGeometryFromWkt("POINT(%d %d)" % (j, j)) + feat.SetGeometry(geom) + lyr.CreateFeature(feat) + with gdaltest.config_option( + "OPENFILEGDB_MAX_FEATURES_PER_SPX_PAGE", + str(maxFeaturesPerSpxPage) if maxFeaturesPerSpxPage else None, + ): + if maxFeaturesPerSpxPage == 2 and numPoints > 30: + with gdal.quiet_errors(): + gdal.ErrorReset() + lyr.SyncToDisk() + assert gdal.GetLastErrorMsg() != "" + else: + gdal.ErrorReset() + lyr.SyncToDisk() + assert gdal.GetLastErrorMsg() == "" + ds = None - j = numPoints - 1 + ds = ogr.Open(dirname) + lyr = ds.GetLayer(0) + if numPoints > 1000: + j = 0 + lyr.SetSpatialFilterRect(j - 0.1, j - 0.1, j + 0.1, j + 0.1) + lyr.ResetReading() + f = lyr.GetNextFeature() + assert f is not None + + j = numPoints - 1 + lyr.SetSpatialFilterRect(j - 0.1, j - 0.1, j + 0.1, j + 0.1) + lyr.ResetReading() + f = lyr.GetNextFeature() + assert f is not None + else: + for j in range(numPoints): lyr.SetSpatialFilterRect(j - 0.1, j - 0.1, j + 0.1, j + 0.1) lyr.ResetReading() f = lyr.GetNextFeature() - assert f is not None - else: - for j in range(numPoints): - lyr.SetSpatialFilterRect(j - 0.1, j - 0.1, j + 0.1, j + 0.1) - lyr.ResetReading() - f = lyr.GetNextFeature() - assert f is not None, j - ds = None - - finally: - gdal.RmdirRecursive(dirname) + assert f is not None, j + ds = None ############################################################################### -def test_ogr_openfilegdb_write_attribute_index(): +def test_ogr_openfilegdb_write_attribute_index(tmp_vsimem): - dirname = "/vsimem/out.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) - fld_defn = ogr.FieldDefn("int16", ogr.OFTInteger) - fld_defn.SetSubType(ogr.OFSTInt16) - lyr.CreateField(fld_defn) - fld_defn = ogr.FieldDefn("int32", ogr.OFTInteger) - lyr.CreateField(fld_defn) - fld_defn = ogr.FieldDefn("float32", ogr.OFTReal) - fld_defn.SetSubType(ogr.OFSTFloat32) - lyr.CreateField(fld_defn) - fld_defn = ogr.FieldDefn("float64", ogr.OFTReal) - lyr.CreateField(fld_defn) - fld_defn = ogr.FieldDefn("str", ogr.OFTString) - lyr.CreateField(fld_defn) - fld_defn = ogr.FieldDefn("lower_str", ogr.OFTString) - lyr.CreateField(fld_defn) - fld_defn = ogr.FieldDefn("dt", ogr.OFTDateTime) - lyr.CreateField(fld_defn) + dirname = tmp_vsimem / "out.gdb" - f = ogr.Feature(lyr.GetLayerDefn()) - f["int16"] = -1234 - f["int32"] = -12346789 - f["float32"] = 1.25 - f["float64"] = 1.256789 - f["str"] = "my str" - f["lower_str"] = "MY STR" - f["dt"] = "2022-06-03T16:06:00Z" - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) + 
fld_defn = ogr.FieldDefn("int16", ogr.OFTInteger) + fld_defn.SetSubType(ogr.OFSTInt16) + lyr.CreateField(fld_defn) + fld_defn = ogr.FieldDefn("int32", ogr.OFTInteger) + lyr.CreateField(fld_defn) + fld_defn = ogr.FieldDefn("float32", ogr.OFTReal) + fld_defn.SetSubType(ogr.OFSTFloat32) + lyr.CreateField(fld_defn) + fld_defn = ogr.FieldDefn("float64", ogr.OFTReal) + lyr.CreateField(fld_defn) + fld_defn = ogr.FieldDefn("str", ogr.OFTString) + lyr.CreateField(fld_defn) + fld_defn = ogr.FieldDefn("lower_str", ogr.OFTString) + lyr.CreateField(fld_defn) + fld_defn = ogr.FieldDefn("dt", ogr.OFTDateTime) + lyr.CreateField(fld_defn) - f = ogr.Feature(lyr.GetLayerDefn()) - f["str"] = "x" * 100 - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f["int16"] = -1234 + f["int32"] = -12346789 + f["float32"] = 1.25 + f["float64"] = 1.256789 + f["str"] = "my str" + f["lower_str"] = "MY STR" + f["dt"] = "2022-06-03T16:06:00Z" + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = ogr.Feature(lyr.GetLayerDefn()) - f["str"] = ("x" * 100) + "y" - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = None + f = ogr.Feature(lyr.GetLayerDefn()) + f["str"] = "x" * 100 + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - # Errors of index creation - with gdal.quiet_errors(): - gdal.ErrorReset() - ds.ExecuteSQL("CREATE INDEX this_name_is_wayyyyy_tooo_long ON test(int16)") - assert gdal.GetLastErrorMsg() != "" + f = ogr.Feature(lyr.GetLayerDefn()) + f["str"] = ("x" * 100) + "y" + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = None - gdal.ErrorReset() - ds.ExecuteSQL("CREATE INDEX idx_int16 ON non_existing_layer(int16)") - assert gdal.GetLastErrorMsg() != "" + # Errors of index creation + with gdal.quiet_errors(): + gdal.ErrorReset() + ds.ExecuteSQL("CREATE INDEX this_name_is_wayyyyy_tooo_long ON test(int16)") + assert gdal.GetLastErrorMsg() != "" - gdal.ErrorReset() - ds.ExecuteSQL("CREATE INDEX invalid_field ON test(invalid_field)") - assert gdal.GetLastErrorMsg() != "" + gdal.ErrorReset() + ds.ExecuteSQL("CREATE INDEX idx_int16 ON non_existing_layer(int16)") + assert gdal.GetLastErrorMsg() != "" - # Reserved keyword - gdal.ErrorReset() - ds.ExecuteSQL("CREATE INDEX SELECT ON test(int16)") - assert gdal.GetLastErrorMsg() != "" + gdal.ErrorReset() + ds.ExecuteSQL("CREATE INDEX invalid_field ON test(invalid_field)") + assert gdal.GetLastErrorMsg() != "" - gdal.ErrorReset() - ds.ExecuteSQL("CREATE INDEX _starting_by_ ON test(int16)") - assert gdal.GetLastErrorMsg() != "" + # Reserved keyword + gdal.ErrorReset() + ds.ExecuteSQL("CREATE INDEX SELECT ON test(int16)") + assert gdal.GetLastErrorMsg() != "" - gdal.ErrorReset() - ds.ExecuteSQL("CREATE INDEX a&b ON test(int16)") - assert gdal.GetLastErrorMsg() != "" + gdal.ErrorReset() + ds.ExecuteSQL("CREATE INDEX _starting_by_ ON test(int16)") + assert gdal.GetLastErrorMsg() != "" - # Create indexes gdal.ErrorReset() - for i in range(lyr.GetLayerDefn().GetFieldCount()): - fld_name = lyr.GetLayerDefn().GetFieldDefn(i).GetName() - if fld_name == "lower_str": - ds.ExecuteSQL( - "CREATE INDEX idx_%s ON test(LOWER(%s))" % (fld_name, fld_name) - ) - else: - ds.ExecuteSQL("CREATE INDEX idx_%s ON test(%s)" % (fld_name, fld_name)) - assert gdal.GetLastErrorMsg() == "" - assert ( - gdal.VSIStatL(dirname + "/a00000009.idx_" + fld_name + ".atx") - is not None + ds.ExecuteSQL("CREATE INDEX a&b ON test(int16)") + assert gdal.GetLastErrorMsg() != "" + + # Create indexes + gdal.ErrorReset() + for i in range(lyr.GetLayerDefn().GetFieldCount()): + 
fld_name = lyr.GetLayerDefn().GetFieldDefn(i).GetName() + if fld_name == "lower_str": + ds.ExecuteSQL( + "CREATE INDEX idx_%s ON test(LOWER(%s))" % (fld_name, fld_name) ) + else: + ds.ExecuteSQL("CREATE INDEX idx_%s ON test(%s)" % (fld_name, fld_name)) + assert gdal.GetLastErrorMsg() == "" + assert gdal.VSIStatL(dirname / f"a00000009.idx_{fld_name}.atx") is not None - fld_defn = ogr.FieldDefn("unindexed", ogr.OFTString) - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + fld_defn = ogr.FieldDefn("unindexed", ogr.OFTString) + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - with gdal.quiet_errors(): - # Re-using an index name - gdal.ErrorReset() - ds.ExecuteSQL("CREATE INDEX idx_int16 ON test(unindexed)") - assert gdal.GetLastErrorMsg() != "" + with gdal.quiet_errors(): + # Re-using an index name + gdal.ErrorReset() + ds.ExecuteSQL("CREATE INDEX idx_int16 ON test(unindexed)") + assert gdal.GetLastErrorMsg() != "" - # Trying to index twice a field - gdal.ErrorReset() - ds.ExecuteSQL("CREATE INDEX int16_again ON test(int16)") - assert gdal.GetLastErrorMsg() != "" + # Trying to index twice a field + gdal.ErrorReset() + ds.ExecuteSQL("CREATE INDEX int16_again ON test(int16)") + assert gdal.GetLastErrorMsg() != "" - gdal.ErrorReset() - ds.ExecuteSQL("CREATE INDEX lower_str_again ON test(lower_str)") - assert gdal.GetLastErrorMsg() != "" + gdal.ErrorReset() + ds.ExecuteSQL("CREATE INDEX lower_str_again ON test(lower_str)") + assert gdal.GetLastErrorMsg() != "" - ds = None + ds = None - def check_index_fully_used(ds, lyr): - sql_lyr = ds.ExecuteSQL("GetLayerAttrIndexUse " + lyr.GetName()) - attr_index_use = int(sql_lyr.GetNextFeature().GetField(0)) - ds.ReleaseResultSet(sql_lyr) - assert attr_index_use == 2 # IteratorSufficientToEvaluateFilter + def check_index_fully_used(ds, lyr): + sql_lyr = ds.ExecuteSQL("GetLayerAttrIndexUse " + lyr.GetName()) + attr_index_use = int(sql_lyr.GetNextFeature().GetField(0)) + ds.ReleaseResultSet(sql_lyr) + assert attr_index_use == 2 # IteratorSufficientToEvaluateFilter - ds = ogr.Open(dirname, update=1) - lyr = ds.GetLayer(0) + ds = ogr.Open(dirname, update=1) + lyr = ds.GetLayer(0) - lyr.SetAttributeFilter("int16 = -1234") - check_index_fully_used(ds, lyr) - assert lyr.GetFeatureCount() == 1 + lyr.SetAttributeFilter("int16 = -1234") + check_index_fully_used(ds, lyr) + assert lyr.GetFeatureCount() == 1 - lyr.SetAttributeFilter("int16 = 1234") - assert lyr.GetFeatureCount() == 0 + lyr.SetAttributeFilter("int16 = 1234") + assert lyr.GetFeatureCount() == 0 - lyr.SetAttributeFilter("int32 = -12346789") - check_index_fully_used(ds, lyr) - assert lyr.GetFeatureCount() == 1 + lyr.SetAttributeFilter("int32 = -12346789") + check_index_fully_used(ds, lyr) + assert lyr.GetFeatureCount() == 1 - lyr.SetAttributeFilter("int32 = 12346789") - assert lyr.GetFeatureCount() == 0 + lyr.SetAttributeFilter("int32 = 12346789") + assert lyr.GetFeatureCount() == 0 - lyr.SetAttributeFilter("float32 = 1.25") - check_index_fully_used(ds, lyr) - assert lyr.GetFeatureCount() == 1 + lyr.SetAttributeFilter("float32 = 1.25") + check_index_fully_used(ds, lyr) + assert lyr.GetFeatureCount() == 1 - lyr.SetAttributeFilter("float32 = -1.25") - assert lyr.GetFeatureCount() == 0 + lyr.SetAttributeFilter("float32 = -1.25") + assert lyr.GetFeatureCount() == 0 - lyr.SetAttributeFilter("float64 = 1.256789") - assert lyr.GetFeatureCount() == 1 + lyr.SetAttributeFilter("float64 = 1.256789") + assert lyr.GetFeatureCount() == 1 - lyr.SetAttributeFilter("float64 = -1.256789") - assert 
lyr.GetFeatureCount() == 0 + lyr.SetAttributeFilter("float64 = -1.256789") + assert lyr.GetFeatureCount() == 0 - lyr.SetAttributeFilter("str = 'my str'") - assert lyr.GetFeatureCount() == 1 + lyr.SetAttributeFilter("str = 'my str'") + assert lyr.GetFeatureCount() == 1 - lyr.SetAttributeFilter("str = 'MY STR'") - assert lyr.GetFeatureCount() == 0 + lyr.SetAttributeFilter("str = 'MY STR'") + assert lyr.GetFeatureCount() == 0 - lyr.SetAttributeFilter("str = 'my st'") - assert lyr.GetFeatureCount() == 0 + lyr.SetAttributeFilter("str = 'my st'") + assert lyr.GetFeatureCount() == 0 - lyr.SetAttributeFilter("str = 'my str2'") - assert lyr.GetFeatureCount() == 0 + lyr.SetAttributeFilter("str = 'my str2'") + assert lyr.GetFeatureCount() == 0 - # Test truncation to 80 characters - # lyr.SetAttributeFilter("str = '%s'" % ('x' * 100)) - # assert lyr.GetFeatureCount() == 1 + # Test truncation to 80 characters + # lyr.SetAttributeFilter("str = '%s'" % ('x' * 100)) + # assert lyr.GetFeatureCount() == 1 - # lyr.SetAttributeFilter("str = '%s'" % ('x' * 100 + 'y')) - # assert lyr.GetFeatureCount() == 1 + # lyr.SetAttributeFilter("str = '%s'" % ('x' * 100 + 'y')) + # assert lyr.GetFeatureCount() == 1 - # lyr.SetAttributeFilter("str = '%s'" % ('x' * 100 + 'z')) - # assert lyr.GetFeatureCount() == 0 + # lyr.SetAttributeFilter("str = '%s'" % ('x' * 100 + 'z')) + # assert lyr.GetFeatureCount() == 0 - # Actually should be "LOWER(lower_str) = 'my str'" ... - # so this test may break if we implement this in a cleaner way - lyr.SetAttributeFilter("lower_str = 'my str'") - assert lyr.GetFeatureCount() == 1 + # Actually should be "LOWER(lower_str) = 'my str'" ... + # so this test may break if we implement this in a cleaner way + lyr.SetAttributeFilter("lower_str = 'my str'") + assert lyr.GetFeatureCount() == 1 - lyr.SetAttributeFilter("dt = '2022/06/03 16:06:00Z'") - check_index_fully_used(ds, lyr) - assert lyr.GetFeatureCount() == 1 + lyr.SetAttributeFilter("dt = '2022/06/03 16:06:00Z'") + check_index_fully_used(ds, lyr) + assert lyr.GetFeatureCount() == 1 - # Check that .gdbindexes is properly updated on field renaming - fld_defn = ogr.FieldDefn("int32_renamed", ogr.OFTInteger) - assert ( - lyr.AlterFieldDefn( - lyr.GetLayerDefn().GetFieldIndex("int32"), fld_defn, ogr.ALTER_ALL_FLAG - ) - == ogr.OGRERR_NONE + # Check that .gdbindexes is properly updated on field renaming + fld_defn = ogr.FieldDefn("int32_renamed", ogr.OFTInteger) + assert ( + lyr.AlterFieldDefn( + lyr.GetLayerDefn().GetFieldIndex("int32"), fld_defn, ogr.ALTER_ALL_FLAG ) + == ogr.OGRERR_NONE + ) - lyr.SetAttributeFilter("int32_renamed = -12346789") - check_index_fully_used(ds, lyr) - assert lyr.GetFeatureCount() == 1 - - ds = None + lyr.SetAttributeFilter("int32_renamed = -12346789") + check_index_fully_used(ds, lyr) + assert lyr.GetFeatureCount() == 1 - ds = ogr.Open(dirname, update=1) - lyr = ds.GetLayer(0) + ds = None - lyr.SetAttributeFilter("int32_renamed = -12346789") - check_index_fully_used(ds, lyr) - assert lyr.GetFeatureCount() == 1 + ds = ogr.Open(dirname, update=1) + lyr = ds.GetLayer(0) - # Check that the index is destroy on field deletion - assert gdal.VSIStatL(dirname + "/a00000009.idx_int32.atx") is not None - assert ( - lyr.DeleteField(lyr.GetLayerDefn().GetFieldIndex("int32_renamed")) - == ogr.OGRERR_NONE - ) - assert gdal.VSIStatL(dirname + "/a00000009.idx_int32.atx") is None + lyr.SetAttributeFilter("int32_renamed = -12346789") + check_index_fully_used(ds, lyr) + assert lyr.GetFeatureCount() == 1 - ds = None + # Check that the 
index is destroy on field deletion + assert gdal.VSIStatL(dirname / "a00000009.idx_int32.atx") is not None + assert ( + lyr.DeleteField(lyr.GetLayerDefn().GetFieldIndex("int32_renamed")) + == ogr.OGRERR_NONE + ) + assert gdal.VSIStatL(dirname / "a00000009.idx_int32.atx") is None - ds = ogr.Open(dirname, update=1) - lyr = ds.GetLayer(0) + ds = None - lyr.SetAttributeFilter("int16 = -1234") - check_index_fully_used(ds, lyr) - assert lyr.GetFeatureCount() == 1 + ds = ogr.Open(dirname, update=1) + lyr = ds.GetLayer(0) - lyr.SetAttributeFilter("float32 = 1.25") - check_index_fully_used(ds, lyr) - assert lyr.GetFeatureCount() == 1 + lyr.SetAttributeFilter("int16 = -1234") + check_index_fully_used(ds, lyr) + assert lyr.GetFeatureCount() == 1 - ds = None + lyr.SetAttributeFilter("float32 = 1.25") + check_index_fully_used(ds, lyr) + assert lyr.GetFeatureCount() == 1 - finally: - gdal.RmdirRecursive(dirname) + ds = None ############################################################################### -def test_ogr_openfilegdb_write_delete_layer(): +def test_ogr_openfilegdb_write_delete_layer(tmp_vsimem): - dirname = "/vsimem/out.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - assert ds is not None - ds = ogr.Open(dirname, update=1) - ds.CreateLayer("test", geom_type=ogr.wkbPoint) - ds.CreateLayer("test2", geom_type=ogr.wkbPoint) - ds = None + dirname = tmp_vsimem / "out.gdb" - ds = ogr.Open(dirname, update=1) - assert ds.TestCapability(ogr.ODsCDeleteLayer) == 1 + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + assert ds is not None + ds = ogr.Open(dirname, update=1) + ds.CreateLayer("test", geom_type=ogr.wkbPoint) + ds.CreateLayer("test2", geom_type=ogr.wkbPoint) + ds = None - lyr = ds.GetLayerByName("GDB_SystemCatalog") - assert lyr.GetFeatureCount() == 10 # 8 system tables + 2 layers + ds = ogr.Open(dirname, update=1) + assert ds.TestCapability(ogr.ODsCDeleteLayer) == 1 - lyr = ds.GetLayerByName("GDB_Items") - assert lyr.GetFeatureCount() == 4 # root, workspace + 2 layers + lyr = ds.GetLayerByName("GDB_SystemCatalog") + assert lyr.GetFeatureCount() == 10 # 8 system tables + 2 layers - lyr = ds.GetLayerByName("GDB_ItemRelationships") - assert lyr.GetFeatureCount() == 2 # 2 layers + lyr = ds.GetLayerByName("GDB_Items") + assert lyr.GetFeatureCount() == 4 # root, workspace + 2 layers - ds.ExecuteSQL("DELLAYER:test") - assert ds.GetLayerCount() == 1 + lyr = ds.GetLayerByName("GDB_ItemRelationships") + assert lyr.GetFeatureCount() == 2 # 2 layers - for filename in gdal.ReadDir(dirname): - assert not filename.startswith("a00000009.gdbtable") + ds.ExecuteSQL("DELLAYER:test") + assert ds.GetLayerCount() == 1 - assert ds.DeleteLayer(-1) != ogr.OGRERR_NONE - assert ds.DeleteLayer(1) != ogr.OGRERR_NONE + for filename in gdal.ReadDir(dirname): + assert not filename.startswith("a00000009.gdbtable") - # The following should not work - with gdal.quiet_errors(): - gdal.ErrorReset() - ds.ExecuteSQL("DELLAYER:not_existing") - assert gdal.GetLastErrorMsg() != "" - with gdal.quiet_errors(): - gdal.ErrorReset() - ds.ExecuteSQL("DELLAYER:GDB_SystemCatalog") - assert gdal.GetLastErrorMsg() != "" + assert ds.DeleteLayer(-1) != ogr.OGRERR_NONE + assert ds.DeleteLayer(1) != ogr.OGRERR_NONE - ds = None + # The following should not work + with gdal.quiet_errors(): + gdal.ErrorReset() + ds.ExecuteSQL("DELLAYER:not_existing") + assert gdal.GetLastErrorMsg() != "" + with gdal.quiet_errors(): + gdal.ErrorReset() + ds.ExecuteSQL("DELLAYER:GDB_SystemCatalog") + assert 
gdal.GetLastErrorMsg() != "" - ds = ogr.Open(dirname) - assert ds.GetLayerCount() == 1 - assert ds.GetLayer(0).GetName() == "test2" + ds = None - lyr = ds.GetLayerByName("GDB_SystemCatalog") - assert lyr.GetFeatureCount() == 9 + ds = ogr.Open(dirname) + assert ds.GetLayerCount() == 1 + assert ds.GetLayer(0).GetName() == "test2" - lyr = ds.GetLayerByName("GDB_Items") - assert lyr.GetFeatureCount() == 3 + lyr = ds.GetLayerByName("GDB_SystemCatalog") + assert lyr.GetFeatureCount() == 9 - lyr = ds.GetLayerByName("GDB_ItemRelationships") - assert lyr.GetFeatureCount() == 1 + lyr = ds.GetLayerByName("GDB_Items") + assert lyr.GetFeatureCount() == 3 - finally: - gdal.RmdirRecursive(dirname) + lyr = ds.GetLayerByName("GDB_ItemRelationships") + assert lyr.GetFeatureCount() == 1 ############################################################################### @@ -1886,773 +1837,729 @@ def _check_freelist_consistency(ds, lyr): ############################################################################### -def test_ogr_openfilegdb_write_freelist(): +def test_ogr_openfilegdb_write_freelist(tmp_vsimem): - dirname = "/vsimem/out.gdb" - table_filename = dirname + "/a00000009.gdbtable" - freelist_filename = dirname + "/a00000009.freelist" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - assert ds is not None - ds = ogr.Open(dirname, update=1) - lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone) - lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) + dirname = tmp_vsimem / "out.gdb" + table_filename = dirname / "a00000009.gdbtable" + freelist_filename = dirname / "a00000009.freelist" - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "X" * 5) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + assert ds is not None + ds = ogr.Open(dirname, update=1) + lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone) + lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) - lyr.SyncToDisk() - filesize = gdal.VSIStatL(table_filename).size + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "X" * 5) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - assert lyr.DeleteFeature(1) == 0 + lyr.SyncToDisk() + filesize = gdal.VSIStatL(table_filename).size - assert gdal.VSIStatL(freelist_filename) is not None - _check_freelist_consistency(ds, lyr) + assert lyr.DeleteFeature(1) == 0 - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "Y" * 5) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + assert gdal.VSIStatL(freelist_filename) is not None + _check_freelist_consistency(ds, lyr) - assert filesize == gdal.VSIStatL(table_filename).size + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "Y" * 5) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = lyr.GetNextFeature() - assert f["str"] == "Y" * 5 + assert filesize == gdal.VSIStatL(table_filename).size - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "X" * 6) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - to_delete = [f.GetFID()] + f = lyr.GetNextFeature() + assert f["str"] == "Y" * 5 - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "X" * 6) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - to_delete.append(f.GetFID()) + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "X" * 6) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + to_delete = [f.GetFID()] - filesize = gdal.VSIStatL(table_filename).size + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "X" * 6) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + 
to_delete.append(f.GetFID()) - for fid in to_delete: - assert lyr.DeleteFeature(fid) == 0 + filesize = gdal.VSIStatL(table_filename).size - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "Y" * 6) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + for fid in to_delete: + assert lyr.DeleteFeature(fid) == 0 - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "Y" * 6) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "Y" * 6) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - assert filesize == gdal.VSIStatL(table_filename).size + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "Y" * 6) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - assert gdal.VSIStatL(freelist_filename) is not None - _check_freelist_consistency(ds, lyr) + assert filesize == gdal.VSIStatL(table_filename).size - lyr.SyncToDisk() - assert gdal.VSIStatL(freelist_filename) is None + assert gdal.VSIStatL(freelist_filename) is not None + _check_freelist_consistency(ds, lyr) - finally: - gdal.RmdirRecursive(dirname) + lyr.SyncToDisk() + assert gdal.VSIStatL(freelist_filename) is None ############################################################################### -def test_ogr_openfilegdb_write_freelist_not_exactly_matching_sizes(): +def test_ogr_openfilegdb_write_freelist_not_exactly_matching_sizes(tmp_vsimem): - dirname = "/vsimem/out.gdb" - table_filename = dirname + "/a00000009.gdbtable" - freelist_filename = dirname + "/a00000009.freelist" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - assert ds is not None - ds = ogr.Open(dirname, update=1) - lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone) - lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) + dirname = tmp_vsimem / "out.gdb" + table_filename = dirname / "a00000009.gdbtable" + freelist_filename = dirname / "a00000009.freelist" - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "X" * 500) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + assert ds is not None + ds = ogr.Open(dirname, update=1) + lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone) + lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "X" * 502) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "X" * 500) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - lyr.SyncToDisk() - filesize = gdal.VSIStatL(table_filename).size + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "X" * 502) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - assert lyr.DeleteFeature(1) == 0 - assert lyr.DeleteFeature(2) == 0 + lyr.SyncToDisk() + filesize = gdal.VSIStatL(table_filename).size - assert gdal.VSIStatL(freelist_filename) is not None - _check_freelist_consistency(ds, lyr) + assert lyr.DeleteFeature(1) == 0 + assert lyr.DeleteFeature(2) == 0 - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "Y" * 490) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + assert gdal.VSIStatL(freelist_filename) is not None + _check_freelist_consistency(ds, lyr) - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "Y" * 501) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "Y" * 490) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = lyr.GetNextFeature() - assert f["str"] == "Y" * 490 + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "Y" * 501) + assert 
lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = lyr.GetNextFeature() - assert f["str"] == "Y" * 501 + f = lyr.GetNextFeature() + assert f["str"] == "Y" * 490 - assert filesize == gdal.VSIStatL(table_filename).size - _check_freelist_consistency(ds, lyr) + f = lyr.GetNextFeature() + assert f["str"] == "Y" * 501 - finally: - gdal.RmdirRecursive(dirname) + assert filesize == gdal.VSIStatL(table_filename).size + _check_freelist_consistency(ds, lyr) ############################################################################### -def test_ogr_openfilegdb_write_freelist_scenario_two_sizes(): +def test_ogr_openfilegdb_write_freelist_scenario_two_sizes(tmp_vsimem): - dirname = "/vsimem/out.gdb" - table_filename = dirname + "/a00000009.gdbtable" - freelist_filename = dirname + "/a00000009.freelist" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - assert ds is not None - ds = ogr.Open(dirname, update=1) - lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone) - lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) + dirname = tmp_vsimem / "out.gdb" + table_filename = dirname / "a00000009.gdbtable" + freelist_filename = dirname / "a00000009.freelist" - NFEATURES = 400 + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + assert ds is not None + ds = ogr.Open(dirname, update=1) + lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone) + lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) - # 500 and 600 are in the [440, 772[ range of the freelist Fibonacci suite - SIZE1 = 600 - SIZE2 = 500 - assert SIZE2 < SIZE1 + NFEATURES = 400 - for i in range(NFEATURES): - f = ogr.Feature(lyr.GetLayerDefn()) - f["str"] = "x" * SIZE1 - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + # 500 and 600 are in the [440, 772[ range of the freelist Fibonacci suite + SIZE1 = 600 + SIZE2 = 500 + assert SIZE2 < SIZE1 - for i in range(NFEATURES): - f = ogr.Feature(lyr.GetLayerDefn()) - f["str"] = "x" * SIZE2 - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + for i in range(NFEATURES): + f = ogr.Feature(lyr.GetLayerDefn()) + f["str"] = "x" * SIZE1 + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - for i in range(NFEATURES): - f = ogr.Feature(lyr.GetLayerDefn()) - f["str"] = "x" * SIZE1 - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + for i in range(NFEATURES): + f = ogr.Feature(lyr.GetLayerDefn()) + f["str"] = "x" * SIZE2 + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - for i in range(NFEATURES): - f = ogr.Feature(lyr.GetLayerDefn()) - f["str"] = "x" * SIZE2 - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + for i in range(NFEATURES): + f = ogr.Feature(lyr.GetLayerDefn()) + f["str"] = "x" * SIZE1 + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - lyr.SyncToDisk() - filesize = gdal.VSIStatL(table_filename).size + for i in range(NFEATURES): + f = ogr.Feature(lyr.GetLayerDefn()) + f["str"] = "x" * SIZE2 + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - for i in range(NFEATURES * 4): - assert lyr.DeleteFeature(1 + i) == ogr.OGRERR_NONE + lyr.SyncToDisk() + filesize = gdal.VSIStatL(table_filename).size - _check_freelist_consistency(ds, lyr) + for i in range(NFEATURES * 4): + assert lyr.DeleteFeature(1 + i) == ogr.OGRERR_NONE - for i in range(NFEATURES): - f = ogr.Feature(lyr.GetLayerDefn()) - f["str"] = "x" * SIZE1 - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + _check_freelist_consistency(ds, lyr) - for i in range(NFEATURES): - f = ogr.Feature(lyr.GetLayerDefn()) - f["str"] = "x" * SIZE2 - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + for i in range(NFEATURES): + f = 
ogr.Feature(lyr.GetLayerDefn()) + f["str"] = "x" * SIZE1 + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - for i in range(NFEATURES): - f = ogr.Feature(lyr.GetLayerDefn()) - f["str"] = "x" * SIZE1 - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + for i in range(NFEATURES): + f = ogr.Feature(lyr.GetLayerDefn()) + f["str"] = "x" * SIZE2 + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - for i in range(NFEATURES): - f = ogr.Feature(lyr.GetLayerDefn()) - f["str"] = "x" * SIZE2 - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + for i in range(NFEATURES): + f = ogr.Feature(lyr.GetLayerDefn()) + f["str"] = "x" * SIZE1 + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - assert filesize == gdal.VSIStatL(table_filename).size + for i in range(NFEATURES): + f = ogr.Feature(lyr.GetLayerDefn()) + f["str"] = "x" * SIZE2 + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - assert gdal.VSIStatL(freelist_filename) is not None - _check_freelist_consistency(ds, lyr) - lyr.SyncToDisk() - assert gdal.VSIStatL(freelist_filename) is None + assert filesize == gdal.VSIStatL(table_filename).size - finally: - gdal.RmdirRecursive(dirname) + assert gdal.VSIStatL(freelist_filename) is not None + _check_freelist_consistency(ds, lyr) + lyr.SyncToDisk() + assert gdal.VSIStatL(freelist_filename) is None ############################################################################### -def test_ogr_openfilegdb_write_freelist_scenario_random(): +def test_ogr_openfilegdb_write_freelist_scenario_random(tmp_vsimem): import functools import random r = random.Random(0) - dirname = "/vsimem/out.gdb" - table_filename = dirname + "/a00000009.gdbtable" - freelist_filename = dirname + "/a00000009.freelist" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - assert ds is not None - ds = ogr.Open(dirname, update=1) - lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone) - lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) + dirname = tmp_vsimem / "out.gdb" + table_filename = dirname / "a00000009.gdbtable" + freelist_filename = dirname / "a00000009.freelist" - NFEATURES = 1000 + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + assert ds is not None + ds = ogr.Open(dirname, update=1) + lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone) + lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) - sizes = [] - fids = [] - # Ranges that are used to allocate a slot in a series of page - fibo_suite = functools.reduce( - lambda x, _: x + [x[-1] + x[-2]], range(20 - 2), [8, 16] - ) + NFEATURES = 1000 - # Create features of random sizes - for i in range(NFEATURES): - series = r.randint(0, len(fibo_suite) - 2) - size = r.randint(fibo_suite[series], fibo_suite[series + 1] - 1) - sizes.append(size) - f = ogr.Feature(lyr.GetLayerDefn()) - f["str"] = "x" * size - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - fids.append(f.GetFID()) + sizes = [] + fids = [] + # Ranges that are used to allocate a slot in a series of page + fibo_suite = functools.reduce( + lambda x, _: x + [x[-1] + x[-2]], range(20 - 2), [8, 16] + ) - # Delete them in random order - for i in range(NFEATURES): - idx = r.randint(0, len(fids) - 1) - fid = fids[idx] - del fids[idx] + # Create features of random sizes + for i in range(NFEATURES): + series = r.randint(0, len(fibo_suite) - 2) + size = r.randint(fibo_suite[series], fibo_suite[series + 1] - 1) + sizes.append(size) + f = ogr.Feature(lyr.GetLayerDefn()) + f["str"] = "x" * size + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + fids.append(f.GetFID()) - assert lyr.DeleteFeature(fid) == 
ogr.OGRERR_NONE
+    # Delete them in random order
+    for i in range(NFEATURES):
+        idx = r.randint(0, len(fids) - 1)
+        fid = fids[idx]
+        del fids[idx]

-        _check_freelist_consistency(ds, lyr)
-        lyr.SyncToDisk()
-        filesize = gdal.VSIStatL(table_filename).size
+        assert lyr.DeleteFeature(fid) == ogr.OGRERR_NONE

-        # Re-create feature of the same previous sizes, in random order
-        for i in range(NFEATURES):
-            idx = r.randint(0, len(sizes) - 1)
-            size = sizes[idx]
-            del sizes[idx]
+    _check_freelist_consistency(ds, lyr)
+    lyr.SyncToDisk()
+    filesize = gdal.VSIStatL(table_filename).size

-            f = ogr.Feature(lyr.GetLayerDefn())
-            f["str"] = "x" * size
-            assert lyr.CreateFeature(f) == ogr.OGRERR_NONE
+    # Re-create features of the same sizes as before, in random order
+    for i in range(NFEATURES):
+        idx = r.randint(0, len(sizes) - 1)
+        size = sizes[idx]
+        del sizes[idx]

-        assert filesize == gdal.VSIStatL(table_filename).size
+        f = ogr.Feature(lyr.GetLayerDefn())
+        f["str"] = "x" * size
+        assert lyr.CreateFeature(f) == ogr.OGRERR_NONE

-        assert gdal.VSIStatL(freelist_filename) is not None
-        _check_freelist_consistency(ds, lyr)
-        lyr.SyncToDisk()
-        assert gdal.VSIStatL(freelist_filename) is None
+    assert filesize == gdal.VSIStatL(table_filename).size

-    finally:
-        gdal.RmdirRecursive(dirname)
+    assert gdal.VSIStatL(freelist_filename) is not None
+    _check_freelist_consistency(ds, lyr)
+    lyr.SyncToDisk()
+    assert gdal.VSIStatL(freelist_filename) is None

###############################################################################

-def test_ogr_openfilegdb_write_freelist_scenario_issue_7504():
+def test_ogr_openfilegdb_write_freelist_scenario_issue_7504(tmp_vsimem):

-    dirname = "/vsimem/out.gdb"
-    try:
-        ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname)
-        lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone)
-        lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString))
+    dirname = tmp_vsimem / "out.gdb"

-        N = 173
+    ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname)
+    lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone)
+    lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString))

-        f = ogr.Feature(lyr.GetLayerDefn())
-        f["str"] = "a" * N
-        assert lyr.CreateFeature(f) == ogr.OGRERR_NONE
+    N = 173

-        f = ogr.Feature(lyr.GetLayerDefn())
-        f["str"] = "b"
-        assert lyr.CreateFeature(f) == ogr.OGRERR_NONE
+    f = ogr.Feature(lyr.GetLayerDefn())
+    f["str"] = "a" * N
+    assert lyr.CreateFeature(f) == ogr.OGRERR_NONE

-        f = ogr.Feature(lyr.GetLayerDefn())
-        f["str"] = "c"
-        assert lyr.CreateFeature(f) == ogr.OGRERR_NONE
+    f = ogr.Feature(lyr.GetLayerDefn())
+    f["str"] = "b"
+    assert lyr.CreateFeature(f) == ogr.OGRERR_NONE

-        # Length is > N: feature is rewritten at end of file
-        f = ogr.Feature(lyr.GetLayerDefn())
-        f.SetFID(1)
-        f["str"] = "d" * (N + 1)
-        assert lyr.SetFeature(f) == ogr.OGRERR_NONE
+    f = ogr.Feature(lyr.GetLayerDefn())
+    f["str"] = "c"
+    assert lyr.CreateFeature(f) == ogr.OGRERR_NONE

-        # Before bugfix #7504, the space initially taken by feature 1 before
-        # its edition would have been reused for feature 3, consequently
-        # overwriting the first few bytes of feature 2... 
- f = ogr.Feature(lyr.GetLayerDefn()) - f.SetFID(3) - f["str"] = "e" * (N + 3) # must not be greater than N+3 to test the bug - assert lyr.SetFeature(f) == ogr.OGRERR_NONE + # Length is > N: feature is rewritten at end of file + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetFID(1) + f["str"] = "d" * (N + 1) + assert lyr.SetFeature(f) == ogr.OGRERR_NONE - assert lyr.SyncToDisk() == ogr.OGRERR_NONE + # Before bugfix #7504, the space initially taken by feature 1 before + # its edition would have been reused for feature 3, consequently + # overwriting the first few bytes of feature 2... + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetFID(3) + f["str"] = "e" * (N + 3) # must not be greater than N+3 to test the bug + assert lyr.SetFeature(f) == ogr.OGRERR_NONE - f = lyr.GetFeature(1) - assert f["str"] == "d" * (N + 1) + assert lyr.SyncToDisk() == ogr.OGRERR_NONE - f = lyr.GetFeature(2) - assert f["str"] == "b" + f = lyr.GetFeature(1) + assert f["str"] == "d" * (N + 1) - f = lyr.GetFeature(3) - assert f["str"] == "e" * (N + 3) + f = lyr.GetFeature(2) + assert f["str"] == "b" - ds = None + f = lyr.GetFeature(3) + assert f["str"] == "e" * (N + 3) - finally: - gdal.RmdirRecursive(dirname) + ds = None ############################################################################### -def test_ogr_openfilegdb_write_repack(): +def test_ogr_openfilegdb_write_repack(tmp_vsimem): - dirname = "/vsimem/out.gdb" - table_filename = dirname + "/a00000009.gdbtable" - freelist_filename = dirname + "/a00000009.freelist" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone) - lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) + dirname = tmp_vsimem / "out.gdb" + table_filename = dirname / "a00000009.gdbtable" + freelist_filename = dirname / "a00000009.freelist" - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "1" * 10) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone) + lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "2" * 10) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "1" * 10) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("str", "3" * 10) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "2" * 10) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - lyr.SyncToDisk() - filesize = gdal.VSIStatL(table_filename).size + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("str", "3" * 10) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - with gdal.quiet_errors(): - assert ds.ExecuteSQL("REPACK unexisting_table") is None + lyr.SyncToDisk() + filesize = gdal.VSIStatL(table_filename).size - # Repack: nothing to do - sql_lyr = ds.ExecuteSQL("REPACK") - assert sql_lyr - f = sql_lyr.GetNextFeature() - assert f[0] == "true" - ds.ReleaseResultSet(sql_lyr) + with gdal.quiet_errors(): + assert ds.ExecuteSQL("REPACK unexisting_table") is None - assert filesize == gdal.VSIStatL(table_filename).size + # Repack: nothing to do + sql_lyr = ds.ExecuteSQL("REPACK") + assert sql_lyr + f = sql_lyr.GetNextFeature() + assert f[0] == "true" + ds.ReleaseResultSet(sql_lyr) - # Suppress last feature - assert lyr.DeleteFeature(3) == 0 + assert filesize == gdal.VSIStatL(table_filename).size - # Repack: truncate file - 
sql_lyr = ds.ExecuteSQL("REPACK test") - assert sql_lyr - f = sql_lyr.GetNextFeature() - assert f[0] == "true" - ds.ReleaseResultSet(sql_lyr) + # Suppress last feature + assert lyr.DeleteFeature(3) == 0 - assert gdal.VSIStatL(table_filename).size < filesize - filesize = gdal.VSIStatL(table_filename).size + # Repack: truncate file + sql_lyr = ds.ExecuteSQL("REPACK test") + assert sql_lyr + f = sql_lyr.GetNextFeature() + assert f[0] == "true" + ds.ReleaseResultSet(sql_lyr) - # Suppress first feature - assert lyr.DeleteFeature(1) == 0 + assert gdal.VSIStatL(table_filename).size < filesize + filesize = gdal.VSIStatL(table_filename).size - assert gdal.VSIStatL(freelist_filename) is not None + # Suppress first feature + assert lyr.DeleteFeature(1) == 0 - # Repack: rewrite whole file - sql_lyr = ds.ExecuteSQL("REPACK") - assert sql_lyr - f = sql_lyr.GetNextFeature() - assert f[0] == "true" - ds.ReleaseResultSet(sql_lyr) + assert gdal.VSIStatL(freelist_filename) is not None - assert gdal.VSIStatL(table_filename).size < filesize - filesize = gdal.VSIStatL(table_filename).size + # Repack: rewrite whole file + sql_lyr = ds.ExecuteSQL("REPACK") + assert sql_lyr + f = sql_lyr.GetNextFeature() + assert f[0] == "true" + ds.ReleaseResultSet(sql_lyr) - assert gdal.VSIStatL(freelist_filename) is None + assert gdal.VSIStatL(table_filename).size < filesize + filesize = gdal.VSIStatL(table_filename).size - lyr.ResetReading() - f = lyr.GetNextFeature() - assert f.GetField(0) == "2" * 10 + assert gdal.VSIStatL(freelist_filename) is None - # Repack: nothing to do - sql_lyr = ds.ExecuteSQL("REPACK") - assert sql_lyr - f = sql_lyr.GetNextFeature() - assert f[0] == "true" - ds.ReleaseResultSet(sql_lyr) + lyr.ResetReading() + f = lyr.GetNextFeature() + assert f.GetField(0) == "2" * 10 - assert gdal.VSIStatL(table_filename).size == filesize + # Repack: nothing to do + sql_lyr = ds.ExecuteSQL("REPACK") + assert sql_lyr + f = sql_lyr.GetNextFeature() + assert f[0] == "true" + ds.ReleaseResultSet(sql_lyr) - finally: - gdal.RmdirRecursive(dirname) + assert gdal.VSIStatL(table_filename).size == filesize ############################################################################### -def test_ogr_openfilegdb_write_recompute_extent_on(): +def test_ogr_openfilegdb_write_recompute_extent_on(tmp_vsimem): - dirname = "/vsimem/out.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) + dirname = tmp_vsimem / "out.gdb" - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetGeometry(ogr.CreateGeometryFromWkt("POINT (1 2)")) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetGeometry(ogr.CreateGeometryFromWkt("POINT (3 4)")) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetGeometry(ogr.CreateGeometryFromWkt("POINT (1 2)")) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetGeometry(ogr.CreateGeometryFromWkt("POINT (5 6)")) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetGeometry(ogr.CreateGeometryFromWkt("POINT (3 4)")) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = ogr.Feature(lyr.GetLayerDefn()) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetGeometry(ogr.CreateGeometryFromWkt("POINT (5 6)")) + assert 
lyr.CreateFeature(f) == ogr.OGRERR_NONE - assert lyr.GetExtent() == (1, 5, 2, 6) + f = ogr.Feature(lyr.GetLayerDefn()) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - assert lyr.DeleteFeature(1) == ogr.OGRERR_NONE + assert lyr.GetExtent() == (1, 5, 2, 6) - assert lyr.GetExtent() == (1, 5, 2, 6) + assert lyr.DeleteFeature(1) == ogr.OGRERR_NONE - gdal.ErrorReset() - assert ds.ExecuteSQL("RECOMPUTE EXTENT ON test") is None - assert gdal.GetLastErrorMsg() == "" + assert lyr.GetExtent() == (1, 5, 2, 6) - with gdal.quiet_errors(): - gdal.ErrorReset() - assert ds.ExecuteSQL("RECOMPUTE EXTENT ON non_existing_layer") is None - assert gdal.GetLastErrorMsg() != "" + gdal.ErrorReset() + assert ds.ExecuteSQL("RECOMPUTE EXTENT ON test") is None + assert gdal.GetLastErrorMsg() == "" - assert lyr.GetExtent() == (3, 5, 4, 6) + with gdal.quiet_errors(): + gdal.ErrorReset() + assert ds.ExecuteSQL("RECOMPUTE EXTENT ON non_existing_layer") is None + assert gdal.GetLastErrorMsg() != "" - ds = None + assert lyr.GetExtent() == (3, 5, 4, 6) - ds = ogr.Open(dirname, update=1) - lyr = ds.GetLayer(0) - assert lyr.GetExtent() == (3, 5, 4, 6) + ds = None - assert lyr.DeleteFeature(2) == ogr.OGRERR_NONE - assert lyr.DeleteFeature(3) == ogr.OGRERR_NONE + ds = ogr.Open(dirname, update=1) + lyr = ds.GetLayer(0) + assert lyr.GetExtent() == (3, 5, 4, 6) - assert ds.ExecuteSQL("RECOMPUTE EXTENT ON test") is None + assert lyr.DeleteFeature(2) == ogr.OGRERR_NONE + assert lyr.DeleteFeature(3) == ogr.OGRERR_NONE - assert lyr.GetExtent(can_return_null=True) is None + assert ds.ExecuteSQL("RECOMPUTE EXTENT ON test") is None - ds = None + assert lyr.GetExtent(can_return_null=True) is None - finally: - gdal.RmdirRecursive(dirname) + ds = None ############################################################################### -def test_ogr_openfilegdb_write_alter_field_defn(): - - dirname = "/vsimem/out.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) - assert lyr.TestCapability(ogr.OLCAlterFieldDefn) == 1 - - fld_defn = ogr.FieldDefn("str", ogr.OFTString) - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - assert ( - lyr.CreateField(ogr.FieldDefn("other_field", ogr.OFTString)) - == ogr.OGRERR_NONE - ) - - # No-op - assert lyr.AlterFieldDefn(0, fld_defn, ogr.ALTER_ALL_FLAG) == ogr.OGRERR_NONE - - # Invalid index - with gdal.quiet_errors(): - assert ( - lyr.AlterFieldDefn(-1, fld_defn, ogr.ALTER_ALL_FLAG) != ogr.OGRERR_NONE - ) - assert ( - lyr.AlterFieldDefn( - lyr.GetLayerDefn().GetFieldCount(), fld_defn, ogr.ALTER_ALL_FLAG - ) - != ogr.OGRERR_NONE - ) - - ds = None - - ds = ogr.Open(dirname, update=1) - lyr = ds.GetLayer(0) +def test_ogr_openfilegdb_write_alter_field_defn(tmp_vsimem): - # Changing type not supported - fld_defn = ogr.FieldDefn("str", ogr.OFTInteger) - with gdal.quiet_errors(): - assert ( - lyr.AlterFieldDefn(0, fld_defn, ogr.ALTER_ALL_FLAG) != ogr.OGRERR_NONE - ) - fld_defn = lyr.GetLayerDefn().GetFieldDefn(0) - assert fld_defn.GetType() == ogr.OFTString + dirname = tmp_vsimem / "out.gdb" - # Changing subtype not supported - fld_defn = ogr.FieldDefn("str", ogr.OFTString) - fld_defn.SetSubType(ogr.OFSTUUID) - with gdal.quiet_errors(): - assert ( - lyr.AlterFieldDefn(0, fld_defn, ogr.ALTER_ALL_FLAG) != ogr.OGRERR_NONE - ) - fld_defn = lyr.GetLayerDefn().GetFieldDefn(0) - assert fld_defn.GetType() == ogr.OFTString - assert fld_defn.GetSubType() == ogr.OFSTNone + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + lyr 
= ds.CreateLayer("test", geom_type=ogr.wkbPoint) + assert lyr.TestCapability(ogr.OLCAlterFieldDefn) == 1 - # Changing nullable state not supported - fld_defn = ogr.FieldDefn("str", ogr.OFTString) - fld_defn.SetNullable(False) - with gdal.quiet_errors(): - assert ( - lyr.AlterFieldDefn(0, fld_defn, ogr.ALTER_ALL_FLAG) != ogr.OGRERR_NONE - ) - fld_defn = lyr.GetLayerDefn().GetFieldDefn(0) - assert fld_defn.IsNullable() + fld_defn = ogr.FieldDefn("str", ogr.OFTString) + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + assert ( + lyr.CreateField(ogr.FieldDefn("other_field", ogr.OFTString)) == ogr.OGRERR_NONE + ) - # Renaming to an other existing field not supported - fld_defn = ogr.FieldDefn("other_field", ogr.OFTString) - with gdal.quiet_errors(): - assert ( - lyr.AlterFieldDefn(0, fld_defn, ogr.ALTER_ALL_FLAG) != ogr.OGRERR_NONE - ) - fld_defn = lyr.GetLayerDefn().GetFieldDefn(0) - assert fld_defn.GetName() == "str" + # No-op + assert lyr.AlterFieldDefn(0, fld_defn, ogr.ALTER_ALL_FLAG) == ogr.OGRERR_NONE - fld_defn = ogr.FieldDefn("SHAPE", ogr.OFTString) - with gdal.quiet_errors(): - assert ( - lyr.AlterFieldDefn(0, fld_defn, ogr.ALTER_ALL_FLAG) != ogr.OGRERR_NONE + # Invalid index + with gdal.quiet_errors(): + assert lyr.AlterFieldDefn(-1, fld_defn, ogr.ALTER_ALL_FLAG) != ogr.OGRERR_NONE + assert ( + lyr.AlterFieldDefn( + lyr.GetLayerDefn().GetFieldCount(), fld_defn, ogr.ALTER_ALL_FLAG ) - fld_defn = lyr.GetLayerDefn().GetFieldDefn(0) - assert fld_defn.GetName() == "str" + != ogr.OGRERR_NONE + ) - fld_defn = ogr.FieldDefn("str_renamed", ogr.OFTString) - fld_defn.SetAlternativeName("alias") - fld_defn.SetWidth(10) - fld_defn.SetDefault("'aaa'") + ds = None - assert lyr.AlterFieldDefn(0, fld_defn, ogr.ALTER_ALL_FLAG) == ogr.OGRERR_NONE + ds = ogr.Open(dirname, update=1) + lyr = ds.GetLayer(0) + # Changing type not supported + fld_defn = ogr.FieldDefn("str", ogr.OFTInteger) + with gdal.quiet_errors(): + assert lyr.AlterFieldDefn(0, fld_defn, ogr.ALTER_ALL_FLAG) != ogr.OGRERR_NONE fld_defn = lyr.GetLayerDefn().GetFieldDefn(0) assert fld_defn.GetType() == ogr.OFTString - assert fld_defn.GetName() == "str_renamed" - assert fld_defn.GetAlternativeName() == "alias" - assert fld_defn.GetWidth() == 10 - assert fld_defn.GetDefault() == "'aaa'" - - ds = None - - ds = ogr.Open(dirname, update=1) - lyr = ds.GetLayer(0) + # Changing subtype not supported + fld_defn = ogr.FieldDefn("str", ogr.OFTString) + fld_defn.SetSubType(ogr.OFSTUUID) + with gdal.quiet_errors(): + assert lyr.AlterFieldDefn(0, fld_defn, ogr.ALTER_ALL_FLAG) != ogr.OGRERR_NONE fld_defn = lyr.GetLayerDefn().GetFieldDefn(0) assert fld_defn.GetType() == ogr.OFTString - assert fld_defn.GetName() == "str_renamed" - assert fld_defn.GetAlternativeName() == "alias" - assert fld_defn.GetWidth() == 10 - assert fld_defn.GetDefault() == "'aaa'" - - sql_lyr = ds.ExecuteSQL("GetLayerDefinition test") - assert sql_lyr - f = sql_lyr.GetNextFeature() - xml = f.GetField(0) - f = None - ds.ReleaseResultSet(sql_lyr) - assert "<Name>str_renamed</Name>" in xml - - ds = None - - finally: - gdal.RmdirRecursive(dirname) + assert fld_defn.GetSubType() == ogr.OFSTNone + # Changing nullable state not supported + fld_defn = ogr.FieldDefn("str", ogr.OFTString) + fld_defn.SetNullable(False) + with gdal.quiet_errors(): + assert lyr.AlterFieldDefn(0, fld_defn, ogr.ALTER_ALL_FLAG) != ogr.OGRERR_NONE + fld_defn = lyr.GetLayerDefn().GetFieldDefn(0) + assert fld_defn.IsNullable() -############################################################################### -# Test 
writing field domains + # Renaming to an other existing field not supported + fld_defn = ogr.FieldDefn("other_field", ogr.OFTString) + with gdal.quiet_errors(): + assert lyr.AlterFieldDefn(0, fld_defn, ogr.ALTER_ALL_FLAG) != ogr.OGRERR_NONE + fld_defn = lyr.GetLayerDefn().GetFieldDefn(0) + assert fld_defn.GetName() == "str" + fld_defn = ogr.FieldDefn("SHAPE", ogr.OFTString) + with gdal.quiet_errors(): + assert lyr.AlterFieldDefn(0, fld_defn, ogr.ALTER_ALL_FLAG) != ogr.OGRERR_NONE + fld_defn = lyr.GetLayerDefn().GetFieldDefn(0) + assert fld_defn.GetName() == "str" -def test_ogr_openfilegdb_write_domains(): + fld_defn = ogr.FieldDefn("str_renamed", ogr.OFTString) + fld_defn.SetAlternativeName("alias") + fld_defn.SetWidth(10) + fld_defn.SetDefault("'aaa'") - dirname = "/vsimem/out.gdb" - try: - ds = gdal.GetDriverByName("OpenFileGDB").Create( - dirname, 0, 0, 0, gdal.GDT_Unknown - ) + assert lyr.AlterFieldDefn(0, fld_defn, ogr.ALTER_ALL_FLAG) == ogr.OGRERR_NONE - domain = ogr.CreateCodedFieldDomain( - "domain", "desc", ogr.OFTInteger, ogr.OFSTNone, {1: "one", "2": None} - ) - assert ds.AddFieldDomain(domain) + fld_defn = lyr.GetLayerDefn().GetFieldDefn(0) + assert fld_defn.GetType() == ogr.OFTString + assert fld_defn.GetName() == "str_renamed" + assert fld_defn.GetAlternativeName() == "alias" + assert fld_defn.GetWidth() == 10 + assert fld_defn.GetDefault() == "'aaa'" - lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone) + ds = None - fld_defn = ogr.FieldDefn("foo", ogr.OFTInteger) - fld_defn.SetDomainName("domain") - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + ds = ogr.Open(dirname, update=1) + lyr = ds.GetLayer(0) - fld_defn = ogr.FieldDefn("foo2", ogr.OFTInteger) - fld_defn.SetDomainName("domain") - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + fld_defn = lyr.GetLayerDefn().GetFieldDefn(0) + assert fld_defn.GetType() == ogr.OFTString + assert fld_defn.GetName() == "str_renamed" + assert fld_defn.GetAlternativeName() == "alias" + assert fld_defn.GetWidth() == 10 + assert fld_defn.GetDefault() == "'aaa'" + + sql_lyr = ds.ExecuteSQL("GetLayerDefinition test") + assert sql_lyr + f = sql_lyr.GetNextFeature() + xml = f.GetField(0) + f = None + ds.ReleaseResultSet(sql_lyr) + assert "<Name>str_renamed</Name>" in xml - domain = ogr.CreateRangeFieldDomainDateTime( - "datetime_range", - "datetime_range_desc", - "2023-07-03T12:13:14", - True, - "2023-07-03T12:13:15", - True, - ) - assert ds.AddFieldDomain(domain) - ds = None + ds = None - ds = gdal.OpenEx(dirname) - assert ds.GetLayerByName("GDB_ItemRelationships").GetFeatureCount() == 2 - domain = ds.GetFieldDomain("datetime_range") - assert domain is not None - assert domain.GetName() == "datetime_range" - assert domain.GetDescription() == "datetime_range_desc" - assert domain.GetDomainType() == ogr.OFDT_RANGE - assert domain.GetFieldType() == ogr.OFTDateTime - assert domain.GetFieldSubType() == ogr.OFSTNone - assert domain.GetMinAsString() == "2023-07-03T12:13:14" - assert domain.GetMaxAsString() == "2023-07-03T12:13:15" +############################################################################### +# Test writing field domains - ds = None - ds = ogr.Open(dirname, update=1) - lyr = ds.GetLayer(0) - assert lyr.DeleteField(0) == ogr.OGRERR_NONE - ds = None +def test_ogr_openfilegdb_write_domains(tmp_vsimem): - ds = ogr.Open(dirname, update=1) - assert ds.GetLayerByName("GDB_ItemRelationships").GetFeatureCount() == 2 - ds = None + dirname = tmp_vsimem / "out.gdb" - ds = ogr.Open(dirname, update=1) - lyr = ds.GetLayer(0) - assert 
lyr.DeleteField(0) == ogr.OGRERR_NONE - ds = None + ds = gdal.GetDriverByName("OpenFileGDB").Create(dirname, 0, 0, 0, gdal.GDT_Unknown) - ds = ogr.Open(dirname, update=1) - assert ds.GetLayerByName("GDB_ItemRelationships").GetFeatureCount() == 1 - ds = None + domain = ogr.CreateCodedFieldDomain( + "domain", "desc", ogr.OFTInteger, ogr.OFSTNone, {1: "one", "2": None} + ) + assert ds.AddFieldDomain(domain) - ds = ogr.Open(dirname, update=1) - lyr = ds.GetLayer(0) - fld_defn = ogr.FieldDefn("foo", ogr.OFTInteger) - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - ds = None + lyr = ds.CreateLayer("test", geom_type=ogr.wkbNone) - ds = ogr.Open(dirname, update=1) - assert ds.GetLayerByName("GDB_ItemRelationships").GetFeatureCount() == 1 - ds = None + fld_defn = ogr.FieldDefn("foo", ogr.OFTInteger) + fld_defn.SetDomainName("domain") + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - ds = ogr.Open(dirname, update=1) - lyr = ds.GetLayer(0) - fld_defn = ogr.FieldDefn("foo", ogr.OFTInteger) - fld_defn.SetDomainName("domain") - assert lyr.AlterFieldDefn(0, fld_defn, ogr.ALTER_ALL_FLAG) == ogr.OGRERR_NONE - assert lyr.GetLayerDefn().GetFieldDefn(0).GetDomainName() == "domain" - ds = None + fld_defn = ogr.FieldDefn("foo2", ogr.OFTInteger) + fld_defn.SetDomainName("domain") + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - ds = ogr.Open(dirname, update=1) - assert ds.GetLayerByName("GDB_ItemRelationships").GetFeatureCount() == 2 - ds = None + domain = ogr.CreateRangeFieldDomainDateTime( + "datetime_range", + "datetime_range_desc", + "2023-07-03T12:13:14", + True, + "2023-07-03T12:13:15", + True, + ) + assert ds.AddFieldDomain(domain) + ds = None - finally: - gdal.RmdirRecursive(dirname) + ds = gdal.OpenEx(dirname) + assert ds.GetLayerByName("GDB_ItemRelationships").GetFeatureCount() == 2 + domain = ds.GetFieldDomain("datetime_range") + assert domain is not None + assert domain.GetName() == "datetime_range" + assert domain.GetDescription() == "datetime_range_desc" + assert domain.GetDomainType() == ogr.OFDT_RANGE + assert domain.GetFieldType() == ogr.OFTDateTime + assert domain.GetFieldSubType() == ogr.OFSTNone + assert domain.GetMinAsString() == "2023-07-03T12:13:14" + assert domain.GetMaxAsString() == "2023-07-03T12:13:15" -############################################################################### -# Test writing relationships + ds = None + ds = ogr.Open(dirname, update=1) + lyr = ds.GetLayer(0) + assert lyr.DeleteField(0) == ogr.OGRERR_NONE + ds = None -def test_ogr_openfilegdb_write_relationships(): + ds = ogr.Open(dirname, update=1) + assert ds.GetLayerByName("GDB_ItemRelationships").GetFeatureCount() == 2 + ds = None - dirname = "/vsimem/out.gdb" - try: - ds = gdal.GetDriverByName("OpenFileGDB").Create( - dirname, 0, 0, 0, gdal.GDT_Unknown - ) + ds = ogr.Open(dirname, update=1) + lyr = ds.GetLayer(0) + assert lyr.DeleteField(0) == ogr.OGRERR_NONE + ds = None - relationship = gdal.Relationship( - "my_relationship", "origin_table", "dest_table", gdal.GRC_ONE_TO_ONE - ) - relationship.SetLeftTableFields(["o_pkey"]) - relationship.SetRightTableFields(["dest_pkey"]) - relationship.SetRelatedTableType("media") + ds = ogr.Open(dirname, update=1) + assert ds.GetLayerByName("GDB_ItemRelationships").GetFeatureCount() == 1 + ds = None - # no tables yet - assert not ds.AddRelationship(relationship) + ds = ogr.Open(dirname, update=1) + lyr = ds.GetLayer(0) + fld_defn = ogr.FieldDefn("foo", ogr.OFTInteger) + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + ds = None - lyr = 
ds.CreateLayer("origin_table", geom_type=ogr.wkbNone) - fld_defn = ogr.FieldDefn("o_pkey", ogr.OFTInteger) - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + ds = ogr.Open(dirname, update=1) + assert ds.GetLayerByName("GDB_ItemRelationships").GetFeatureCount() == 1 + ds = None - lyr = ds.CreateLayer("dest_table", geom_type=ogr.wkbNone) - fld_defn = ogr.FieldDefn("dest_pkey", ogr.OFTInteger) - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + ds = ogr.Open(dirname, update=1) + lyr = ds.GetLayer(0) + fld_defn = ogr.FieldDefn("foo", ogr.OFTInteger) + fld_defn.SetDomainName("domain") + assert lyr.AlterFieldDefn(0, fld_defn, ogr.ALTER_ALL_FLAG) == ogr.OGRERR_NONE + assert lyr.GetLayerDefn().GetFieldDefn(0).GetDomainName() == "domain" + ds = None - ds = gdal.OpenEx(dirname, gdal.GA_Update) + ds = ogr.Open(dirname, update=1) + assert ds.GetLayerByName("GDB_ItemRelationships").GetFeatureCount() == 2 + ds = None - items_lyr = ds.GetLayerByName("GDB_Items") - f = items_lyr.GetFeature(1) - assert f["Path"] == "\\" - root_dataset_uuid = f["UUID"] - f = items_lyr.GetFeature(3) - assert f["Name"] == "origin_table" - origin_table_uuid = f["UUID"] +############################################################################### +# Test writing relationships - f = items_lyr.GetFeature(4) - assert f["Name"] == "dest_table" - dest_table_uuid = f["UUID"] - ds = gdal.OpenEx(dirname, gdal.GA_Update) +def test_ogr_openfilegdb_write_relationships(tmp_vsimem): - assert ds.AddRelationship(relationship) + dirname = tmp_vsimem / "out.gdb" - assert set(ds.GetRelationshipNames()) == {"my_relationship"} - retrieved_rel = ds.GetRelationship("my_relationship") - assert retrieved_rel.GetCardinality() == gdal.GRC_ONE_TO_ONE - assert retrieved_rel.GetType() == gdal.GRT_ASSOCIATION - assert retrieved_rel.GetLeftTableName() == "origin_table" - assert retrieved_rel.GetRightTableName() == "dest_table" - assert retrieved_rel.GetLeftTableFields() == ["o_pkey"] - assert retrieved_rel.GetRightTableFields() == ["dest_pkey"] - assert retrieved_rel.GetRelatedTableType() == "media" + ds = gdal.GetDriverByName("OpenFileGDB").Create(dirname, 0, 0, 0, gdal.GDT_Unknown) - # check metadata contents - items_lyr = ds.GetLayerByName("GDB_Items") - f = items_lyr.GetFeature(5) - relationship_uuid = f["UUID"] - assert f["Name"] == "my_relationship" - assert ( - f["Definition"] - == """<DERelationshipClassInfo xsi:type="typens:DERelationshipClassInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:typens="http://www.esri.com/schemas/ArcGIS/10.1"> + relationship = gdal.Relationship( + "my_relationship", "origin_table", "dest_table", gdal.GRC_ONE_TO_ONE + ) + relationship.SetLeftTableFields(["o_pkey"]) + relationship.SetRightTableFields(["dest_pkey"]) + relationship.SetRelatedTableType("media") + + # no tables yet + assert not ds.AddRelationship(relationship) + + lyr = ds.CreateLayer("origin_table", geom_type=ogr.wkbNone) + fld_defn = ogr.FieldDefn("o_pkey", ogr.OFTInteger) + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + + lyr = ds.CreateLayer("dest_table", geom_type=ogr.wkbNone) + fld_defn = ogr.FieldDefn("dest_pkey", ogr.OFTInteger) + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + + ds = gdal.OpenEx(dirname, gdal.GA_Update) + + items_lyr = ds.GetLayerByName("GDB_Items") + f = items_lyr.GetFeature(1) + assert f["Path"] == "\\" + root_dataset_uuid = f["UUID"] + + f = items_lyr.GetFeature(3) + assert f["Name"] == "origin_table" + origin_table_uuid = f["UUID"] + + f = 
items_lyr.GetFeature(4) + assert f["Name"] == "dest_table" + dest_table_uuid = f["UUID"] + + ds = gdal.OpenEx(dirname, gdal.GA_Update) + + assert ds.AddRelationship(relationship) + + assert set(ds.GetRelationshipNames()) == {"my_relationship"} + retrieved_rel = ds.GetRelationship("my_relationship") + assert retrieved_rel.GetCardinality() == gdal.GRC_ONE_TO_ONE + assert retrieved_rel.GetType() == gdal.GRT_ASSOCIATION + assert retrieved_rel.GetLeftTableName() == "origin_table" + assert retrieved_rel.GetRightTableName() == "dest_table" + assert retrieved_rel.GetLeftTableFields() == ["o_pkey"] + assert retrieved_rel.GetRightTableFields() == ["dest_pkey"] + assert retrieved_rel.GetRelatedTableType() == "media" + + # check metadata contents + items_lyr = ds.GetLayerByName("GDB_Items") + f = items_lyr.GetFeature(5) + relationship_uuid = f["UUID"] + assert f["Name"] == "my_relationship" + assert ( + f["Definition"] + == """<DERelationshipClassInfo xsi:type="typens:DERelationshipClassInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:typens="http://www.esri.com/schemas/ArcGIS/10.1"> <CatalogPath>\\my_relationship</CatalogPath> <Name>my_relationship</Name> <ChildrenExpanded>false</ChildrenExpanded> @@ -2715,12 +2622,12 @@ def test_ogr_openfilegdb_write_relationships(): <ChangeTracked>false</ChangeTracked> <ReplicaTracked>false</ReplicaTracked> </DERelationshipClassInfo>\n""" - ) - assert f["DatasetSubtype1"] == 1 - assert f["DatasetSubtype2"] == 0 - assert ( - f["Documentation"] - == """<metadata xml:lang="en"> + ) + assert f["DatasetSubtype1"] == 1 + assert f["DatasetSubtype2"] == 0 + assert ( + f["Documentation"] + == """<metadata xml:lang="en"> <Esri> <CreaDate></CreaDate> <CreaTime></CreaTime> @@ -2732,10 +2639,10 @@ def test_ogr_openfilegdb_write_relationships(): </Esri> </metadata> """ - ) - assert ( - f["ItemInfo"] - == """<ESRI_ItemInformation culture=""> + ) + assert ( + f["ItemInfo"] + == """<ESRI_ItemInformation culture=""> <name>my_relationship</name> <catalogPath>\\my_relationship</catalogPath> <snippet></snippet> @@ -2773,77 +2680,77 @@ def test_ogr_openfilegdb_write_relationships(): <propValues></propValues> </ESRI_ItemInformation> """ - ) - # check item relationships have been created - item_relationships_lyr = ds.GetLayerByName("GDB_ItemRelationships") - - f = item_relationships_lyr.GetFeature(3) - assert f["OriginID"] == origin_table_uuid - assert f["DestID"] == relationship_uuid - assert f["Type"] == "{725BADAB-3452-491B-A795-55F32D67229C}" - - f = item_relationships_lyr.GetFeature(4) - assert f["OriginID"] == dest_table_uuid - assert f["DestID"] == relationship_uuid - assert f["Type"] == "{725BADAB-3452-491B-A795-55F32D67229C}" - - f = item_relationships_lyr.GetFeature(5) - assert f["OriginID"] == root_dataset_uuid - assert f["DestID"] == relationship_uuid - assert f["Type"] == "{DC78F1AB-34E4-43AC-BA47-1C4EABD0E7C7}" - - ds = gdal.OpenEx(dirname, gdal.GA_Update) - assert set(ds.GetRelationshipNames()) == {"my_relationship"} - - # one to many - lyr = ds.CreateLayer("origin_table_1_to_many", geom_type=ogr.wkbNone) - fld_defn = ogr.FieldDefn("o_pkey", ogr.OFTInteger) - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - - lyr = ds.CreateLayer("dest_table_1_to_many", geom_type=ogr.wkbNone) - fld_defn = ogr.FieldDefn("dest_pkey", ogr.OFTInteger) - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - - ds = gdal.OpenEx(dirname, gdal.GA_Update) - - # should be rejected -- duplicate name - assert not 
ds.AddRelationship(relationship) - - relationship = gdal.Relationship( - "my_one_to_many_relationship", - "origin_table_1_to_many", - "dest_table_1_to_many", - gdal.GRC_ONE_TO_MANY, - ) - relationship.SetLeftTableFields(["o_pkey"]) - relationship.SetRightTableFields(["dest_pkey"]) - relationship.SetType(gdal.GRT_COMPOSITE) - relationship.SetForwardPathLabel("fwd label") - relationship.SetBackwardPathLabel("backward label") - assert ds.AddRelationship(relationship) - - ds = gdal.OpenEx(dirname, gdal.GA_Update) - assert set(ds.GetRelationshipNames()) == { - "my_relationship", - "my_one_to_many_relationship", - } - retrieved_rel = ds.GetRelationship("my_one_to_many_relationship") - assert retrieved_rel.GetCardinality() == gdal.GRC_ONE_TO_MANY - assert retrieved_rel.GetType() == gdal.GRT_COMPOSITE - assert retrieved_rel.GetLeftTableName() == "origin_table_1_to_many" - assert retrieved_rel.GetRightTableName() == "dest_table_1_to_many" - assert retrieved_rel.GetLeftTableFields() == ["o_pkey"] - assert retrieved_rel.GetRightTableFields() == ["dest_pkey"] - assert retrieved_rel.GetForwardPathLabel() == "fwd label" - assert retrieved_rel.GetBackwardPathLabel() == "backward label" - assert retrieved_rel.GetRelatedTableType() == "features" - - items_lyr = ds.GetLayerByName("GDB_Items") - f = items_lyr.GetFeature(8) - assert f["Name"] == "my_one_to_many_relationship" - assert ( - f["Definition"] - == """<DERelationshipClassInfo xsi:type="typens:DERelationshipClassInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:typens="http://www.esri.com/schemas/ArcGIS/10.1"> + ) + # check item relationships have been created + item_relationships_lyr = ds.GetLayerByName("GDB_ItemRelationships") + + f = item_relationships_lyr.GetFeature(3) + assert f["OriginID"] == origin_table_uuid + assert f["DestID"] == relationship_uuid + assert f["Type"] == "{725BADAB-3452-491B-A795-55F32D67229C}" + + f = item_relationships_lyr.GetFeature(4) + assert f["OriginID"] == dest_table_uuid + assert f["DestID"] == relationship_uuid + assert f["Type"] == "{725BADAB-3452-491B-A795-55F32D67229C}" + + f = item_relationships_lyr.GetFeature(5) + assert f["OriginID"] == root_dataset_uuid + assert f["DestID"] == relationship_uuid + assert f["Type"] == "{DC78F1AB-34E4-43AC-BA47-1C4EABD0E7C7}" + + ds = gdal.OpenEx(dirname, gdal.GA_Update) + assert set(ds.GetRelationshipNames()) == {"my_relationship"} + + # one to many + lyr = ds.CreateLayer("origin_table_1_to_many", geom_type=ogr.wkbNone) + fld_defn = ogr.FieldDefn("o_pkey", ogr.OFTInteger) + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + + lyr = ds.CreateLayer("dest_table_1_to_many", geom_type=ogr.wkbNone) + fld_defn = ogr.FieldDefn("dest_pkey", ogr.OFTInteger) + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + + ds = gdal.OpenEx(dirname, gdal.GA_Update) + + # should be rejected -- duplicate name + assert not ds.AddRelationship(relationship) + + relationship = gdal.Relationship( + "my_one_to_many_relationship", + "origin_table_1_to_many", + "dest_table_1_to_many", + gdal.GRC_ONE_TO_MANY, + ) + relationship.SetLeftTableFields(["o_pkey"]) + relationship.SetRightTableFields(["dest_pkey"]) + relationship.SetType(gdal.GRT_COMPOSITE) + relationship.SetForwardPathLabel("fwd label") + relationship.SetBackwardPathLabel("backward label") + assert ds.AddRelationship(relationship) + + ds = gdal.OpenEx(dirname, gdal.GA_Update) + assert set(ds.GetRelationshipNames()) == { + "my_relationship", + "my_one_to_many_relationship", + } + 
retrieved_rel = ds.GetRelationship("my_one_to_many_relationship") + assert retrieved_rel.GetCardinality() == gdal.GRC_ONE_TO_MANY + assert retrieved_rel.GetType() == gdal.GRT_COMPOSITE + assert retrieved_rel.GetLeftTableName() == "origin_table_1_to_many" + assert retrieved_rel.GetRightTableName() == "dest_table_1_to_many" + assert retrieved_rel.GetLeftTableFields() == ["o_pkey"] + assert retrieved_rel.GetRightTableFields() == ["dest_pkey"] + assert retrieved_rel.GetForwardPathLabel() == "fwd label" + assert retrieved_rel.GetBackwardPathLabel() == "backward label" + assert retrieved_rel.GetRelatedTableType() == "features" + + items_lyr = ds.GetLayerByName("GDB_Items") + f = items_lyr.GetFeature(8) + assert f["Name"] == "my_one_to_many_relationship" + assert ( + f["Definition"] + == """<DERelationshipClassInfo xsi:type="typens:DERelationshipClassInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:typens="http://www.esri.com/schemas/ArcGIS/10.1"> <CatalogPath>\\my_one_to_many_relationship</CatalogPath> <Name>my_one_to_many_relationship</Name> <ChildrenExpanded>false</ChildrenExpanded> @@ -2906,74 +2813,74 @@ def test_ogr_openfilegdb_write_relationships(): <ChangeTracked>false</ChangeTracked> <ReplicaTracked>false</ReplicaTracked> </DERelationshipClassInfo>\n""" - ) - assert f["DatasetSubtype1"] == 2 - assert f["DatasetSubtype2"] == 0 - - # many to many relationship - lyr = ds.CreateLayer("origin_table_many_to_many", geom_type=ogr.wkbNone) - fld_defn = ogr.FieldDefn("o_pkey", ogr.OFTInteger) - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - - lyr = ds.CreateLayer("dest_table_many_to_many", geom_type=ogr.wkbNone) - fld_defn = ogr.FieldDefn("dest_pkey", ogr.OFTInteger) - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - - lyr = ds.CreateLayer("mapping_table_many_to_many", geom_type=ogr.wkbNone) - fld_defn = ogr.FieldDefn("dest_pkey", ogr.OFTInteger) - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - - lyr = ds.CreateLayer("many_to_many", geom_type=ogr.wkbNone) - fld_defn = ogr.FieldDefn("RID", ogr.OFTInteger) - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - fld_defn = ogr.FieldDefn("origin_fk", ogr.OFTInteger) - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - fld_defn = ogr.FieldDefn("destination_fk", ogr.OFTInteger) - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - - ds = gdal.OpenEx(dirname, gdal.GA_Update) - - relationship = gdal.Relationship( - "many_to_many", - "origin_table_many_to_many", - "dest_table_many_to_many", - gdal.GRC_MANY_TO_MANY, - ) - relationship.SetLeftTableFields(["o_pkey"]) - relationship.SetRightTableFields(["dest_pkey"]) - relationship.SetMappingTableName("mapping_table_many_to_many") - relationship.SetLeftMappingTableFields(["origin_fk"]) - relationship.SetRightMappingTableFields(["destination_fk"]) - - # this should be rejected -- the mapping table name MUST match the relationship name - assert not ds.AddRelationship(relationship) - - relationship.SetMappingTableName("many_to_many") - assert ds.AddRelationship(relationship) - - ds = gdal.OpenEx(dirname, gdal.GA_Update) - assert set(ds.GetRelationshipNames()) == { - "my_relationship", - "my_one_to_many_relationship", - "many_to_many", - } - retrieved_rel = ds.GetRelationship("many_to_many") - assert retrieved_rel.GetCardinality() == gdal.GRC_MANY_TO_MANY - assert retrieved_rel.GetType() == gdal.GRT_ASSOCIATION - assert retrieved_rel.GetLeftTableName() == "origin_table_many_to_many" - assert retrieved_rel.GetRightTableName() == 
"dest_table_many_to_many" - assert retrieved_rel.GetLeftTableFields() == ["o_pkey"] - assert retrieved_rel.GetRightTableFields() == ["dest_pkey"] - assert retrieved_rel.GetMappingTableName() == "many_to_many" - assert retrieved_rel.GetLeftMappingTableFields() == ["origin_fk"] - assert retrieved_rel.GetRightMappingTableFields() == ["destination_fk"] - - items_lyr = ds.GetLayerByName("GDB_Items") - f = items_lyr.GetFeature(13) - assert f["Name"] == "many_to_many" - assert ( - f["Definition"] - == """<DERelationshipClassInfo xsi:type="typens:DERelationshipClassInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:typens="http://www.esri.com/schemas/ArcGIS/10.1"> + ) + assert f["DatasetSubtype1"] == 2 + assert f["DatasetSubtype2"] == 0 + + # many to many relationship + lyr = ds.CreateLayer("origin_table_many_to_many", geom_type=ogr.wkbNone) + fld_defn = ogr.FieldDefn("o_pkey", ogr.OFTInteger) + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + + lyr = ds.CreateLayer("dest_table_many_to_many", geom_type=ogr.wkbNone) + fld_defn = ogr.FieldDefn("dest_pkey", ogr.OFTInteger) + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + + lyr = ds.CreateLayer("mapping_table_many_to_many", geom_type=ogr.wkbNone) + fld_defn = ogr.FieldDefn("dest_pkey", ogr.OFTInteger) + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + + lyr = ds.CreateLayer("many_to_many", geom_type=ogr.wkbNone) + fld_defn = ogr.FieldDefn("RID", ogr.OFTInteger) + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + fld_defn = ogr.FieldDefn("origin_fk", ogr.OFTInteger) + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + fld_defn = ogr.FieldDefn("destination_fk", ogr.OFTInteger) + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + + ds = gdal.OpenEx(dirname, gdal.GA_Update) + + relationship = gdal.Relationship( + "many_to_many", + "origin_table_many_to_many", + "dest_table_many_to_many", + gdal.GRC_MANY_TO_MANY, + ) + relationship.SetLeftTableFields(["o_pkey"]) + relationship.SetRightTableFields(["dest_pkey"]) + relationship.SetMappingTableName("mapping_table_many_to_many") + relationship.SetLeftMappingTableFields(["origin_fk"]) + relationship.SetRightMappingTableFields(["destination_fk"]) + + # this should be rejected -- the mapping table name MUST match the relationship name + assert not ds.AddRelationship(relationship) + + relationship.SetMappingTableName("many_to_many") + assert ds.AddRelationship(relationship) + + ds = gdal.OpenEx(dirname, gdal.GA_Update) + assert set(ds.GetRelationshipNames()) == { + "my_relationship", + "my_one_to_many_relationship", + "many_to_many", + } + retrieved_rel = ds.GetRelationship("many_to_many") + assert retrieved_rel.GetCardinality() == gdal.GRC_MANY_TO_MANY + assert retrieved_rel.GetType() == gdal.GRT_ASSOCIATION + assert retrieved_rel.GetLeftTableName() == "origin_table_many_to_many" + assert retrieved_rel.GetRightTableName() == "dest_table_many_to_many" + assert retrieved_rel.GetLeftTableFields() == ["o_pkey"] + assert retrieved_rel.GetRightTableFields() == ["dest_pkey"] + assert retrieved_rel.GetMappingTableName() == "many_to_many" + assert retrieved_rel.GetLeftMappingTableFields() == ["origin_fk"] + assert retrieved_rel.GetRightMappingTableFields() == ["destination_fk"] + + items_lyr = ds.GetLayerByName("GDB_Items") + f = items_lyr.GetFeature(13) + assert f["Name"] == "many_to_many" + assert ( + f["Definition"] + == """<DERelationshipClassInfo xsi:type="typens:DERelationshipClassInfo" 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:typens="http://www.esri.com/schemas/ArcGIS/10.1"> <CatalogPath>\\many_to_many</CatalogPath> <Name>many_to_many</Name> <ChildrenExpanded>false</ChildrenExpanded> @@ -3058,421 +2965,414 @@ def test_ogr_openfilegdb_write_relationships(): <ChangeTracked>false</ChangeTracked> <ReplicaTracked>false</ReplicaTracked> </DERelationshipClassInfo>\n""" - ) - assert f["DatasetSubtype1"] == 3 - assert f["DatasetSubtype2"] == 0 + ) + assert f["DatasetSubtype1"] == 3 + assert f["DatasetSubtype2"] == 0 - # many to many relationship, auto create mapping table - lyr = ds.CreateLayer("origin_table_many_to_many2", geom_type=ogr.wkbNone) - fld_defn = ogr.FieldDefn("o_pkey", ogr.OFTInteger) - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + # many to many relationship, auto create mapping table + lyr = ds.CreateLayer("origin_table_many_to_many2", geom_type=ogr.wkbNone) + fld_defn = ogr.FieldDefn("o_pkey", ogr.OFTInteger) + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - lyr = ds.CreateLayer("dest_table_many_to_many2", geom_type=ogr.wkbNone) - fld_defn = ogr.FieldDefn("dest_pkey", ogr.OFTInteger) - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + lyr = ds.CreateLayer("dest_table_many_to_many2", geom_type=ogr.wkbNone) + fld_defn = ogr.FieldDefn("dest_pkey", ogr.OFTInteger) + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE - ds = gdal.OpenEx(dirname, gdal.GA_Update) + ds = gdal.OpenEx(dirname, gdal.GA_Update) - relationship = gdal.Relationship( - "many_to_many_auto", - "origin_table_many_to_many2", - "dest_table_many_to_many2", - gdal.GRC_MANY_TO_MANY, - ) - relationship.SetLeftTableFields(["o_pkey"]) - relationship.SetRightTableFields(["dest_pkey"]) - - assert ds.AddRelationship(relationship) - - ds = gdal.OpenEx(dirname, gdal.GA_Update) - assert set(ds.GetRelationshipNames()) == { - "my_relationship", - "my_one_to_many_relationship", - "many_to_many", - "many_to_many_auto", - } - retrieved_rel = ds.GetRelationship("many_to_many_auto") - assert retrieved_rel.GetCardinality() == gdal.GRC_MANY_TO_MANY - assert retrieved_rel.GetType() == gdal.GRT_ASSOCIATION - assert retrieved_rel.GetLeftTableName() == "origin_table_many_to_many2" - assert retrieved_rel.GetRightTableName() == "dest_table_many_to_many2" - assert retrieved_rel.GetLeftTableFields() == ["o_pkey"] - assert retrieved_rel.GetRightTableFields() == ["dest_pkey"] - assert retrieved_rel.GetMappingTableName() == "many_to_many_auto" - assert retrieved_rel.GetLeftMappingTableFields() == ["origin_fk"] - assert retrieved_rel.GetRightMappingTableFields() == ["destination_fk"] - # make sure mapping table was created - mapping_table = ds.GetLayerByName("many_to_many_auto") - assert mapping_table is not None - lyr_defn = mapping_table.GetLayerDefn() - assert mapping_table.GetFIDColumn() == "RID" - assert lyr_defn.GetFieldIndex("origin_fk") >= 0 - assert lyr_defn.GetFieldIndex("destination_fk") >= 0 - - items_lyr = ds.GetLayerByName("GDB_Items") - f = items_lyr.GetFeature(16) - relationship_uuid = f["UUID"] - assert f["Name"] == "many_to_many_auto" - assert f["Type"] == "{B606A7E1-FA5B-439C-849C-6E9C2481537B}" - - # delete relationship - assert not ds.DeleteRelationship("i dont exist") - assert set(ds.GetRelationshipNames()) == { - "my_relationship", - "my_one_to_many_relationship", - "many_to_many", - "many_to_many_auto", - } - - assert ds.DeleteRelationship("many_to_many_auto") - assert set(ds.GetRelationshipNames()) == { - "my_relationship", - 
"my_one_to_many_relationship", - "many_to_many", - } - ds = gdal.OpenEx(dirname, gdal.GA_Update) - assert set(ds.GetRelationshipNames()) == { - "my_relationship", - "my_one_to_many_relationship", - "many_to_many", - } + relationship = gdal.Relationship( + "many_to_many_auto", + "origin_table_many_to_many2", + "dest_table_many_to_many2", + gdal.GRC_MANY_TO_MANY, + ) + relationship.SetLeftTableFields(["o_pkey"]) + relationship.SetRightTableFields(["dest_pkey"]) - # make sure we are correctly cleaned up - items_lyr = ds.GetLayerByName("GDB_Items") - for f in items_lyr: - assert f["UUID"] != relationship_uuid - - # check item relationships have been created - item_relationships_lyr = ds.GetLayerByName("GDB_ItemRelationships") - for f in item_relationships_lyr: - assert f["OriginID"] != relationship_uuid - assert f["DestID"] != relationship_uuid - - # update relationship - relationship = gdal.Relationship( - "i dont exist", - "origin_table_1_to_many", - "dest_table_1_to_many", - gdal.GRC_ONE_TO_MANY, - ) - assert not ds.UpdateRelationship(relationship) + assert ds.AddRelationship(relationship) - relationship = gdal.Relationship( - "my_one_to_many_relationship", - "origin_table_1_to_many", - "dest_table_1_to_many", - gdal.GRC_ONE_TO_MANY, - ) - relationship.SetLeftTableFields(["o_pkey"]) - relationship.SetRightTableFields(["dest_pkey"]) - relationship.SetType(gdal.GRT_COMPOSITE) - relationship.SetForwardPathLabel("my new fwd label") - relationship.SetBackwardPathLabel("my new backward label") - assert ds.UpdateRelationship(relationship) - - ds = gdal.OpenEx(dirname, gdal.GA_Update) - assert set(ds.GetRelationshipNames()) == { - "my_relationship", - "my_one_to_many_relationship", - "many_to_many", - } - retrieved_rel = ds.GetRelationship("my_one_to_many_relationship") - assert retrieved_rel.GetCardinality() == gdal.GRC_ONE_TO_MANY - assert retrieved_rel.GetType() == gdal.GRT_COMPOSITE - assert retrieved_rel.GetLeftTableName() == "origin_table_1_to_many" - assert retrieved_rel.GetRightTableName() == "dest_table_1_to_many" - assert retrieved_rel.GetLeftTableFields() == ["o_pkey"] - assert retrieved_rel.GetRightTableFields() == ["dest_pkey"] - assert retrieved_rel.GetForwardPathLabel() == "my new fwd label" - assert retrieved_rel.GetBackwardPathLabel() == "my new backward label" - assert retrieved_rel.GetRelatedTableType() == "features" - - # change relationship tables - lyr = ds.CreateLayer("new_origin_table", geom_type=ogr.wkbNone) - fld_defn = ogr.FieldDefn("new_o_pkey", ogr.OFTInteger) - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + ds = gdal.OpenEx(dirname, gdal.GA_Update) + assert set(ds.GetRelationshipNames()) == { + "my_relationship", + "my_one_to_many_relationship", + "many_to_many", + "many_to_many_auto", + } + retrieved_rel = ds.GetRelationship("many_to_many_auto") + assert retrieved_rel.GetCardinality() == gdal.GRC_MANY_TO_MANY + assert retrieved_rel.GetType() == gdal.GRT_ASSOCIATION + assert retrieved_rel.GetLeftTableName() == "origin_table_many_to_many2" + assert retrieved_rel.GetRightTableName() == "dest_table_many_to_many2" + assert retrieved_rel.GetLeftTableFields() == ["o_pkey"] + assert retrieved_rel.GetRightTableFields() == ["dest_pkey"] + assert retrieved_rel.GetMappingTableName() == "many_to_many_auto" + assert retrieved_rel.GetLeftMappingTableFields() == ["origin_fk"] + assert retrieved_rel.GetRightMappingTableFields() == ["destination_fk"] + # make sure mapping table was created + mapping_table = ds.GetLayerByName("many_to_many_auto") + assert mapping_table is not 
None + lyr_defn = mapping_table.GetLayerDefn() + assert mapping_table.GetFIDColumn() == "RID" + assert lyr_defn.GetFieldIndex("origin_fk") >= 0 + assert lyr_defn.GetFieldIndex("destination_fk") >= 0 + + items_lyr = ds.GetLayerByName("GDB_Items") + f = items_lyr.GetFeature(16) + relationship_uuid = f["UUID"] + assert f["Name"] == "many_to_many_auto" + assert f["Type"] == "{B606A7E1-FA5B-439C-849C-6E9C2481537B}" + + # delete relationship + assert not ds.DeleteRelationship("i dont exist") + assert set(ds.GetRelationshipNames()) == { + "my_relationship", + "my_one_to_many_relationship", + "many_to_many", + "many_to_many_auto", + } - lyr = ds.CreateLayer("new_dest_table", geom_type=ogr.wkbNone) - fld_defn = ogr.FieldDefn("new_dest_pkey", ogr.OFTInteger) - assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + assert ds.DeleteRelationship("many_to_many_auto") + assert set(ds.GetRelationshipNames()) == { + "my_relationship", + "my_one_to_many_relationship", + "many_to_many", + } + ds = gdal.OpenEx(dirname, gdal.GA_Update) + assert set(ds.GetRelationshipNames()) == { + "my_relationship", + "my_one_to_many_relationship", + "many_to_many", + } - ds = gdal.OpenEx(dirname, gdal.GA_Update) - relationship = gdal.Relationship( - "my_one_to_many_relationship", - "new_origin_table", - "new_dest_table", - gdal.GRC_ONE_TO_MANY, - ) - relationship.SetLeftTableFields(["new_o_pkey"]) - relationship.SetRightTableFields(["new_dest_pkey"]) - assert ds.UpdateRelationship(relationship) - - ds = gdal.OpenEx(dirname, gdal.GA_Update) - assert set(ds.GetRelationshipNames()) == { - "my_relationship", - "my_one_to_many_relationship", - "many_to_many", - } - retrieved_rel = ds.GetRelationship("my_one_to_many_relationship") - assert retrieved_rel.GetCardinality() == gdal.GRC_ONE_TO_MANY - assert retrieved_rel.GetType() == gdal.GRT_ASSOCIATION - assert retrieved_rel.GetLeftTableName() == "new_origin_table" - assert retrieved_rel.GetRightTableName() == "new_dest_table" - assert retrieved_rel.GetLeftTableFields() == ["new_o_pkey"] - assert retrieved_rel.GetRightTableFields() == ["new_dest_pkey"] - - # make sure GDB_ItemRelationships table has been updated - items_lyr = ds.GetLayerByName("GDB_Items") - f = items_lyr.GetFeature(8) - relationship_uuid = f["UUID"] - assert f["Name"] == "my_one_to_many_relationship" - assert f["Type"] == "{B606A7E1-FA5B-439C-849C-6E9C2481537B}" - - f = items_lyr.GetFeature(18) - assert f["Name"] == "new_origin_table" - origin_table_uuid = f["UUID"] - - f = items_lyr.GetFeature(19) - assert f["Name"] == "new_dest_table" - dest_table_uuid = f["UUID"] - - item_relationships_lyr = ds.GetLayerByName("GDB_ItemRelationships") + # make sure we are correctly cleaned up + items_lyr = ds.GetLayerByName("GDB_Items") + for f in items_lyr: + assert f["UUID"] != relationship_uuid + + # check item relationships have been created + item_relationships_lyr = ds.GetLayerByName("GDB_ItemRelationships") + for f in item_relationships_lyr: + assert f["OriginID"] != relationship_uuid + assert f["DestID"] != relationship_uuid + + # update relationship + relationship = gdal.Relationship( + "i dont exist", + "origin_table_1_to_many", + "dest_table_1_to_many", + gdal.GRC_ONE_TO_MANY, + ) + assert not ds.UpdateRelationship(relationship) - assert ( - len( - [ - f - for f in item_relationships_lyr - if f["OriginID"] == origin_table_uuid - and f["DestID"] == relationship_uuid - and f["Type"] == "{725BADAB-3452-491B-A795-55F32D67229C}" - ] - ) - == 1 + relationship = gdal.Relationship( + "my_one_to_many_relationship", + 
"origin_table_1_to_many", + "dest_table_1_to_many", + gdal.GRC_ONE_TO_MANY, + ) + relationship.SetLeftTableFields(["o_pkey"]) + relationship.SetRightTableFields(["dest_pkey"]) + relationship.SetType(gdal.GRT_COMPOSITE) + relationship.SetForwardPathLabel("my new fwd label") + relationship.SetBackwardPathLabel("my new backward label") + assert ds.UpdateRelationship(relationship) + + ds = gdal.OpenEx(dirname, gdal.GA_Update) + assert set(ds.GetRelationshipNames()) == { + "my_relationship", + "my_one_to_many_relationship", + "many_to_many", + } + retrieved_rel = ds.GetRelationship("my_one_to_many_relationship") + assert retrieved_rel.GetCardinality() == gdal.GRC_ONE_TO_MANY + assert retrieved_rel.GetType() == gdal.GRT_COMPOSITE + assert retrieved_rel.GetLeftTableName() == "origin_table_1_to_many" + assert retrieved_rel.GetRightTableName() == "dest_table_1_to_many" + assert retrieved_rel.GetLeftTableFields() == ["o_pkey"] + assert retrieved_rel.GetRightTableFields() == ["dest_pkey"] + assert retrieved_rel.GetForwardPathLabel() == "my new fwd label" + assert retrieved_rel.GetBackwardPathLabel() == "my new backward label" + assert retrieved_rel.GetRelatedTableType() == "features" + + # change relationship tables + lyr = ds.CreateLayer("new_origin_table", geom_type=ogr.wkbNone) + fld_defn = ogr.FieldDefn("new_o_pkey", ogr.OFTInteger) + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + + lyr = ds.CreateLayer("new_dest_table", geom_type=ogr.wkbNone) + fld_defn = ogr.FieldDefn("new_dest_pkey", ogr.OFTInteger) + assert lyr.CreateField(fld_defn) == ogr.OGRERR_NONE + + ds = gdal.OpenEx(dirname, gdal.GA_Update) + relationship = gdal.Relationship( + "my_one_to_many_relationship", + "new_origin_table", + "new_dest_table", + gdal.GRC_ONE_TO_MANY, + ) + relationship.SetLeftTableFields(["new_o_pkey"]) + relationship.SetRightTableFields(["new_dest_pkey"]) + assert ds.UpdateRelationship(relationship) + + ds = gdal.OpenEx(dirname, gdal.GA_Update) + assert set(ds.GetRelationshipNames()) == { + "my_relationship", + "my_one_to_many_relationship", + "many_to_many", + } + retrieved_rel = ds.GetRelationship("my_one_to_many_relationship") + assert retrieved_rel.GetCardinality() == gdal.GRC_ONE_TO_MANY + assert retrieved_rel.GetType() == gdal.GRT_ASSOCIATION + assert retrieved_rel.GetLeftTableName() == "new_origin_table" + assert retrieved_rel.GetRightTableName() == "new_dest_table" + assert retrieved_rel.GetLeftTableFields() == ["new_o_pkey"] + assert retrieved_rel.GetRightTableFields() == ["new_dest_pkey"] + + # make sure GDB_ItemRelationships table has been updated + items_lyr = ds.GetLayerByName("GDB_Items") + f = items_lyr.GetFeature(8) + relationship_uuid = f["UUID"] + assert f["Name"] == "my_one_to_many_relationship" + assert f["Type"] == "{B606A7E1-FA5B-439C-849C-6E9C2481537B}" + + f = items_lyr.GetFeature(18) + assert f["Name"] == "new_origin_table" + origin_table_uuid = f["UUID"] + + f = items_lyr.GetFeature(19) + assert f["Name"] == "new_dest_table" + dest_table_uuid = f["UUID"] + + item_relationships_lyr = ds.GetLayerByName("GDB_ItemRelationships") + + assert ( + len( + [ + f + for f in item_relationships_lyr + if f["OriginID"] == origin_table_uuid + and f["DestID"] == relationship_uuid + and f["Type"] == "{725BADAB-3452-491B-A795-55F32D67229C}" + ] ) - assert ( - len( - [ - f - for f in item_relationships_lyr - if f["OriginID"] == dest_table_uuid - and f["DestID"] == relationship_uuid - and f["Type"] == "{725BADAB-3452-491B-A795-55F32D67229C}" - ] - ) - == 1 + == 1 + ) + assert ( + len( + [ + f + for f 
in item_relationships_lyr + if f["OriginID"] == dest_table_uuid + and f["DestID"] == relationship_uuid + and f["Type"] == "{725BADAB-3452-491B-A795-55F32D67229C}" + ] ) - assert ( - len( - [ - f - for f in item_relationships_lyr - if f["OriginID"] == root_dataset_uuid - and f["DestID"] == relationship_uuid - and f["Type"] == "{DC78F1AB-34E4-43AC-BA47-1C4EABD0E7C7}" - ] - ) - == 1 + == 1 + ) + assert ( + len( + [ + f + for f in item_relationships_lyr + if f["OriginID"] == root_dataset_uuid + and f["DestID"] == relationship_uuid + and f["Type"] == "{DC78F1AB-34E4-43AC-BA47-1C4EABD0E7C7}" + ] ) - - finally: - gdal.RmdirRecursive(dirname) + == 1 + ) ############################################################################### # Test emulated transactions -def test_ogr_openfilegdb_write_emulated_transactions(): +def test_ogr_openfilegdb_write_emulated_transactions(tmp_path): - dirname = "tmp/test_ogr_openfilegdb_write_emulated_transactions.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + dirname = tmp_path / "test_ogr_openfilegdb_write_emulated_transactions.gdb" - gdal.Mkdir(dirname + "/.ogrtransaction_backup", 0o755) - with gdal.quiet_errors(): - assert ds.StartTransaction(True) == ogr.OGRERR_FAILURE - gdal.Rmdir(dirname + "/.ogrtransaction_backup") + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - assert ds.TestCapability(ogr.ODsCEmulatedTransactions) - assert ds.StartTransaction(True) == ogr.OGRERR_NONE + gdal.Mkdir(dirname / ".ogrtransaction_backup", 0o755) + with gdal.quiet_errors(): + assert ds.StartTransaction(True) == ogr.OGRERR_FAILURE + gdal.Rmdir(dirname / ".ogrtransaction_backup") - assert gdal.VSIStatL(dirname + "/.ogrtransaction_backup") is not None + assert ds.TestCapability(ogr.ODsCEmulatedTransactions) + assert ds.StartTransaction(True) == ogr.OGRERR_NONE - assert ds.CommitTransaction() == ogr.OGRERR_NONE + assert gdal.VSIStatL(dirname / ".ogrtransaction_backup") is not None - assert gdal.VSIStatL(dirname + "/.ogrtransaction_backup") is None + assert ds.CommitTransaction() == ogr.OGRERR_NONE - assert ds.StartTransaction(True) == ogr.OGRERR_NONE - assert ds.RollbackTransaction() == ogr.OGRERR_NONE + assert gdal.VSIStatL(dirname / ".ogrtransaction_backup") is None - assert gdal.VSIStatL(dirname + "/.ogrtransaction_backup") is None + assert ds.StartTransaction(True) == ogr.OGRERR_NONE + assert ds.RollbackTransaction() == ogr.OGRERR_NONE - assert ds.StartTransaction(True) == ogr.OGRERR_NONE - with gdal.quiet_errors(): - assert ds.StartTransaction(True) != ogr.OGRERR_NONE - assert ds.RollbackTransaction() == ogr.OGRERR_NONE + assert gdal.VSIStatL(dirname / ".ogrtransaction_backup") is None - assert gdal.VSIStatL(dirname + "/.ogrtransaction_backup") is None + assert ds.StartTransaction(True) == ogr.OGRERR_NONE + with gdal.quiet_errors(): + assert ds.StartTransaction(True) != ogr.OGRERR_NONE + assert ds.RollbackTransaction() == ogr.OGRERR_NONE - with gdal.quiet_errors(): - assert ds.CommitTransaction() != ogr.OGRERR_NONE + assert gdal.VSIStatL(dirname / ".ogrtransaction_backup") is None - assert gdal.VSIStatL(dirname + "/.ogrtransaction_backup") is None + with gdal.quiet_errors(): + assert ds.CommitTransaction() != ogr.OGRERR_NONE - with gdal.quiet_errors(): - assert ds.RollbackTransaction() != ogr.OGRERR_NONE + assert gdal.VSIStatL(dirname / ".ogrtransaction_backup") is None - assert gdal.VSIStatL(dirname + "/.ogrtransaction_backup") is None + with gdal.quiet_errors(): + assert ds.RollbackTransaction() != ogr.OGRERR_NONE - assert 
ds.StartTransaction(True) == ogr.OGRERR_NONE - lyr = ds.CreateLayer("foo", geom_type=ogr.wkbNone) - assert gdal.VSIStatL(dirname + "/.ogrtransaction_backup") is not None - assert lyr is not None - assert lyr.CreateFeature(ogr.Feature(lyr.GetLayerDefn())) == ogr.OGRERR_NONE - assert lyr.GetFeatureCount() == 1 - assert ds.RollbackTransaction() == ogr.OGRERR_NONE + assert gdal.VSIStatL(dirname / ".ogrtransaction_backup") is None - assert gdal.VSIStatL(dirname + "/.ogrtransaction_backup") is None + assert ds.StartTransaction(True) == ogr.OGRERR_NONE + lyr = ds.CreateLayer("foo", geom_type=ogr.wkbNone) + assert gdal.VSIStatL(dirname / ".ogrtransaction_backup") is not None + assert lyr is not None + assert lyr.CreateFeature(ogr.Feature(lyr.GetLayerDefn())) == ogr.OGRERR_NONE + assert lyr.GetFeatureCount() == 1 + assert ds.RollbackTransaction() == ogr.OGRERR_NONE - # It is in a ghost state after rollback - assert lyr.GetFeatureCount() == 0 + assert gdal.VSIStatL(dirname / ".ogrtransaction_backup") is None - assert ds.StartTransaction(True) == ogr.OGRERR_NONE + # It is in a ghost state after rollback + assert lyr.GetFeatureCount() == 0 - # Implicit rollback - ds = None + assert ds.StartTransaction(True) == ogr.OGRERR_NONE - ds = ogr.Open(dirname, update=1) - assert ds.StartTransaction(True) == ogr.OGRERR_NONE - gdal.Rmdir(dirname + "/.ogrtransaction_backup") - with gdal.quiet_errors(): - assert ds.RollbackTransaction() == ogr.OGRERR_FAILURE - ds = None + # Implicit rollback + ds = None - ds = ogr.Open(dirname, update=1) - assert ds.TestCapability(ogr.ODsCEmulatedTransactions) - assert ds.GetLayerCount() == 0 - assert gdal.VSIStatL(dirname + "/a00000009.gdbtable") is None + ds = ogr.Open(dirname, update=1) + assert ds.StartTransaction(True) == ogr.OGRERR_NONE + gdal.Rmdir(dirname / ".ogrtransaction_backup") + with gdal.quiet_errors(): + assert ds.RollbackTransaction() == ogr.OGRERR_FAILURE + ds = None - assert ds.StartTransaction(True) == ogr.OGRERR_NONE + ds = ogr.Open(dirname, update=1) + assert ds.TestCapability(ogr.ODsCEmulatedTransactions) + assert ds.GetLayerCount() == 0 + assert gdal.VSIStatL(dirname / "a00000009.gdbtable") is None - assert ds.CreateLayer("foo", geom_type=ogr.wkbNone) is not None - assert gdal.VSIStatL(dirname + "/a00000009.gdbtable") is not None + assert ds.StartTransaction(True) == ogr.OGRERR_NONE - assert ds.DeleteLayer(0) == ogr.OGRERR_NONE - assert gdal.VSIStatL(dirname + "/a00000009.gdbtable") is None + assert ds.CreateLayer("foo", geom_type=ogr.wkbNone) is not None + assert gdal.VSIStatL(dirname / "a00000009.gdbtable") is not None - assert ds.CreateLayer("foo2", geom_type=ogr.wkbNone) is not None - assert gdal.VSIStatL(dirname + "/a0000000a.gdbtable") is not None + assert ds.DeleteLayer(0) == ogr.OGRERR_NONE + assert gdal.VSIStatL(dirname / "a00000009.gdbtable") is None - assert ds.CommitTransaction() == ogr.OGRERR_NONE + assert ds.CreateLayer("foo2", geom_type=ogr.wkbNone) is not None + assert gdal.VSIStatL(dirname / "a0000000a.gdbtable") is not None - assert gdal.VSIStatL(dirname + "/a0000000a.gdbtable") is not None + assert ds.CommitTransaction() == ogr.OGRERR_NONE - assert ds.StartTransaction(True) == ogr.OGRERR_NONE - assert ds.DeleteLayer(0) == ogr.OGRERR_NONE - assert gdal.VSIStatL(dirname + "/a0000000a.gdbtable") is None - assert ds.RollbackTransaction() == ogr.OGRERR_NONE - assert gdal.VSIStatL(dirname + "/a0000000a.gdbtable") is not None - ds = None + assert gdal.VSIStatL(dirname / "a0000000a.gdbtable") is not None - gdal.Mkdir(dirname + 
"/.ogrtransaction_backup", 0o755) - with gdal.quiet_errors(): - # Cannot open in update mode with an existing backup directory - assert ogr.Open(dirname, update=1) is None + assert ds.StartTransaction(True) == ogr.OGRERR_NONE + assert ds.DeleteLayer(0) == ogr.OGRERR_NONE + assert gdal.VSIStatL(dirname / "a0000000a.gdbtable") is None + assert ds.RollbackTransaction() == ogr.OGRERR_NONE + assert gdal.VSIStatL(dirname / "a0000000a.gdbtable") is not None + ds = None - # Emit warning in read-only mode when opening with an existing backup directory - gdal.ErrorReset() - assert ogr.Open(dirname) is not None - assert "A previous backup directory" in gdal.GetLastErrorMsg() - gdal.Rmdir(dirname + "/.ogrtransaction_backup") + gdal.Mkdir(dirname / ".ogrtransaction_backup", 0o755) + with gdal.quiet_errors(): + # Cannot open in update mode with an existing backup directory + assert ogr.Open(dirname, update=1) is None - # Transaction not supported in read-only mode - ds = ogr.Open(dirname) - assert ds.TestCapability(ogr.ODsCEmulatedTransactions) == 0 - with gdal.quiet_errors(): - assert ds.StartTransaction(True) == ogr.OGRERR_FAILURE - ds = None + # Emit warning in read-only mode when opening with an existing backup directory + gdal.ErrorReset() + assert ogr.Open(dirname) is not None + assert "A previous backup directory" in gdal.GetLastErrorMsg() + gdal.Rmdir(dirname / ".ogrtransaction_backup") - ds = ogr.Open(dirname, update=1) - assert ds.GetLayerCount() == 1 - lyr = ds.GetLayerByName("foo2") + # Transaction not supported in read-only mode + ds = ogr.Open(dirname) + assert ds.TestCapability(ogr.ODsCEmulatedTransactions) == 0 + with gdal.quiet_errors(): + assert ds.StartTransaction(True) == ogr.OGRERR_FAILURE + ds = None - assert ds.StartTransaction(True) == ogr.OGRERR_NONE - assert lyr.CreateFeature(ogr.Feature(lyr.GetLayerDefn())) == ogr.OGRERR_NONE - assert lyr.GetFeatureCount() == 1 - assert ds.CommitTransaction() == ogr.OGRERR_NONE - assert lyr.GetFeatureCount() == 1 + ds = ogr.Open(dirname, update=1) + assert ds.GetLayerCount() == 1 + lyr = ds.GetLayerByName("foo2") - ds = None + assert ds.StartTransaction(True) == ogr.OGRERR_NONE + assert lyr.CreateFeature(ogr.Feature(lyr.GetLayerDefn())) == ogr.OGRERR_NONE + assert lyr.GetFeatureCount() == 1 + assert ds.CommitTransaction() == ogr.OGRERR_NONE + assert lyr.GetFeatureCount() == 1 - ds = ogr.Open(dirname, update=1) - lyr = ds.GetLayerByName("foo2") - assert lyr.GetFeatureCount() == 1 + ds = None - assert ds.StartTransaction(True) == ogr.OGRERR_NONE - assert lyr.CreateFeature(ogr.Feature(lyr.GetLayerDefn())) == ogr.OGRERR_NONE - assert lyr.GetFeatureCount() == 2 - assert ds.RollbackTransaction() == ogr.OGRERR_NONE - assert lyr.GetFeatureCount() == 1 + ds = ogr.Open(dirname, update=1) + lyr = ds.GetLayerByName("foo2") + assert lyr.GetFeatureCount() == 1 - # Test that StartTransaction() / RollbackTransaction() doesn't destroy - # unmodified layers! (https://github.com/OSGeo/gdal/issues/5952) - assert ds.StartTransaction(True) == ogr.OGRERR_NONE - assert ds.RollbackTransaction() == ogr.OGRERR_NONE + assert ds.StartTransaction(True) == ogr.OGRERR_NONE + assert lyr.CreateFeature(ogr.Feature(lyr.GetLayerDefn())) == ogr.OGRERR_NONE + assert lyr.GetFeatureCount() == 2 + assert ds.RollbackTransaction() == ogr.OGRERR_NONE + assert lyr.GetFeatureCount() == 1 - ds = None + # Test that StartTransaction() / RollbackTransaction() doesn't destroy + # unmodified layers! 
(https://github.com/OSGeo/gdal/issues/5952) + assert ds.StartTransaction(True) == ogr.OGRERR_NONE + assert ds.RollbackTransaction() == ogr.OGRERR_NONE - ds = ogr.Open(dirname, update=1) - lyr = ds.GetLayerByName("foo2") - assert lyr.GetFeatureCount() == 1 - ds = None + ds = None - finally: - gdal.RmdirRecursive(dirname) + ds = ogr.Open(dirname, update=1) + lyr = ds.GetLayerByName("foo2") + assert lyr.GetFeatureCount() == 1 + ds = None ############################################################################### -def test_ogr_openfilegdb_write_emulated_transactions_delete_field_before_geom(): +def test_ogr_openfilegdb_write_emulated_transactions_delete_field_before_geom( + tmp_vsimem, +): - dirname = "/vsimem/out.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + dirname = tmp_vsimem / "out.gdb" - with gdaltest.config_option("OPENFILEGDB_CREATE_FIELD_BEFORE_GEOMETRY", "YES"): - lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - assert lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) == ogr.OGRERR_NONE + with gdaltest.config_option("OPENFILEGDB_CREATE_FIELD_BEFORE_GEOMETRY", "YES"): + lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetField("field_before_geom", "to be deleted") - f.SetField("str", "foo") - f.SetGeometry(ogr.CreateGeometryFromWkt("POINT(1 2)")) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - f = None + assert lyr.CreateField(ogr.FieldDefn("str", ogr.OFTString)) == ogr.OGRERR_NONE - assert ds.StartTransaction(True) == ogr.OGRERR_NONE + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetField("field_before_geom", "to be deleted") + f.SetField("str", "foo") + f.SetGeometry(ogr.CreateGeometryFromWkt("POINT(1 2)")) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + f = None - assert ( - lyr.DeleteField(lyr.GetLayerDefn().GetFieldIndex("field_before_geom")) - == ogr.OGRERR_NONE - ) + assert ds.StartTransaction(True) == ogr.OGRERR_NONE - assert ds.RollbackTransaction() == ogr.OGRERR_NONE + assert ( + lyr.DeleteField(lyr.GetLayerDefn().GetFieldIndex("field_before_geom")) + == ogr.OGRERR_NONE + ) - lyr.ResetReading() - f = lyr.GetNextFeature() - assert f.GetField("field_before_geom") == "to be deleted" - assert f.GetField("str") == "foo" - assert f.GetGeometryRef() is not None + assert ds.RollbackTransaction() == ogr.OGRERR_NONE - ds = None + lyr.ResetReading() + f = lyr.GetNextFeature() + assert f.GetField("field_before_geom") == "to be deleted" + assert f.GetField("str") == "foo" + assert f.GetGeometryRef() is not None - finally: - gdal.RmdirRecursive(dirname) + ds = None ############################################################################### @@ -3480,168 +3380,162 @@ def test_ogr_openfilegdb_write_emulated_transactions_delete_field_before_geom(): @pytest.mark.parametrize("options", [[], ["FEATURE_DATASET=fd1"]]) -def test_ogr_openfilegdb_write_rename_layer(options): - - dirname = "tmp/rename.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - lyr = ds.CreateLayer("other_layer", geom_type=ogr.wkbNone) - lyr.SyncToDisk() +def test_ogr_openfilegdb_write_rename_layer(tmp_path, options): - lyr = ds.CreateLayer("foo", geom_type=ogr.wkbPoint, options=options) - assert lyr.TestCapability(ogr.OLCRename) == 1 + dirname = tmp_path / "rename.gdb" - assert lyr.Rename("bar") == ogr.OGRERR_NONE - assert lyr.GetDescription() == "bar" - assert lyr.GetLayerDefn().GetName() == "bar" + ds = 
ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + lyr = ds.CreateLayer("other_layer", geom_type=ogr.wkbNone) + lyr.SyncToDisk() - # Too long layer name - with gdal.quiet_errors(): - assert lyr.Rename("x" * 200) != ogr.OGRERR_NONE + lyr = ds.CreateLayer("foo", geom_type=ogr.wkbPoint, options=options) + assert lyr.TestCapability(ogr.OLCRename) == 1 - with gdal.quiet_errors(): - assert lyr.Rename("bar") != ogr.OGRERR_NONE + assert lyr.Rename("bar") == ogr.OGRERR_NONE + assert lyr.GetDescription() == "bar" + assert lyr.GetLayerDefn().GetName() == "bar" - with gdal.quiet_errors(): - assert lyr.Rename("other_layer") != ogr.OGRERR_NONE + # Too long layer name + with gdal.quiet_errors(): + assert lyr.Rename("x" * 200) != ogr.OGRERR_NONE - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetGeometryDirectly(ogr.CreateGeometryFromWkt("POINT (1 2)")) - lyr.CreateFeature(f) + with gdal.quiet_errors(): + assert lyr.Rename("bar") != ogr.OGRERR_NONE - ds = ogr.Open(dirname, update=1) + with gdal.quiet_errors(): + assert lyr.Rename("other_layer") != ogr.OGRERR_NONE - # Check system tables - system_catolog_lyr = ds.GetLayerByName("GDB_SystemCatalog") - f = system_catolog_lyr.GetFeature(10) - assert f["Name"] == "bar" + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetGeometryDirectly(ogr.CreateGeometryFromWkt("POINT (1 2)")) + lyr.CreateFeature(f) - items_lyr = ds.GetLayerByName("GDB_Items") - if options == []: - f = items_lyr.GetFeature(4) - assert f["Path"] == "\\bar" - assert "<CatalogPath>\\bar</CatalogPath>" in f["Definition"] - else: - f = items_lyr.GetFeature(5) - assert f["Path"] == "\\fd1\\bar" - assert "<CatalogPath>\\fd1\\bar</CatalogPath>" in f["Definition"] - assert f["Name"] == "bar" - assert f["PhysicalName"] == "BAR" - assert "<Name>bar</Name>" in f["Definition"] - - # Second renaming, after dataset reopening - lyr = ds.GetLayerByName("bar") - assert lyr.Rename("baz") == ogr.OGRERR_NONE - assert lyr.GetDescription() == "baz" - assert lyr.GetLayerDefn().GetName() == "baz" + ds = ogr.Open(dirname, update=1) - lyr.ResetReading() - f = lyr.GetNextFeature() - assert f.GetGeometryRef() is not None + # Check system tables + system_catolog_lyr = ds.GetLayerByName("GDB_SystemCatalog") + f = system_catolog_lyr.GetFeature(10) + assert f["Name"] == "bar" - ds = None + items_lyr = ds.GetLayerByName("GDB_Items") + if options == []: + f = items_lyr.GetFeature(4) + assert f["Path"] == "\\bar" + assert "<CatalogPath>\\bar</CatalogPath>" in f["Definition"] + else: + f = items_lyr.GetFeature(5) + assert f["Path"] == "\\fd1\\bar" + assert "<CatalogPath>\\fd1\\bar</CatalogPath>" in f["Definition"] + assert f["Name"] == "bar" + assert f["PhysicalName"] == "BAR" + assert "<Name>bar</Name>" in f["Definition"] + + # Second renaming, after dataset reopening + lyr = ds.GetLayerByName("bar") + assert lyr.Rename("baz") == ogr.OGRERR_NONE + assert lyr.GetDescription() == "baz" + assert lyr.GetLayerDefn().GetName() == "baz" + + lyr.ResetReading() + f = lyr.GetNextFeature() + assert f.GetGeometryRef() is not None - ds = ogr.Open(dirname) + ds = None - # Check system tables - system_catolog_lyr = ds.GetLayerByName("GDB_SystemCatalog") - f = system_catolog_lyr.GetFeature(10) - assert f["Name"] == "baz" + ds = ogr.Open(dirname) - items_lyr = ds.GetLayerByName("GDB_Items") - if options == []: - f = items_lyr.GetFeature(4) - assert f["Path"] == "\\baz" - assert "<CatalogPath>\\baz</CatalogPath>" in f["Definition"] - else: - f = items_lyr.GetFeature(5) - assert f["Path"] == "\\fd1\\baz" - assert 
"<CatalogPath>\\fd1\\baz</CatalogPath>" in f["Definition"] - assert f["Name"] == "baz" - assert f["PhysicalName"] == "BAZ" - assert "<Name>baz</Name>" in f["Definition"] - - lyr = ds.GetLayerByName("baz") - assert lyr is not None, [ - ds.GetLayer(i).GetName() for i in range(ds.GetLayerCount()) - ] + # Check system tables + system_catolog_lyr = ds.GetLayerByName("GDB_SystemCatalog") + f = system_catolog_lyr.GetFeature(10) + assert f["Name"] == "baz" - lyr.ResetReading() - f = lyr.GetNextFeature() - assert f.GetGeometryRef() is not None + items_lyr = ds.GetLayerByName("GDB_Items") + if options == []: + f = items_lyr.GetFeature(4) + assert f["Path"] == "\\baz" + assert "<CatalogPath>\\baz</CatalogPath>" in f["Definition"] + else: + f = items_lyr.GetFeature(5) + assert f["Path"] == "\\fd1\\baz" + assert "<CatalogPath>\\fd1\\baz</CatalogPath>" in f["Definition"] + assert f["Name"] == "baz" + assert f["PhysicalName"] == "BAZ" + assert "<Name>baz</Name>" in f["Definition"] + + lyr = ds.GetLayerByName("baz") + assert lyr is not None, [ + ds.GetLayer(i).GetName() for i in range(ds.GetLayerCount()) + ] - ds = None + lyr.ResetReading() + f = lyr.GetNextFeature() + assert f.GetGeometryRef() is not None - finally: - gdal.RmdirRecursive(dirname) + ds = None ############################################################################### # Test field name laundering (#4458) -def test_ogr_openfilegdb_field_name_laundering(): +def test_ogr_openfilegdb_field_name_laundering(tmp_vsimem): - dirname = "/vsimem/out.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) - with gdal.quiet_errors(): - lyr.CreateField(ogr.FieldDefn("FROM", ogr.OFTInteger)) # reserved keyword - lyr.CreateField( - ogr.FieldDefn("1NUMBER", ogr.OFTInteger) - ) # starting with a number - lyr.CreateField( - ogr.FieldDefn("WITH SPACE AND !$*!- special characters", ogr.OFTInteger) - ) # unallowed characters - lyr.CreateField(ogr.FieldDefn("é" * 64, ogr.OFTInteger)) # OK - lyr.CreateField( - ogr.FieldDefn( - "A123456789012345678901234567890123456789012345678901234567890123", - ogr.OFTInteger, - ) - ) # 64 characters : ok - lyr.CreateField( - ogr.FieldDefn( - "A1234567890123456789012345678901234567890123456789012345678901234", - ogr.OFTInteger, - ) - ) # 65 characters : nok - lyr.CreateField( - ogr.FieldDefn( - "A12345678901234567890123456789012345678901234567890123456789012345", - ogr.OFTInteger, - ) - ) # 66 characters : nok - - lyr_defn = lyr.GetLayerDefn() - expected_names = [ - "FROM_", - "_1NUMBER", - "WITH_SPACE_AND_______special_characters", - "é" * 64, - "A123456789012345678901234567890123456789012345678901234567890123", - "A1234567890123456789012345678901234567890123456789012345678901_1", - "A1234567890123456789012345678901234567890123456789012345678901_2", - ] - for i in range(5): - assert lyr_defn.GetFieldIndex(expected_names[i]) == i, ( - "did not find %s" % expected_names[i] + dirname = tmp_vsimem / "out.gdb" + + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) + with gdal.quiet_errors(): + lyr.CreateField(ogr.FieldDefn("FROM", ogr.OFTInteger)) # reserved keyword + lyr.CreateField( + ogr.FieldDefn("1NUMBER", ogr.OFTInteger) + ) # starting with a number + lyr.CreateField( + ogr.FieldDefn("WITH SPACE AND !$*!- special characters", ogr.OFTInteger) + ) # unallowed characters + lyr.CreateField(ogr.FieldDefn("é" * 64, ogr.OFTInteger)) # OK + lyr.CreateField( + ogr.FieldDefn( + 
"A123456789012345678901234567890123456789012345678901234567890123", + ogr.OFTInteger, + ) + ) # 64 characters : ok + lyr.CreateField( + ogr.FieldDefn( + "A1234567890123456789012345678901234567890123456789012345678901234", + ogr.OFTInteger, ) + ) # 65 characters : nok + lyr.CreateField( + ogr.FieldDefn( + "A12345678901234567890123456789012345678901234567890123456789012345", + ogr.OFTInteger, + ) + ) # 66 characters : nok - ds = None + lyr_defn = lyr.GetLayerDefn() + expected_names = [ + "FROM_", + "_1NUMBER", + "WITH_SPACE_AND_______special_characters", + "é" * 64, + "A123456789012345678901234567890123456789012345678901234567890123", + "A1234567890123456789012345678901234567890123456789012345678901_1", + "A1234567890123456789012345678901234567890123456789012345678901_2", + ] + for i in range(5): + assert lyr_defn.GetFieldIndex(expected_names[i]) == i, ( + "did not find %s" % expected_names[i] + ) - finally: - gdal.RmdirRecursive(dirname) + ds = None ############################################################################### # Test layer name laundering (#4466) -def test_ogr_openfilegdb_layer_name_laundering(): +def test_ogr_openfilegdb_layer_name_laundering(tmp_vsimem): - dirname = "/vsimem/out.gdb" + dirname = tmp_vsimem / "out.gdb" _160char = "A123456789" * 16 @@ -3655,423 +3549,397 @@ def test_ogr_openfilegdb_layer_name_laundering(): _160char + "B", # still too long ] - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - with gdal.quiet_errors(): - for in_name in in_names: - ds.CreateLayer(in_name, geom_type=ogr.wkbPoint) - - expected_names = [ - "FROM_", - "_1NUMBER", - "WITH_SPACE_AND_______special_characters", - "_sde_foo", - _160char, - _160char[0:158] + "_1", - _160char[0:158] + "_2", - ] - for i, exp_name in enumerate(expected_names): - assert ds.GetLayerByIndex(i).GetName() == exp_name, ( - "did not find %s" % exp_name - ) - - ds = None + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + with gdal.quiet_errors(): + for in_name in in_names: + ds.CreateLayer(in_name, geom_type=ogr.wkbPoint) + + expected_names = [ + "FROM_", + "_1NUMBER", + "WITH_SPACE_AND_______special_characters", + "_sde_foo", + _160char, + _160char[0:158] + "_1", + _160char[0:158] + "_2", + ] + for i, exp_name in enumerate(expected_names): + assert ds.GetLayerByIndex(i).GetName() == exp_name, "did not find %s" % exp_name - finally: - gdal.RmdirRecursive(dirname) + ds = None ############################################################################### # Test creating layer with documentation -def test_ogr_openfilegdb_layer_documentation(): +def test_ogr_openfilegdb_layer_documentation(tmp_vsimem): - dirname = "/vsimem/out.gdb" - - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - ds.CreateLayer( - "test", geom_type=ogr.wkbPoint, options=["DOCUMENTATION=<my_doc/>"] - ) - ds = None + dirname = tmp_vsimem / "out.gdb" - ds = ogr.Open(dirname) - sql_lyr = ds.ExecuteSQL("GetLayerMetadata test") - f = sql_lyr.GetNextFeature() - assert f.GetField(0) == "<my_doc/>" - ds.ReleaseResultSet(sql_lyr) - ds = None + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + ds.CreateLayer("test", geom_type=ogr.wkbPoint, options=["DOCUMENTATION=<my_doc/>"]) + ds = None - finally: - gdal.RmdirRecursive(dirname) + ds = ogr.Open(dirname) + sql_lyr = ds.ExecuteSQL("GetLayerMetadata test") + f = sql_lyr.GetNextFeature() + assert f.GetField(0) == "<my_doc/>" + ds.ReleaseResultSet(sql_lyr) + ds = None 
############################################################################### # Test explicit CREATE_SHAPE_AREA_AND_LENGTH_FIELDS=YES option -def test_ogr_openfilegdb_CREATE_SHAPE_AREA_AND_LENGTH_FIELDS_explicit(): +def test_ogr_openfilegdb_CREATE_SHAPE_AREA_AND_LENGTH_FIELDS_explicit(tmp_vsimem): dirname = ( - "/vsimem/test_ogr_openfilegdb_CREATE_SHAPE_AREA_AND_LENGTH_FIELDS_explicit.gdb" + tmp_vsimem + / "test_ogr_openfilegdb_CREATE_SHAPE_AREA_AND_LENGTH_FIELDS_explicit.gdb" ) - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - srs = osr.SpatialReference() - srs.ImportFromEPSG(4326) + srs = osr.SpatialReference() + srs.ImportFromEPSG(4326) - lyr = ds.CreateLayer( - "line", - srs=srs, - geom_type=ogr.wkbLineString, - options=["CREATE_SHAPE_AREA_AND_LENGTH_FIELDS=YES"], - ) - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetGeometryDirectly(ogr.CreateGeometryFromWkt("LINESTRING(0 0,2 0)")) - lyr.CreateFeature(f) - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetGeometryDirectly(ogr.CreateGeometryFromWkt("COMPOUNDCURVE((0 0,2 0))")) - lyr.CreateFeature(f) - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetGeometryDirectly( - ogr.CreateGeometryFromWkt("MULTILINESTRING((0 0,2 0),(10 0,15 0))") - ) - lyr.CreateFeature(f) - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetGeometryDirectly( - ogr.CreateGeometryFromWkt("MULTICURVE((0 0,2 0),(10 0,15 0))") - ) - lyr.CreateFeature(f) - f = ogr.Feature(lyr.GetLayerDefn()) - lyr.CreateFeature(f) + lyr = ds.CreateLayer( + "line", + srs=srs, + geom_type=ogr.wkbLineString, + options=["CREATE_SHAPE_AREA_AND_LENGTH_FIELDS=YES"], + ) + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetGeometryDirectly(ogr.CreateGeometryFromWkt("LINESTRING(0 0,2 0)")) + lyr.CreateFeature(f) + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetGeometryDirectly(ogr.CreateGeometryFromWkt("COMPOUNDCURVE((0 0,2 0))")) + lyr.CreateFeature(f) + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetGeometryDirectly( + ogr.CreateGeometryFromWkt("MULTILINESTRING((0 0,2 0),(10 0,15 0))") + ) + lyr.CreateFeature(f) + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetGeometryDirectly( + ogr.CreateGeometryFromWkt("MULTICURVE((0 0,2 0),(10 0,15 0))") + ) + lyr.CreateFeature(f) + f = ogr.Feature(lyr.GetLayerDefn()) + lyr.CreateFeature(f) - lyr = ds.CreateLayer( - "area", - srs=srs, - geom_type=ogr.wkbPolygon, - options=["CREATE_SHAPE_AREA_AND_LENGTH_FIELDS=YES"], - ) - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetGeometryDirectly( - ogr.CreateGeometryFromWkt( - "POLYGON((0 0,0 1,1 1,1 0,0 0),(0.2 0.2,0.2 0.8,0.8 0.8,0.8 0.2,0.2 0.2))" - ) + lyr = ds.CreateLayer( + "area", + srs=srs, + geom_type=ogr.wkbPolygon, + options=["CREATE_SHAPE_AREA_AND_LENGTH_FIELDS=YES"], + ) + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetGeometryDirectly( + ogr.CreateGeometryFromWkt( + "POLYGON((0 0,0 1,1 1,1 0,0 0),(0.2 0.2,0.2 0.8,0.8 0.8,0.8 0.2,0.2 0.2))" ) - lyr.CreateFeature(f) - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetGeometryDirectly( - ogr.CreateGeometryFromWkt( - "CURVEPOLYGON((0 0,0 1,1 1,1 0,0 0),(0.2 0.2,0.2 0.8,0.8 0.8,0.8 0.2,0.2 0.2))" - ) + ) + lyr.CreateFeature(f) + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetGeometryDirectly( + ogr.CreateGeometryFromWkt( + "CURVEPOLYGON((0 0,0 1,1 1,1 0,0 0),(0.2 0.2,0.2 0.8,0.8 0.8,0.8 0.2,0.2 0.2))" ) - lyr.CreateFeature(f) - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetGeometryDirectly( - ogr.CreateGeometryFromWkt( - "MULTIPOLYGON(((0 0,0 1,1 1,1 0,0 0),(0.2 0.2,0.2 0.8,0.8 0.8,0.8 0.2,0.2 0.2)),((10 0,10 1,11 1,11 0,10 
0)))" - ) + ) + lyr.CreateFeature(f) + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetGeometryDirectly( + ogr.CreateGeometryFromWkt( + "MULTIPOLYGON(((0 0,0 1,1 1,1 0,0 0),(0.2 0.2,0.2 0.8,0.8 0.8,0.8 0.2,0.2 0.2)),((10 0,10 1,11 1,11 0,10 0)))" ) - lyr.CreateFeature(f) - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetGeometryDirectly( - ogr.CreateGeometryFromWkt( - "MULTISURFACE(((0 0,0 1,1 1,1 0,0 0),(0.2 0.2,0.2 0.8,0.8 0.8,0.8 0.2,0.2 0.2)),((10 0,10 1,11 1,11 0,10 0)))" - ) + ) + lyr.CreateFeature(f) + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetGeometryDirectly( + ogr.CreateGeometryFromWkt( + "MULTISURFACE(((0 0,0 1,1 1,1 0,0 0),(0.2 0.2,0.2 0.8,0.8 0.8,0.8 0.2,0.2 0.2)),((10 0,10 1,11 1,11 0,10 0)))" ) - lyr.CreateFeature(f) - f = ogr.Feature(lyr.GetLayerDefn()) - lyr.CreateFeature(f) - - ds = None - - ds = ogr.Open(dirname, update=1) + ) + lyr.CreateFeature(f) + f = ogr.Feature(lyr.GetLayerDefn()) + lyr.CreateFeature(f) - lyr = ds.GetLayerByName("line") - lyr_defn = lyr.GetLayerDefn() - assert lyr_defn.GetFieldIndex("Shape_Length") >= 0 - assert lyr_defn.GetFieldIndex("Shape_Area") < 0 - assert ( - lyr_defn.GetFieldDefn(lyr_defn.GetFieldIndex("Shape_Length")).GetDefault() - == "FILEGEODATABASE_SHAPE_LENGTH" - ) - f = lyr.GetNextFeature() - assert f["Shape_Length"] == 2 - f = lyr.GetNextFeature() - assert f["Shape_Length"] == 2 - f = lyr.GetNextFeature() - assert f["Shape_Length"] == 2 + 5 - f = lyr.GetNextFeature() - assert f["Shape_Length"] == 2 + 5 - f = lyr.GetNextFeature() - assert f["Shape_Length"] is None + ds = None - lyr = ds.GetLayerByName("area") - lyr_defn = lyr.GetLayerDefn() - assert lyr_defn.GetFieldIndex("Shape_Length") >= 0 - assert lyr_defn.GetFieldIndex("Shape_Area") >= 0 - assert ( - lyr_defn.GetFieldDefn(lyr_defn.GetFieldIndex("Shape_Area")).GetDefault() - == "FILEGEODATABASE_SHAPE_AREA" - ) - assert ( - lyr_defn.GetFieldDefn(lyr_defn.GetFieldIndex("Shape_Length")).GetDefault() - == "FILEGEODATABASE_SHAPE_LENGTH" - ) - f = lyr.GetNextFeature() - assert f["Shape_Length"] == pytest.approx(6.4) - assert f["Shape_Area"] == pytest.approx(0.64) - f = lyr.GetNextFeature() - assert f["Shape_Length"] == pytest.approx(6.4) - assert f["Shape_Area"] == pytest.approx(0.64) - f = lyr.GetNextFeature() - assert f["Shape_Length"] == pytest.approx(6.4 + 4) - assert f["Shape_Area"] == pytest.approx(0.64 + 1) - f = lyr.GetNextFeature() - assert f["Shape_Length"] == pytest.approx(6.4 + 4) - assert f["Shape_Area"] == pytest.approx(0.64 + 1) - f = lyr.GetNextFeature() - assert f["Shape_Length"] is None - assert f["Shape_Area"] is None + ds = ogr.Open(dirname, update=1) - # Rename Shape_Length and Shape_Area fields (not sure the FileGDB SDK likes it) - iShapeLength = lyr_defn.GetFieldIndex("Shape_Length") - fld_defn = ogr.FieldDefn("Shape_Length_renamed", ogr.OFTReal) - assert ( - lyr.AlterFieldDefn(iShapeLength, fld_defn, ogr.ALTER_NAME_FLAG) - == ogr.OGRERR_NONE - ) + lyr = ds.GetLayerByName("line") + lyr_defn = lyr.GetLayerDefn() + assert lyr_defn.GetFieldIndex("Shape_Length") >= 0 + assert lyr_defn.GetFieldIndex("Shape_Area") < 0 + assert ( + lyr_defn.GetFieldDefn(lyr_defn.GetFieldIndex("Shape_Length")).GetDefault() + == "FILEGEODATABASE_SHAPE_LENGTH" + ) + f = lyr.GetNextFeature() + assert f["Shape_Length"] == 2 + f = lyr.GetNextFeature() + assert f["Shape_Length"] == 2 + f = lyr.GetNextFeature() + assert f["Shape_Length"] == 2 + 5 + f = lyr.GetNextFeature() + assert f["Shape_Length"] == 2 + 5 + f = lyr.GetNextFeature() + assert f["Shape_Length"] is None - iShapeArea = 
lyr_defn.GetFieldIndex("Shape_Area") - fld_defn = ogr.FieldDefn("Shape_Area_renamed", ogr.OFTReal) - assert ( - lyr.AlterFieldDefn(iShapeArea, fld_defn, ogr.ALTER_NAME_FLAG) - == ogr.OGRERR_NONE - ) + lyr = ds.GetLayerByName("area") + lyr_defn = lyr.GetLayerDefn() + assert lyr_defn.GetFieldIndex("Shape_Length") >= 0 + assert lyr_defn.GetFieldIndex("Shape_Area") >= 0 + assert ( + lyr_defn.GetFieldDefn(lyr_defn.GetFieldIndex("Shape_Area")).GetDefault() + == "FILEGEODATABASE_SHAPE_AREA" + ) + assert ( + lyr_defn.GetFieldDefn(lyr_defn.GetFieldIndex("Shape_Length")).GetDefault() + == "FILEGEODATABASE_SHAPE_LENGTH" + ) + f = lyr.GetNextFeature() + assert f["Shape_Length"] == pytest.approx(6.4) + assert f["Shape_Area"] == pytest.approx(0.64) + f = lyr.GetNextFeature() + assert f["Shape_Length"] == pytest.approx(6.4) + assert f["Shape_Area"] == pytest.approx(0.64) + f = lyr.GetNextFeature() + assert f["Shape_Length"] == pytest.approx(6.4 + 4) + assert f["Shape_Area"] == pytest.approx(0.64 + 1) + f = lyr.GetNextFeature() + assert f["Shape_Length"] == pytest.approx(6.4 + 4) + assert f["Shape_Area"] == pytest.approx(0.64 + 1) + f = lyr.GetNextFeature() + assert f["Shape_Length"] is None + assert f["Shape_Area"] is None + + # Rename Shape_Length and Shape_Area fields (not sure the FileGDB SDK likes it) + iShapeLength = lyr_defn.GetFieldIndex("Shape_Length") + fld_defn = ogr.FieldDefn("Shape_Length_renamed", ogr.OFTReal) + assert ( + lyr.AlterFieldDefn(iShapeLength, fld_defn, ogr.ALTER_NAME_FLAG) + == ogr.OGRERR_NONE + ) - ds = ogr.Open(dirname, update=1) + iShapeArea = lyr_defn.GetFieldIndex("Shape_Area") + fld_defn = ogr.FieldDefn("Shape_Area_renamed", ogr.OFTReal) + assert ( + lyr.AlterFieldDefn(iShapeArea, fld_defn, ogr.ALTER_NAME_FLAG) == ogr.OGRERR_NONE + ) - sql_lyr = ds.ExecuteSQL("GetLayerDefinition area") - assert sql_lyr - f = sql_lyr.GetNextFeature() - xml = f.GetField(0) - f = None - ds.ReleaseResultSet(sql_lyr) - assert "<AreaFieldName>Shape_Area_renamed</AreaFieldName>" in xml - assert "<LengthFieldName>Shape_Length_renamed</LengthFieldName>" in xml + ds = ogr.Open(dirname, update=1) - lyr = ds.GetLayerByName("area") - lyr_defn = lyr.GetLayerDefn() + sql_lyr = ds.ExecuteSQL("GetLayerDefinition area") + assert sql_lyr + f = sql_lyr.GetNextFeature() + xml = f.GetField(0) + f = None + ds.ReleaseResultSet(sql_lyr) + assert "<AreaFieldName>Shape_Area_renamed</AreaFieldName>" in xml + assert "<LengthFieldName>Shape_Length_renamed</LengthFieldName>" in xml - # Delete Shape_Length and Shape_Area fields - assert ( - lyr.DeleteField(lyr_defn.GetFieldIndex("Shape_Length_renamed")) - == ogr.OGRERR_NONE - ) - assert ( - lyr.DeleteField(lyr_defn.GetFieldIndex("Shape_Area_renamed")) - == ogr.OGRERR_NONE - ) + lyr = ds.GetLayerByName("area") + lyr_defn = lyr.GetLayerDefn() - f = ogr.Feature(lyr_defn) - f.SetGeometryDirectly( - ogr.CreateGeometryFromWkt("POLYGON((0 0,0 1,1 1,1 0,0 0))") - ) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - ds = None + # Delete Shape_Length and Shape_Area fields + assert ( + lyr.DeleteField(lyr_defn.GetFieldIndex("Shape_Length_renamed")) + == ogr.OGRERR_NONE + ) + assert ( + lyr.DeleteField(lyr_defn.GetFieldIndex("Shape_Area_renamed")) == ogr.OGRERR_NONE + ) - ds = ogr.Open(dirname) + f = ogr.Feature(lyr_defn) + f.SetGeometryDirectly(ogr.CreateGeometryFromWkt("POLYGON((0 0,0 1,1 1,1 0,0 0))")) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + ds = None - sql_lyr = ds.ExecuteSQL("GetLayerDefinition area") - assert sql_lyr - f = sql_lyr.GetNextFeature() - xml = 
f.GetField(0) - f = None - ds.ReleaseResultSet(sql_lyr) - assert "<AreaFieldName />" in xml - assert "<LengthFieldName />" in xml + ds = ogr.Open(dirname) - ds = None + sql_lyr = ds.ExecuteSQL("GetLayerDefinition area") + assert sql_lyr + f = sql_lyr.GetNextFeature() + xml = f.GetField(0) + f = None + ds.ReleaseResultSet(sql_lyr) + assert "<AreaFieldName />" in xml + assert "<LengthFieldName />" in xml - finally: - gdal.RmdirRecursive(dirname) + ds = None ############################################################################### # Test explicit CREATE_SHAPE_AREA_AND_LENGTH_FIELDS=YES option -def test_ogr_openfilegdb_CREATE_SHAPE_AREA_AND_LENGTH_FIELDS_implicit(): +def test_ogr_openfilegdb_CREATE_SHAPE_AREA_AND_LENGTH_FIELDS_implicit(tmp_vsimem): dirname = ( - "/vsimem/test_ogr_openfilegdb_CREATE_SHAPE_AREA_AND_LENGTH_FIELDS_implicit.gdb" + tmp_vsimem + / "test_ogr_openfilegdb_CREATE_SHAPE_AREA_AND_LENGTH_FIELDS_implicit.gdb" ) - try: - gdal.VectorTranslate( - dirname, - "data/filegdb/filegdb_polygonzm_m_not_closing_with_curves.gdb", - options="-f OpenFileGDB -fid 1", - ) - ds = ogr.Open(dirname) - lyr = ds.GetLayer(0) - lyr_defn = lyr.GetLayerDefn() - assert ( - lyr_defn.GetFieldDefn(lyr_defn.GetFieldIndex("Shape_Area")).GetDefault() - == "FILEGEODATABASE_SHAPE_AREA" - ) - assert ( - lyr_defn.GetFieldDefn(lyr_defn.GetFieldIndex("Shape_Length")).GetDefault() - == "FILEGEODATABASE_SHAPE_LENGTH" - ) + gdal.VectorTranslate( + dirname, + "data/filegdb/filegdb_polygonzm_m_not_closing_with_curves.gdb", + options="-f OpenFileGDB -fid 1", + ) - ds = None + ds = ogr.Open(dirname) + lyr = ds.GetLayer(0) + lyr_defn = lyr.GetLayerDefn() + assert ( + lyr_defn.GetFieldDefn(lyr_defn.GetFieldIndex("Shape_Area")).GetDefault() + == "FILEGEODATABASE_SHAPE_AREA" + ) + assert ( + lyr_defn.GetFieldDefn(lyr_defn.GetFieldIndex("Shape_Length")).GetDefault() + == "FILEGEODATABASE_SHAPE_LENGTH" + ) - finally: - gdal.RmdirRecursive(dirname) + ds = None ############################################################################### # Test AlterGeomFieldDefn() -def test_ogr_openfilegdb_write_alter_geom_field_defn(): +def test_ogr_openfilegdb_write_alter_geom_field_defn(tmp_vsimem): - dirname = "/vsimem/test_ogr_openfilegdb_alter_geom_field_defn.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + dirname = tmp_vsimem / "test_ogr_openfilegdb_alter_geom_field_defn.gdb" - srs = osr.SpatialReference() - srs.ImportFromEPSG(4326) + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - ds.CreateLayer("test", srs=srs, geom_type=ogr.wkbLineString) - ds = None + srs = osr.SpatialReference() + srs.ImportFromEPSG(4326) - ds = ogr.Open(dirname, update=1) - lyr = ds.GetLayer(0) + ds.CreateLayer("test", srs=srs, geom_type=ogr.wkbLineString) + ds = None - assert lyr.TestCapability(ogr.OLCAlterGeomFieldDefn) + ds = ogr.Open(dirname, update=1) + lyr = ds.GetLayer(0) - # Change name - fld_defn = ogr.GeomFieldDefn("shape_renamed", ogr.wkbLineString) - assert ( - lyr.AlterGeomFieldDefn(0, fld_defn, ogr.ALTER_GEOM_FIELD_DEFN_NAME_FLAG) - == ogr.OGRERR_NONE - ) - assert lyr.GetGeometryColumn() == "shape_renamed" - ds = None + assert lyr.TestCapability(ogr.OLCAlterGeomFieldDefn) - ds = ogr.Open(dirname, update=1) - lyr = ds.GetLayer(0) + # Change name + fld_defn = ogr.GeomFieldDefn("shape_renamed", ogr.wkbLineString) + assert ( + lyr.AlterGeomFieldDefn(0, fld_defn, ogr.ALTER_GEOM_FIELD_DEFN_NAME_FLAG) + == ogr.OGRERR_NONE + ) + assert lyr.GetGeometryColumn() == "shape_renamed" + ds = None - 
sql_lyr = ds.ExecuteSQL("GetLayerDefinition test") - assert sql_lyr - f = sql_lyr.GetNextFeature() - xml = f.GetField(0) - f = None - ds.ReleaseResultSet(sql_lyr) - assert "<Name>shape_renamed</Name>" in xml - assert "WKID" in xml + ds = ogr.Open(dirname, update=1) + lyr = ds.GetLayer(0) - assert lyr.GetGeometryColumn() == "shape_renamed" - assert lyr.GetSpatialRef().GetAuthorityCode(None) == "4326" + sql_lyr = ds.ExecuteSQL("GetLayerDefinition test") + assert sql_lyr + f = sql_lyr.GetNextFeature() + xml = f.GetField(0) + f = None + ds.ReleaseResultSet(sql_lyr) + assert "<Name>shape_renamed</Name>" in xml + assert "WKID" in xml + + assert lyr.GetGeometryColumn() == "shape_renamed" + assert lyr.GetSpatialRef().GetAuthorityCode(None) == "4326" + + # Set SRS to None + fld_defn = ogr.GeomFieldDefn("shape_renamed", ogr.wkbLineString) + fld_defn.SetSpatialRef(None) + assert ( + lyr.AlterGeomFieldDefn(0, fld_defn, ogr.ALTER_GEOM_FIELD_DEFN_SRS_FLAG) + == ogr.OGRERR_NONE + ) + assert lyr.GetSpatialRef() is None + ds = None - # Set SRS to None - fld_defn = ogr.GeomFieldDefn("shape_renamed", ogr.wkbLineString) - fld_defn.SetSpatialRef(None) - assert ( - lyr.AlterGeomFieldDefn(0, fld_defn, ogr.ALTER_GEOM_FIELD_DEFN_SRS_FLAG) - == ogr.OGRERR_NONE - ) - assert lyr.GetSpatialRef() is None - ds = None + ds = ogr.Open(dirname, update=1) + lyr = ds.GetLayer(0) + assert lyr.GetSpatialRef() is None - ds = ogr.Open(dirname, update=1) - lyr = ds.GetLayer(0) - assert lyr.GetSpatialRef() is None + sql_lyr = ds.ExecuteSQL("GetLayerDefinition test") + assert sql_lyr + f = sql_lyr.GetNextFeature() + xml = f.GetField(0) + f = None + ds.ReleaseResultSet(sql_lyr) + assert "WKID" not in xml + + # Set SRS to EPSG:4326 + fld_defn = ogr.GeomFieldDefn("shape_renamed", ogr.wkbLineString) + fld_defn.SetSpatialRef(srs) + assert ( + lyr.AlterGeomFieldDefn(0, fld_defn, ogr.ALTER_GEOM_FIELD_DEFN_SRS_FLAG) + == ogr.OGRERR_NONE + ) + assert lyr.GetSpatialRef() is not None + ds = None - sql_lyr = ds.ExecuteSQL("GetLayerDefinition test") - assert sql_lyr - f = sql_lyr.GetNextFeature() - xml = f.GetField(0) - f = None - ds.ReleaseResultSet(sql_lyr) - assert "WKID" not in xml + ds = ogr.Open(dirname, update=1) + lyr = ds.GetLayer(0) + assert lyr.GetSpatialRef() is not None - # Set SRS to EPSG:4326 - fld_defn = ogr.GeomFieldDefn("shape_renamed", ogr.wkbLineString) - fld_defn.SetSpatialRef(srs) - assert ( - lyr.AlterGeomFieldDefn(0, fld_defn, ogr.ALTER_GEOM_FIELD_DEFN_SRS_FLAG) - == ogr.OGRERR_NONE - ) - assert lyr.GetSpatialRef() is not None - ds = None + sql_lyr = ds.ExecuteSQL("GetLayerDefinition test") + assert sql_lyr + f = sql_lyr.GetNextFeature() + xml = f.GetField(0) + f = None + ds.ReleaseResultSet(sql_lyr) + assert "<WKID>4326</WKID>" in xml - ds = ogr.Open(dirname, update=1) - lyr = ds.GetLayer(0) - assert lyr.GetSpatialRef() is not None + srs4269 = osr.SpatialReference() + srs4269.ImportFromEPSG(4269) - sql_lyr = ds.ExecuteSQL("GetLayerDefinition test") - assert sql_lyr - f = sql_lyr.GetNextFeature() - xml = f.GetField(0) - f = None - ds.ReleaseResultSet(sql_lyr) - assert "<WKID>4326</WKID>" in xml + # Set SRS to EPSG:4269 + fld_defn = ogr.GeomFieldDefn("shape_renamed", ogr.wkbLineString) + fld_defn.SetSpatialRef(srs4269) + assert ( + lyr.AlterGeomFieldDefn(0, fld_defn, ogr.ALTER_GEOM_FIELD_DEFN_SRS_FLAG) + == ogr.OGRERR_NONE + ) + assert lyr.GetSpatialRef() is not None + assert lyr.GetSpatialRef().GetAuthorityCode(None) == "4269" + ds = None - srs4269 = osr.SpatialReference() - srs4269.ImportFromEPSG(4269) + ds = 
ogr.Open(dirname, update=1) + lyr = ds.GetLayer(0) + assert lyr.GetSpatialRef() is not None + assert lyr.GetSpatialRef().GetAuthorityCode(None) == "4269" + + sql_lyr = ds.ExecuteSQL("GetLayerDefinition test") + assert sql_lyr + f = sql_lyr.GetNextFeature() + xml = f.GetField(0) + f = None + ds.ReleaseResultSet(sql_lyr) + assert "<WKID>4269</WKID>" in xml - # Set SRS to EPSG:4269 - fld_defn = ogr.GeomFieldDefn("shape_renamed", ogr.wkbLineString) - fld_defn.SetSpatialRef(srs4269) + # Changing geometry type not supported + fld_defn = ogr.GeomFieldDefn("shape_renamed", ogr.wkbPolygon) + with gdal.quiet_errors(): assert ( - lyr.AlterGeomFieldDefn(0, fld_defn, ogr.ALTER_GEOM_FIELD_DEFN_SRS_FLAG) - == ogr.OGRERR_NONE + lyr.AlterGeomFieldDefn(0, fld_defn, ogr.ALTER_GEOM_FIELD_DEFN_TYPE_FLAG) + != ogr.OGRERR_NONE ) - assert lyr.GetSpatialRef() is not None - assert lyr.GetSpatialRef().GetAuthorityCode(None) == "4269" - ds = None - - ds = ogr.Open(dirname, update=1) - lyr = ds.GetLayer(0) - assert lyr.GetSpatialRef() is not None - assert lyr.GetSpatialRef().GetAuthorityCode(None) == "4269" - - sql_lyr = ds.ExecuteSQL("GetLayerDefinition test") - assert sql_lyr - f = sql_lyr.GetNextFeature() - xml = f.GetField(0) - f = None - ds.ReleaseResultSet(sql_lyr) - assert "<WKID>4269</WKID>" in xml - - # Changing geometry type not supported - fld_defn = ogr.GeomFieldDefn("shape_renamed", ogr.wkbPolygon) - with gdal.quiet_errors(): - assert ( - lyr.AlterGeomFieldDefn(0, fld_defn, ogr.ALTER_GEOM_FIELD_DEFN_TYPE_FLAG) - != ogr.OGRERR_NONE - ) - # Changing nullable state not supported - fld_defn = ogr.GeomFieldDefn("shape_renamed", ogr.wkbPolygon) - fld_defn.SetNullable(False) - with gdal.quiet_errors(): - assert ( - lyr.AlterGeomFieldDefn( - 0, fld_defn, ogr.ALTER_GEOM_FIELD_DEFN_NULLABLE_FLAG - ) - != ogr.OGRERR_NONE - ) - - ds = None + # Changing nullable state not supported + fld_defn = ogr.GeomFieldDefn("shape_renamed", ogr.wkbPolygon) + fld_defn.SetNullable(False) + with gdal.quiet_errors(): + assert ( + lyr.AlterGeomFieldDefn(0, fld_defn, ogr.ALTER_GEOM_FIELD_DEFN_NULLABLE_FLAG) + != ogr.OGRERR_NONE + ) - finally: - gdal.RmdirRecursive(dirname) + ds = None ############################################################################### @@ -4080,118 +3948,113 @@ def test_ogr_openfilegdb_write_alter_geom_field_defn(): @pytest.mark.parametrize("field_type", [ogr.OFTInteger, ogr.OFTInteger64, ogr.OFTReal]) -def test_ogr_openfilegdb_write_create_OBJECTID(field_type): - - dirname = "/vsimem/test_ogr_openfilegdb_write_create_OBJECTID.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) - assert ( - lyr.CreateField(ogr.FieldDefn("unused_before", ogr.OFTString)) - == ogr.OGRERR_NONE - ) - assert ( - lyr.CreateField(ogr.FieldDefn(lyr.GetFIDColumn(), field_type)) - == ogr.OGRERR_NONE - ) - assert ( - lyr.CreateField(ogr.FieldDefn("int_field", ogr.OFTInteger)) - == ogr.OGRERR_NONE - ) - assert lyr.GetLayerDefn().GetFieldCount() == 3 +def test_ogr_openfilegdb_write_create_OBJECTID(tmp_vsimem, field_type): - # No FID, but OBJECTID - f = ogr.Feature(lyr.GetLayerDefn()) - f[lyr.GetFIDColumn()] = 10 - f["int_field"] = 2 - f.SetGeometry(ogr.CreateGeometryFromWkt("POINT (1 2)")) - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - assert f.GetFID() == 10 - f = None + dirname = tmp_vsimem / "test_ogr_openfilegdb_write_create_OBJECTID.gdb" - field_idx = lyr.GetLayerDefn().GetFieldIndex("unused_before") - assert lyr.DeleteField(field_idx) == 
ogr.OGRERR_NONE + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) + assert ( + lyr.CreateField(ogr.FieldDefn("unused_before", ogr.OFTString)) + == ogr.OGRERR_NONE + ) + assert ( + lyr.CreateField(ogr.FieldDefn(lyr.GetFIDColumn(), field_type)) + == ogr.OGRERR_NONE + ) + assert ( + lyr.CreateField(ogr.FieldDefn("int_field", ogr.OFTInteger)) == ogr.OGRERR_NONE + ) + assert lyr.GetLayerDefn().GetFieldCount() == 3 - assert ( - lyr.CreateField(ogr.FieldDefn("int_field2", ogr.OFTInteger)) - == ogr.OGRERR_NONE - ) + # No FID, but OBJECTID + f = ogr.Feature(lyr.GetLayerDefn()) + f[lyr.GetFIDColumn()] = 10 + f["int_field"] = 2 + f.SetGeometry(ogr.CreateGeometryFromWkt("POINT (1 2)")) + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + assert f.GetFID() == 10 + f = None + + field_idx = lyr.GetLayerDefn().GetFieldIndex("unused_before") + assert lyr.DeleteField(field_idx) == ogr.OGRERR_NONE + + assert ( + lyr.CreateField(ogr.FieldDefn("int_field2", ogr.OFTInteger)) == ogr.OGRERR_NONE + ) - # FID and OBJECTID, both equal - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetFID(11) - f[lyr.GetFIDColumn()] = 11 - f["int_field"] = 3 - f["int_field2"] = 30 - assert lyr.CreateFeature(f) == ogr.OGRERR_NONE - assert f.GetFID() == 11 + # FID and OBJECTID, both equal + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetFID(11) + f[lyr.GetFIDColumn()] = 11 + f["int_field"] = 3 + f["int_field2"] = 30 + assert lyr.CreateFeature(f) == ogr.OGRERR_NONE + assert f.GetFID() == 11 - f["int_field"] = 4 - assert lyr.SetFeature(f) == ogr.OGRERR_NONE + f["int_field"] = 4 + assert lyr.SetFeature(f) == ogr.OGRERR_NONE - # FID and OBJECTID, different ==> error - f = ogr.Feature(lyr.GetLayerDefn()) - f.SetFID(12) - f[lyr.GetFIDColumn()] = 13 - with gdal.quiet_errors(): - assert lyr.CreateFeature(f) != ogr.OGRERR_NONE + # FID and OBJECTID, different ==> error + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetFID(12) + f[lyr.GetFIDColumn()] = 13 + with gdal.quiet_errors(): + assert lyr.CreateFeature(f) != ogr.OGRERR_NONE - lyr.ResetReading() - f = lyr.GetNextFeature() - assert f.GetFID() == 10 - assert f[lyr.GetFIDColumn()] == 10 - assert f["int_field"] == 2 - assert f.GetGeometryRef().ExportToWkt() == "POINT (1 2)" + lyr.ResetReading() + f = lyr.GetNextFeature() + assert f.GetFID() == 10 + assert f[lyr.GetFIDColumn()] == 10 + assert f["int_field"] == 2 + assert f.GetGeometryRef().ExportToWkt() == "POINT (1 2)" - f = lyr.GetNextFeature() - assert f.GetFID() == 11 - assert f[lyr.GetFIDColumn()] == 11 - assert f["int_field"] == 4 - assert f["int_field2"] == 30 + f = lyr.GetNextFeature() + assert f.GetFID() == 11 + assert f[lyr.GetFIDColumn()] == 11 + assert f["int_field"] == 4 + assert f["int_field2"] == 30 - # Can't delete or alter OBJECTID field - field_idx = lyr.GetLayerDefn().GetFieldIndex(lyr.GetFIDColumn()) - with gdal.quiet_errors(): - assert lyr.DeleteField(field_idx) == ogr.OGRERR_FAILURE - assert ( - lyr.AlterFieldDefn( - field_idx, - lyr.GetLayerDefn().GetFieldDefn(field_idx), - ogr.ALTER_ALL_FLAG, - ) - == ogr.OGRERR_FAILURE + # Can't delete or alter OBJECTID field + field_idx = lyr.GetLayerDefn().GetFieldIndex(lyr.GetFIDColumn()) + with gdal.quiet_errors(): + assert lyr.DeleteField(field_idx) == ogr.OGRERR_FAILURE + assert ( + lyr.AlterFieldDefn( + field_idx, + lyr.GetLayerDefn().GetFieldDefn(field_idx), + ogr.ALTER_ALL_FLAG, ) + == ogr.OGRERR_FAILURE + ) - ds = None - - ds = ogr.Open(dirname) - lyr = ds.GetLayer(0) - assert lyr.GetLayerDefn().GetFieldCount() == 2 + ds 
= None - lyr.ResetReading() - f = lyr.GetNextFeature() - assert f.GetFID() == 10 - assert f["int_field"] == 2 - assert f.GetGeometryRef().ExportToWkt() == "POINT (1 2)" + ds = ogr.Open(dirname) + lyr = ds.GetLayer(0) + assert lyr.GetLayerDefn().GetFieldCount() == 2 - f = lyr.GetNextFeature() - assert f.GetFID() == 11 - assert f["int_field"] == 4 - assert f["int_field2"] == 30 + lyr.ResetReading() + f = lyr.GetNextFeature() + assert f.GetFID() == 10 + assert f["int_field"] == 2 + assert f.GetGeometryRef().ExportToWkt() == "POINT (1 2)" - ds = None + f = lyr.GetNextFeature() + assert f.GetFID() == 11 + assert f["int_field"] == 4 + assert f["int_field2"] == 30 - finally: - gdal.RmdirRecursive(dirname) + ds = None ############################################################################### # Test driver Delete() method -def test_ogr_openfilegdb_write_delete(): +def test_ogr_openfilegdb_write_delete(tmp_path): - dirname = "tmp/test_ogr_openfilegdb_write_delete.gdb" + dirname = tmp_path / "test_ogr_openfilegdb_write_delete.gdb" if gdal.VSIStatL(dirname) is not None: gdal.RmdirRecursive(dirname) drv = ogr.GetDriverByName("OpenFileGDB") @@ -4211,78 +4074,75 @@ def test_ogr_openfilegdb_write_delete(): "write_wkid,write_vcswkid", [(True, True), (True, False), (False, False)] ) @pytest.mark.require_proj(7, 2) -def test_ogr_openfilegdb_write_compound_crs(write_wkid, write_vcswkid): +def test_ogr_openfilegdb_write_compound_crs(tmp_vsimem, write_wkid, write_vcswkid): - dirname = "/vsimem/test_ogr_openfilegdb_write_compound_crs.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - srs = osr.SpatialReference() - srs.SetFromUserInput( - """COMPOUNDCRS["WGS_1984_Complex_UTM_Zone_22N + MSL height", - PROJCRS["WGS_1984_Complex_UTM_Zone_22N", - BASEGEOGCRS["WGS 84", - DATUM["World Geodetic System 1984", - ELLIPSOID["WGS 84",6378137,298.257223563, - LENGTHUNIT["metre",1]]], - PRIMEM["Greenwich",0, - ANGLEUNIT["Degree",0.0174532925199433]]], - CONVERSION["UTM zone 22N", - METHOD["Transverse Mercator", - ID["EPSG",9807]], - PARAMETER["Latitude of natural origin",0, - ANGLEUNIT["Degree",0.0174532925199433], - ID["EPSG",8801]], - PARAMETER["Longitude of natural origin",-51, - ANGLEUNIT["Degree",0.0174532925199433], - ID["EPSG",8802]], - PARAMETER["Scale factor at natural origin",0.9996, - SCALEUNIT["unity",1], - ID["EPSG",8805]], - PARAMETER["False easting",500000, - LENGTHUNIT["metre",1], - ID["EPSG",8806]], - PARAMETER["False northing",0, - LENGTHUNIT["metre",1], - ID["EPSG",8807]]], - CS[Cartesian,2], - AXIS["(E)",east, - ORDER[1], - LENGTHUNIT["metre",1]], - AXIS["(N)",north, - ORDER[2], - LENGTHUNIT["metre",1]], - USAGE[ - SCOPE["Not known."], - AREA["Between 54°W and 48°W, northern hemisphere between equator and 84°N, onshore and offshore."], - BBOX[0,-54,84,-48]], - ID["ESRI",102572]], - VERTCRS["MSL height", - VDATUM["Mean Sea Level"], - CS[vertical,1], - AXIS["gravity-related height (H)",up, - LENGTHUNIT["metre",1]], - USAGE[ - SCOPE["Hydrography, drilling."], - AREA["World."], - BBOX[-90,-180,90,180]], - ID["EPSG",5714]]] - """ - ) - d = { - "OPENFILEGDB_WRITE_WKID": None if write_wkid else "FALSE", - "OPENFILEGDB_WRITE_VCSWKID": None if write_vcswkid else "FALSE", - } - with gdaltest.config_options(d): - ds.CreateLayer("test", geom_type=ogr.wkbPoint, srs=srs) - ds = None + dirname = tmp_vsimem / "test_ogr_openfilegdb_write_compound_crs.gdb" - ds = ogr.Open(dirname) - lyr = ds.GetLayer(0) - got_srs = lyr.GetSpatialRef() - assert got_srs.IsSame(srs) + ds = 
ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + srs = osr.SpatialReference() + srs.SetFromUserInput( + """COMPOUNDCRS["WGS_1984_Complex_UTM_Zone_22N + MSL height", +PROJCRS["WGS_1984_Complex_UTM_Zone_22N", + BASEGEOGCRS["WGS 84", + DATUM["World Geodetic System 1984", + ELLIPSOID["WGS 84",6378137,298.257223563, + LENGTHUNIT["metre",1]]], + PRIMEM["Greenwich",0, + ANGLEUNIT["Degree",0.0174532925199433]]], + CONVERSION["UTM zone 22N", + METHOD["Transverse Mercator", + ID["EPSG",9807]], + PARAMETER["Latitude of natural origin",0, + ANGLEUNIT["Degree",0.0174532925199433], + ID["EPSG",8801]], + PARAMETER["Longitude of natural origin",-51, + ANGLEUNIT["Degree",0.0174532925199433], + ID["EPSG",8802]], + PARAMETER["Scale factor at natural origin",0.9996, + SCALEUNIT["unity",1], + ID["EPSG",8805]], + PARAMETER["False easting",500000, + LENGTHUNIT["metre",1], + ID["EPSG",8806]], + PARAMETER["False northing",0, + LENGTHUNIT["metre",1], + ID["EPSG",8807]]], + CS[Cartesian,2], + AXIS["(E)",east, + ORDER[1], + LENGTHUNIT["metre",1]], + AXIS["(N)",north, + ORDER[2], + LENGTHUNIT["metre",1]], + USAGE[ + SCOPE["Not known."], + AREA["Between 54°W and 48°W, northern hemisphere between equator and 84°N, onshore and offshore."], + BBOX[0,-54,84,-48]], + ID["ESRI",102572]], +VERTCRS["MSL height", + VDATUM["Mean Sea Level"], + CS[vertical,1], + AXIS["gravity-related height (H)",up, + LENGTHUNIT["metre",1]], + USAGE[ + SCOPE["Hydrography, drilling."], + AREA["World."], + BBOX[-90,-180,90,180]], + ID["EPSG",5714]]] + """ + ) + d = { + "OPENFILEGDB_WRITE_WKID": None if write_wkid else "FALSE", + "OPENFILEGDB_WRITE_VCSWKID": None if write_vcswkid else "FALSE", + } + with gdaltest.config_options(d): + ds.CreateLayer("test", geom_type=ogr.wkbPoint, srs=srs) + ds = None - finally: - gdal.RmdirRecursive(dirname) + ds = ogr.Open(dirname) + lyr = ds.GetLayer(0) + got_srs = lyr.GetSpatialRef() + assert got_srs.IsSame(srs) ############################################################################### @@ -4306,51 +4166,44 @@ def test_ogr_openfilegdb_write_compound_crs(write_wkid, write_vcswkid): ogr.wkbMultiPolygonZM, ], ) -def test_ogr_openfilegdb_write_empty_geoms(geom_type): +def test_ogr_openfilegdb_write_empty_geoms(tmp_vsimem, geom_type): - dirname = "/vsimem/test_ogr_openfilegdb_write_empty_geoms.gdb" - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - lyr = ds.CreateLayer("test", geom_type=geom_type) - f = ogr.Feature(lyr.GetLayerDefn()) - g = ogr.Geometry(geom_type) - f.SetGeometry(g) - with gdaltest.config_option("OGR_OPENFILEGDB_WRITE_EMPTY_GEOMETRY", "YES"): - lyr.CreateFeature(f) - ds = None + dirname = tmp_vsimem / "test_ogr_openfilegdb_write_empty_geoms.gdb" - ds = ogr.Open(dirname) - lyr = ds.GetLayer(0) - assert lyr.GetGeomType() == geom_type - f = lyr.GetNextFeature() - g = f.GetGeometryRef() - assert g.GetGeometryType() == geom_type - assert g.IsEmpty() + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + lyr = ds.CreateLayer("test", geom_type=geom_type) + f = ogr.Feature(lyr.GetLayerDefn()) + g = ogr.Geometry(geom_type) + f.SetGeometry(g) + with gdaltest.config_option("OGR_OPENFILEGDB_WRITE_EMPTY_GEOMETRY", "YES"): + lyr.CreateFeature(f) + ds = None - finally: - gdal.RmdirRecursive(dirname) + ds = ogr.Open(dirname) + lyr = ds.GetLayer(0) + assert lyr.GetGeomType() == geom_type + f = lyr.GetNextFeature() + g = f.GetGeometryRef() + assert g.GetGeometryType() == geom_type + assert g.IsEmpty() 
############################################################################### # Test creating layer with alias name -def test_ogr_openfilegdb_layer_alias_name(): +def test_ogr_openfilegdb_layer_alias_name(tmp_vsimem): - dirname = "/vsimem/out.gdb" - - try: - ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) - ds.CreateLayer("test", geom_type=ogr.wkbPoint, options=["LAYER_ALIAS=my_alias"]) - ds = None + dirname = tmp_vsimem / "out.gdb" - ds = ogr.Open(dirname) - lyr = ds.GetLayer(0) - assert lyr.GetMetadataItem("ALIAS_NAME") == "my_alias" - ds = None + ds = ogr.GetDriverByName("OpenFileGDB").CreateDataSource(dirname) + ds.CreateLayer("test", geom_type=ogr.wkbPoint, options=["LAYER_ALIAS=my_alias"]) + ds = None - finally: - gdal.RmdirRecursive(dirname) + ds = ogr.Open(dirname) + lyr = ds.GetLayer(0) + assert lyr.GetMetadataItem("ALIAS_NAME") == "my_alias" + ds = None ############################################################################### From f74bf4a17c535f01944a17a86f370cb0669f3106 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Fri, 5 Apr 2024 21:57:37 -0400 Subject: [PATCH 069/230] autotest ogr_gpkg.py: use tmp_path, tmp_vsimem --- autotest/ogr/ogr_gpkg.py | 38 +++++++++++++++++--------------------- 1 file changed, 17 insertions(+), 21 deletions(-) diff --git a/autotest/ogr/ogr_gpkg.py b/autotest/ogr/ogr_gpkg.py index 30761e28acba..6b8cfd24aec6 100755 --- a/autotest/ogr/ogr_gpkg.py +++ b/autotest/ogr/ogr_gpkg.py @@ -307,9 +307,9 @@ def validate(gpkg, quiet=False, tmpdir=None): # Create a fresh database. -def test_ogr_gpkg_1(gpkg_ds): +def test_ogr_gpkg_1(gpkg_ds, tmp_path): - assert validate(gpkg_ds), "validation failed" + assert validate(gpkg_ds, tmpdir=tmp_path), "validation failed" ############################################################################### @@ -342,7 +342,7 @@ def test_ogr_gpkg_2bis(gpkg_ds): assert lyr is None, "layer creation should have failed" -def test_ogr_gpkg_3(gpkg_ds): +def test_ogr_gpkg_3(gpkg_ds, tmp_path): srs4326 = osr.SpatialReference() srs4326.ImportFromEPSG(4326) @@ -359,7 +359,7 @@ def test_ogr_gpkg_3(gpkg_ds): ############################################################################### # Close and re-open to test the layer registration - assert validate(gpkg_ds), "validation failed" + assert validate(gpkg_ds, tmpdir=tmp_path), "validation failed" gpkg_ds = gdaltest.reopen(gpkg_ds) @@ -422,7 +422,7 @@ def test_ogr_gpkg_5(gpkg_ds): # Add fields -def test_ogr_gpkg_6(gpkg_ds): +def test_ogr_gpkg_6(gpkg_ds, tmp_path): srs4326 = osr.SpatialReference() srs4326.ImportFromEPSG(4326) @@ -438,7 +438,7 @@ def test_ogr_gpkg_6(gpkg_ds): gpkg_ds = gdaltest.reopen(gpkg_ds) - assert validate(gpkg_ds), "validation failed" + assert validate(gpkg_ds, tmpdir=tmp_path), "validation failed" with gdal.quiet_errors(): gpkg_ds = gdaltest.reopen(gpkg_ds) @@ -609,7 +609,7 @@ def get_feature_count_from_gpkg_contents(): @pytest.mark.usefixtures("tbl_linestring") -def test_ogr_gpkg_8(gpkg_ds): +def test_ogr_gpkg_8(gpkg_ds, tmp_path): lyr = gpkg_ds.GetLayer("tbl_linestring") @@ -621,7 +621,7 @@ def test_ogr_gpkg_8(gpkg_ds): gpkg_ds = gdaltest.reopen(gpkg_ds, update=1) - assert validate(gpkg_ds.GetDescription(), "validation failed") + assert validate(gpkg_ds.GetDescription(), "validation failed", tmpdir=tmp_path) lyr = gpkg_ds.GetLayerByName("tbl_linestring") assert lyr.GetLayerDefn().GetFieldDefn(6).GetSubType() == ogr.OFSTBoolean @@ -1348,9 +1348,9 @@ def test_ogr_gpkg_15(gpkg_ds): # Test SetSRID() function -def 
test_ogr_gpkg_SetSRID(): +def test_ogr_gpkg_SetSRID(tmp_vsimem): - filename = "/vsimem/test_ogr_gpkg_SetSRID.gpkg" + filename = tmp_vsimem / "test_ogr_gpkg_SetSRID.gpkg" ds = ogr.GetDriverByName("GPKG").CreateDataSource(filename) lyr = ds.CreateLayer("foo") f = ogr.Feature(lyr.GetLayerDefn()) @@ -1373,16 +1373,15 @@ def test_ogr_gpkg_SetSRID(): ds.ReleaseResultSet(sql_lyr) ds = None - gdal.Unlink("/vsimem/test_ogr_gpkg_SetSRID.gpkg") ############################################################################### # Test ST_EnvIntersects() function -def test_ogr_gpkg_ST_EnvIntersects(): +def test_ogr_gpkg_ST_EnvIntersects(tmp_vsimem): - filename = "/vsimem/test_ogr_gpkg_ST_EnvIntersects.gpkg" + filename = tmp_vsimem / "test_ogr_gpkg_ST_EnvIntersects.gpkg" ds = ogr.GetDriverByName("GPKG").CreateDataSource(filename) lyr = ds.CreateLayer("foo") @@ -1446,7 +1445,6 @@ def test_ogr_gpkg_ST_EnvIntersects(): ds.ReleaseResultSet(sql_lyr) ds = None - gdal.Unlink("/vsimem/test_ogr_gpkg_ST_EnvIntersects.gpkg") ############################################################################### @@ -4670,7 +4668,7 @@ def test_ogr_gpkg_47(tmp_vsimem): gdal.ErrorReset() with gdal.config_option("GPKG_WARN_UNRECOGNIZED_APPLICATION_ID", "NO"): - ogr.Open("/vsimem/ogr_gpkg_47.gpkg") + ogr.Open(tmp_vsimem / "ogr_gpkg_47.gpkg") assert gdal.GetLastErrorMsg() == "" @@ -5784,9 +5782,9 @@ def test_ogr_gpkg_z_or_m_geometry_in_non_zm_layer(tmp_vsimem): # Test fixing up wrong RTree update3 trigger from GeoPackage < 1.2.1 -def test_ogr_gpkg_fixup_wrong_rtree_trigger(): +def test_ogr_gpkg_fixup_wrong_rtree_trigger(tmp_vsimem): - filename = "/vsimem/test_ogr_gpkg_fixup_wrong_rtree_trigger.gpkg" + filename = tmp_vsimem / "test_ogr_gpkg_fixup_wrong_rtree_trigger.gpkg" ds = ogr.GetDriverByName("GPKG").CreateDataSource(filename) ds.CreateLayer("test-with-dash") ds.CreateLayer("test2") @@ -8595,9 +8593,9 @@ def test_ogr_gpkg_arrow_stream_numpy_detailed_spatial_filter(tmp_vsimem, layer_t # Test reading an empty file with GetArrowStream() -def test_ogr_gpkg_arrow_stream_empty_file(): +def test_ogr_gpkg_arrow_stream_empty_file(tmp_vsimem): - ds = ogr.GetDriverByName("GPKG").CreateDataSource("/vsimem/test.gpkg") + ds = ogr.GetDriverByName("GPKG").CreateDataSource(tmp_vsimem / "test.gpkg") lyr = ds.CreateLayer("test", geom_type=ogr.wkbPoint) assert lyr.TestCapability(ogr.OLCFastGetArrowStream) == 1 stream = lyr.GetArrowStream() @@ -8615,8 +8613,6 @@ def test_ogr_gpkg_arrow_stream_empty_file(): del stream ds = None - ogr.GetDriverByName("GPKG").DeleteDataSource("/vsimem/test.gpkg") - ############################################################################### # Test opening a file in WAL mode on a read-only storage From e53ab1447a9751e02d99e9fac2211d293505eea8 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Fri, 5 Apr 2024 22:23:37 -0400 Subject: [PATCH 070/230] autotest: ogr_geojson.py: use tmp_path, tmp_vsimem --- autotest/ogr/ogr_geojson.py | 607 +++++++++++++++++------------------- 1 file changed, 290 insertions(+), 317 deletions(-) diff --git a/autotest/ogr/ogr_geojson.py b/autotest/ogr/ogr_geojson.py index b0c4c7e868c5..2b5e0837e68d 100755 --- a/autotest/ogr/ogr_geojson.py +++ b/autotest/ogr/ogr_geojson.py @@ -463,33 +463,27 @@ def test_ogr_geojson_13(): @gdaltest.disable_exceptions() -def test_ogr_geojson_14(): +def test_ogr_geojson_14(tmp_path): with gdal.quiet_errors(): ds = ogr.Open("data/geojson/ogr_geojson_14.geojson") lyr = ds.GetLayer(0) - try: - out_ds = 
ogr.GetDriverByName("GeoJSON").CreateDataSource( - "tmp/out_ogr_geojson_14.geojson" - ) - out_lyr = out_ds.CreateLayer("lyr") + out_ds = ogr.GetDriverByName("GeoJSON").CreateDataSource( + tmp_path / "out_ogr_geojson_14.geojson" + ) + out_lyr = out_ds.CreateLayer("lyr") - with gdal.quiet_errors(): - for feat in lyr: - geom = feat.GetGeometryRef() - if geom is not None: - # print(geom) - out_feat = ogr.Feature(feature_def=out_lyr.GetLayerDefn()) - out_feat.SetGeometry(geom) - out_lyr.CreateFeature(out_feat) - - out_ds = None - finally: - try: - os.remove("tmp/out_ogr_geojson_14.geojson") - except OSError: - pass + with gdal.quiet_errors(): + for feat in lyr: + geom = feat.GetGeometryRef() + if geom is not None: + # print(geom) + out_feat = ogr.Feature(feature_def=out_lyr.GetLayerDefn()) + out_feat.SetGeometry(geom) + out_lyr.CreateFeature(out_feat) + + out_ds = None ############################################################################### @@ -539,7 +533,7 @@ def test_ogr_geojson_15(): # Test reading files with no extension (#4314) -def test_ogr_geojson_20(): +def test_ogr_geojson_20(tmp_vsimem): from glob import glob @@ -550,20 +544,18 @@ def test_ogr_geojson_20(): # create tmp file with no file extension data = open(gj, "rb").read() - f = gdal.VSIFOpenL("/vsimem/testgj", "wb") + f = gdal.VSIFOpenL(tmp_vsimem / "testgj", "wb") gdal.VSIFWriteL(data, 1, len(data), f) gdal.VSIFCloseL(f) with gdal.quiet_errors(): - ds = ogr.Open("/vsimem/testgj") + ds = ogr.Open(tmp_vsimem / "testgj") if ds is None: print(gj) print(data.decode("LATIN1")) pytest.fail("Failed to open datasource") ds = None - gdal.Unlink("/vsimem/testgj") - ############################################################################### # Test reading output of geocouch spatiallist @@ -640,9 +632,11 @@ def test_ogr_geojson_22(): # Write GeoJSON with bbox and test SRS writing&reading back -def test_ogr_geojson_23(): +def test_ogr_geojson_23(tmp_vsimem): - ds = ogr.GetDriverByName("GeoJSON").CreateDataSource("/vsimem/ogr_geojson_23.json") + ds = ogr.GetDriverByName("GeoJSON").CreateDataSource( + tmp_vsimem / "ogr_geojson_23.json" + ) sr = osr.SpatialReference() sr.ImportFromEPSG(4322) lyr = ds.CreateLayer("foo", srs=sr, options=["WRITE_BBOX=YES"]) @@ -658,7 +652,7 @@ def test_ogr_geojson_23(): lyr = None ds = None - ds = ogr.Open("/vsimem/ogr_geojson_23.json") + ds = ogr.Open(tmp_vsimem / "ogr_geojson_23.json") lyr = ds.GetLayer(0) sr_got = lyr.GetSpatialRef() ds = None @@ -666,12 +660,10 @@ def test_ogr_geojson_23(): sr.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER) assert sr_got.IsSame(sr), "did not get expected SRS" - fp = gdal.VSIFOpenL("/vsimem/ogr_geojson_23.json", "rb") + fp = gdal.VSIFOpenL(tmp_vsimem / "ogr_geojson_23.json", "rb") data = gdal.VSIFReadL(1, 10000, fp).decode("ascii") gdal.VSIFCloseL(fp) - gdal.Unlink("/vsimem/ogr_geojson_23.json") - assert data.find('"bbox": [ 1, 10, 2, 20 ]') != -1, "did not find global bbox" assert ( @@ -683,7 +675,7 @@ def test_ogr_geojson_23(): # Test alternate form of geojson -def test_ogr_geojson_24(): +def test_ogr_geojson_24(tmp_vsimem): content = """loadGeoJSON({"layerFoo": { "type": "Feature", "geometry": { @@ -704,9 +696,9 @@ def test_ogr_geojson_24(): if i == 0: ds = ogr.Open(content) else: - gdal.FileFromMemBuffer("/vsimem/ogr_geojson_24.js", content) - ds = ogr.Open("/vsimem/ogr_geojson_24.js") - gdal.Unlink("/vsimem/ogr_geojson_24.js") + gdal.FileFromMemBuffer(tmp_vsimem / "ogr_geojson_24.js", content) + ds = ogr.Open(tmp_vsimem / "ogr_geojson_24.js") + 
gdal.Unlink(tmp_vsimem / "ogr_geojson_24.js") assert ds is not None, "Failed to open datasource" @@ -735,7 +727,7 @@ def test_ogr_geojson_24(): # Test 64bit support -def test_ogr_geojson_26(): +def test_ogr_geojson_26(tmp_vsimem): ds = ogr.Open( """{"type": "FeatureCollection", "features":[ @@ -777,7 +769,9 @@ def test_ogr_geojson_26(): lyr = None ds = None - ds = ogr.GetDriverByName("GeoJSON").CreateDataSource("/vsimem/ogr_geojson_26.json") + ds = ogr.GetDriverByName("GeoJSON").CreateDataSource( + tmp_vsimem / "ogr_geojson_26.json" + ) lyr = ds.CreateLayer("test") lyr.CreateField(ogr.FieldDefn("int64", ogr.OFTInteger64)) lyr.CreateField(ogr.FieldDefn("int64list", ogr.OFTInteger64List)) @@ -789,12 +783,10 @@ def test_ogr_geojson_26(): f = None ds = None - fp = gdal.VSIFOpenL("/vsimem/ogr_geojson_26.json", "rb") + fp = gdal.VSIFOpenL(tmp_vsimem / "ogr_geojson_26.json", "rb") data = gdal.VSIFReadL(1, 10000, fp).decode("ascii") gdal.VSIFCloseL(fp) - gdal.Unlink("/vsimem/ogr_geojson_26.json") - assert ( '{ "type": "Feature", "id": 1234567890123, "properties": { "int64": 1234567890123, "int64list": [ 1234567890123 ] }, "geometry": null }' in data @@ -842,9 +834,11 @@ def test_ogr_geojson_27(): # Test handling of huge coordinates (#5377) -def test_ogr_geojson_35(): +def test_ogr_geojson_35(tmp_vsimem): - ds = ogr.GetDriverByName("GeoJSON").CreateDataSource("/vsimem/ogr_geojson_35.json") + ds = ogr.GetDriverByName("GeoJSON").CreateDataSource( + tmp_vsimem / "ogr_geojson_35.json" + ) lyr = ds.CreateLayer("foo") feat = ogr.Feature(lyr.GetLayerDefn()) feat.SetFID(1) @@ -918,12 +912,10 @@ def test_ogr_geojson_35(): ds = None - fp = gdal.VSIFOpenL("/vsimem/ogr_geojson_35.json", "rb") + fp = gdal.VSIFOpenL(tmp_vsimem / "ogr_geojson_35.json", "rb") data = gdal.VSIFReadL(1, 10000, fp).decode("ascii") gdal.VSIFCloseL(fp) - gdal.Unlink("/vsimem/ogr_geojson_35.json") - assert "-1.79" in data and "e+308" in data for ident in range(2, 8): assert ( @@ -950,7 +942,7 @@ def test_ogr_geojson_36(): # Test boolean type support -def test_ogr_geojson_37(): +def test_ogr_geojson_37(tmp_vsimem): # Test read support ds = ogr.Open( @@ -989,7 +981,7 @@ def test_ogr_geojson_37(): pytest.fail() out_ds = ogr.GetDriverByName("GeoJSON").CreateDataSource( - "/vsimem/ogr_geojson_37.json" + tmp_vsimem / "ogr_geojson_37.json" ) out_lyr = out_ds.CreateLayer("test") for i in range(feat_defn.GetFieldCount()): @@ -999,12 +991,10 @@ def test_ogr_geojson_37(): out_lyr.CreateFeature(out_f) out_ds = None - fp = gdal.VSIFOpenL("/vsimem/ogr_geojson_37.json", "rb") + fp = gdal.VSIFOpenL(tmp_vsimem / "ogr_geojson_37.json", "rb") data = gdal.VSIFReadL(1, 10000, fp).decode("ascii") gdal.VSIFCloseL(fp) - gdal.Unlink("/vsimem/ogr_geojson_37.json") - assert ( '"bool": false, "not_bool": 0, "bool_list": [ false, true ], "notbool_list": [ false, 3 ]' in data @@ -1015,7 +1005,7 @@ def test_ogr_geojson_37(): # Test datetime/date/time type support -def test_ogr_geojson_38(): +def test_ogr_geojson_38(tmp_vsimem): # Test read support ds = gdal.OpenEx( @@ -1071,7 +1061,7 @@ def test_ogr_geojson_38(): f.DumpReadable() pytest.fail() - tmpfilename = "/vsimem/out.json" + tmpfilename = tmp_vsimem / "out.json" gdal.VectorTranslate( tmpfilename, ds, options="-lco NATIVE_DATA=dummy" ) # dummy NATIVE_DATA so that input values are not copied directly @@ -1080,8 +1070,6 @@ def test_ogr_geojson_38(): data = gdal.VSIFReadL(1, 10000, fp).decode("ascii") gdal.VSIFCloseL(fp) - gdal.Unlink(tmpfilename) - assert ( '"dt": "2014-11-20T12:34:56+01:00", "dt2": 
"2014-11-20T00:00:00", "date": "2014-11-20", "time": "12:34:56"' in data @@ -1430,7 +1418,7 @@ def test_ogr_geojson_44(): # Test native data support -def test_ogr_geojson_45(): +def test_ogr_geojson_45(tmp_vsimem): # Test read support content = """{"type": "FeatureCollection", "foo": "bar", "bar": "baz", @@ -1439,9 +1427,9 @@ def test_ogr_geojson_45(): if i == 0: ds = gdal.OpenEx(content, gdal.OF_VECTOR, open_options=["NATIVE_DATA=YES"]) else: - gdal.FileFromMemBuffer("/vsimem/ogr_geojson_45.json", content) + gdal.FileFromMemBuffer(tmp_vsimem / "ogr_geojson_45.json", content) ds = gdal.OpenEx( - "/vsimem/ogr_geojson_45.json", + tmp_vsimem / "ogr_geojson_45.json", gdal.OF_VECTOR, open_options=["NATIVE_DATA=YES"], ) @@ -1466,9 +1454,11 @@ def test_ogr_geojson_45(): assert native_media_type == "application/vnd.geo+json" ds = None if i == 1: - gdal.Unlink("/vsimem/ogr_geojson_45.json") + gdal.Unlink(tmp_vsimem / "ogr_geojson_45.json") - ds = ogr.GetDriverByName("GeoJSON").CreateDataSource("/vsimem/ogr_geojson_45.json") + ds = ogr.GetDriverByName("GeoJSON").CreateDataSource( + tmp_vsimem / "ogr_geojson_45.json" + ) lyr = ds.CreateLayer( "test", options=[ @@ -1494,11 +1484,11 @@ def test_ogr_geojson_45(): lyr.CreateFeature(f) ds = None - fp = gdal.VSIFOpenL("/vsimem/ogr_geojson_45.json", "rb") + fp = gdal.VSIFOpenL(tmp_vsimem / "ogr_geojson_45.json", "rb") data = gdal.VSIFReadL(1, 10000, fp).decode("ascii") gdal.VSIFCloseL(fp) - gdal.Unlink("/vsimem/ogr_geojson_45.json") + gdal.Unlink(tmp_vsimem / "ogr_geojson_45.json") assert ( '"bbox": [ 0, 1, 2, 0, 1, 2 ],' in data @@ -1535,10 +1525,10 @@ def test_ogr_geojson_45(): """, open_options=["NATIVE_DATA=YES"], ) - gdal.VectorTranslate("/vsimem/out.json", src_ds, format="GeoJSON") + gdal.VectorTranslate(tmp_vsimem / "out.json", src_ds, format="GeoJSON") - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "features": [ @@ -1563,10 +1553,10 @@ def test_ogr_geojson_45(): """, open_options=["NATIVE_DATA=YES"], ) - gdal.VectorTranslate("/vsimem/out.json", src_ds, format="GeoJSON") + gdal.VectorTranslate(tmp_vsimem / "out.json", src_ds, format="GeoJSON") - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "features": [ @@ -1581,9 +1571,11 @@ def test_ogr_geojson_45(): # Test that writing JSon content as value of a string field is serialized as it -def test_ogr_geojson_46(): +def test_ogr_geojson_46(tmp_vsimem): - ds = ogr.GetDriverByName("GeoJSON").CreateDataSource("/vsimem/ogr_geojson_46.json") + ds = ogr.GetDriverByName("GeoJSON").CreateDataSource( + tmp_vsimem / "ogr_geojson_46.json" + ) lyr = ds.CreateLayer("test") lyr.CreateField(ogr.FieldDefn("myprop")) f = ogr.Feature(lyr.GetLayerDefn()) @@ -1591,12 +1583,10 @@ def test_ogr_geojson_46(): lyr.CreateFeature(f) ds = None - fp = gdal.VSIFOpenL("/vsimem/ogr_geojson_46.json", "rb") + fp = gdal.VSIFOpenL(tmp_vsimem / "ogr_geojson_46.json", "rb") data = gdal.VSIFReadL(1, 10000, fp).decode("ascii") gdal.VSIFCloseL(fp) - gdal.Unlink("/vsimem/ogr_geojson_46.json") - assert '{ "myprop": { "a": "b" } }' in data @@ -1605,7 +1595,7 @@ def test_ogr_geojson_46(): @gdaltest.disable_exceptions() -def test_ogr_geojson_47(): +def test_ogr_geojson_47(tmp_vsimem): # ERROR 6: Update from inline definition not supported with 
gdal.quiet_errors(): @@ -1613,20 +1603,20 @@ def test_ogr_geojson_47(): assert ds is None gdal.FileFromMemBuffer( - "/vsimem/ogr_geojson_47.json", + tmp_vsimem / "ogr_geojson_47.json", """{"type": "FeatureCollection", "foo": "bar", "features":[ { "type": "Feature", "bar": "baz", "properties": { "myprop": "myvalue" }, "geometry": null } ]}""", ) # Test read support - ds = ogr.Open("/vsimem/ogr_geojson_47.json", update=1) + ds = ogr.Open(tmp_vsimem / "ogr_geojson_47.json", update=1) lyr = ds.GetLayer(0) f = lyr.GetNextFeature() f.SetField("myprop", "another_value") lyr.SetFeature(f) ds = None - fp = gdal.VSIFOpenL("/vsimem/ogr_geojson_47.json", "rb") + fp = gdal.VSIFOpenL(tmp_vsimem / "ogr_geojson_47.json", "rb") if fp is not None: data = gdal.VSIFReadL(1, 10000, fp).decode("ascii") gdal.VSIFCloseL(fp) @@ -1642,7 +1632,7 @@ def test_ogr_geojson_47(): ) # Test append support - ds = ogr.Open("/vsimem/ogr_geojson_47.json", update=1) + ds = ogr.Open(tmp_vsimem / "ogr_geojson_47.json", update=1) lyr = ds.GetLayer(0) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt("POINT(1 2)")) @@ -1660,7 +1650,7 @@ def test_ogr_geojson_47(): ds = None # Test append support - ds = ogr.Open("/vsimem/ogr_geojson_47.json", update=1) + ds = ogr.Open(tmp_vsimem / "ogr_geojson_47.json", update=1) lyr = ds.GetLayer(0) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt("POINT(4 5)")) @@ -1669,12 +1659,12 @@ def test_ogr_geojson_47(): lyr.SetFeature(f) ds = None - ds = ogr.Open("/vsimem/ogr_geojson_47.json") + ds = ogr.Open(tmp_vsimem / "ogr_geojson_47.json") lyr = ds.GetLayer(0) assert lyr.GetFeatureCount() == 4 ds = None - fp = gdal.VSIFOpenL("/vsimem/ogr_geojson_47.json", "rb") + fp = gdal.VSIFOpenL(tmp_vsimem / "ogr_geojson_47.json", "rb") if fp is not None: data = gdal.VSIFReadL(1, 10000, fp).decode("ascii") gdal.VSIFCloseL(fp) @@ -1691,54 +1681,54 @@ def test_ogr_geojson_47(): and "id" not in data ) - gdal.Unlink("/vsimem/ogr_geojson_47.json") + gdal.Unlink(tmp_vsimem / "ogr_geojson_47.json") # Test appending to empty features array gdal.FileFromMemBuffer( - "/vsimem/ogr_geojson_47.json", + tmp_vsimem / "ogr_geojson_47.json", """{ "type": "FeatureCollection", "features": []}""", ) - ds = ogr.Open("/vsimem/ogr_geojson_47.json", update=1) + ds = ogr.Open(tmp_vsimem / "ogr_geojson_47.json", update=1) lyr = ds.GetLayer(0) f = ogr.Feature(lyr.GetLayerDefn()) lyr.CreateFeature(f) ds = None - ds = ogr.Open("/vsimem/ogr_geojson_47.json") + ds = ogr.Open(tmp_vsimem / "ogr_geojson_47.json") lyr = ds.GetLayer(0) assert lyr.GetFeatureCount() == 1 ds = None # Test appending to array ending with non feature gdal.FileFromMemBuffer( - "/vsimem/ogr_geojson_47.json", + tmp_vsimem / "ogr_geojson_47.json", """{ "type": "FeatureCollection", "features": [ null ]}""", ) - ds = ogr.Open("/vsimem/ogr_geojson_47.json", update=1) + ds = ogr.Open(tmp_vsimem / "ogr_geojson_47.json", update=1) lyr = ds.GetLayer(0) f = ogr.Feature(lyr.GetLayerDefn()) lyr.CreateFeature(f) ds = None - ds = ogr.Open("/vsimem/ogr_geojson_47.json") + ds = ogr.Open(tmp_vsimem / "ogr_geojson_47.json") lyr = ds.GetLayer(0) assert lyr.GetFeatureCount() == 1 ds = None # Test appending to feature collection not ending with "features" gdal.FileFromMemBuffer( - "/vsimem/ogr_geojson_47.json", + tmp_vsimem / "ogr_geojson_47.json", """{ "type": "FeatureCollection", "features": [], "something": "else"}""", ) - ds = ogr.Open("/vsimem/ogr_geojson_47.json", update=1) + ds = ogr.Open(tmp_vsimem / "ogr_geojson_47.json", update=1) 
lyr = ds.GetLayer(0) f = ogr.Feature(lyr.GetLayerDefn()) lyr.CreateFeature(f) ds = None - ds = ogr.Open("/vsimem/ogr_geojson_47.json") + ds = ogr.Open(tmp_vsimem / "ogr_geojson_47.json") lyr = ds.GetLayer(0) assert lyr.GetFeatureCount() == 1 ds = None - fp = gdal.VSIFOpenL("/vsimem/ogr_geojson_47.json", "rb") + fp = gdal.VSIFOpenL(tmp_vsimem / "ogr_geojson_47.json", "rb") if fp is not None: data = gdal.VSIFReadL(1, 10000, fp).decode("ascii") gdal.VSIFCloseL(fp) @@ -1750,20 +1740,20 @@ def test_ogr_geojson_47(): with gdaltest.config_option("OGR_GEOJSON_REWRITE_IN_PLACE", "YES"): # Test appending to feature collection with "bbox" gdal.FileFromMemBuffer( - "/vsimem/ogr_geojson_47.json", + tmp_vsimem / "ogr_geojson_47.json", """{ "type": "FeatureCollection", "bbox": [0,0,0,0], "features": [ { "type": "Feature", "geometry": { "type": "Point", "coordinates": [0,0]} } ]}""", ) - ds = ogr.Open("/vsimem/ogr_geojson_47.json", update=1) + ds = ogr.Open(tmp_vsimem / "ogr_geojson_47.json", update=1) lyr = ds.GetLayer(0) f = ogr.Feature(lyr.GetLayerDefn()) lyr.CreateFeature(f) ds = None - ds = ogr.Open("/vsimem/ogr_geojson_47.json") + ds = ogr.Open(tmp_vsimem / "ogr_geojson_47.json") lyr = ds.GetLayer(0) assert lyr.GetFeatureCount() == 2 ds = None - fp = gdal.VSIFOpenL("/vsimem/ogr_geojson_47.json", "rb") + fp = gdal.VSIFOpenL(tmp_vsimem / "ogr_geojson_47.json", "rb") if fp is not None: data = gdal.VSIFReadL(1, 10000, fp).decode("ascii") gdal.VSIFCloseL(fp) @@ -1772,22 +1762,20 @@ def test_ogr_geojson_47(): assert "bbox" in data - gdal.Unlink("/vsimem/ogr_geojson_47.json") - ############################################################################### # Test update support with file that has a single feature not in a FeatureCollection -def test_ogr_geojson_48(): +def test_ogr_geojson_48(tmp_vsimem): gdal.FileFromMemBuffer( - "/vsimem/ogr_geojson_48.json", + tmp_vsimem / "ogr_geojson_48.json", """{ "type": "Feature", "bar": "baz", "bbox": [2,49,2,49], "properties": { "myprop": "myvalue" }, "geometry": {"type": "Point", "coordinates": [ 2, 49]} }""", ) # Test read support - ds = ogr.Open("/vsimem/ogr_geojson_48.json", update=1) + ds = ogr.Open(tmp_vsimem / "ogr_geojson_48.json", update=1) lyr = ds.GetLayer(0) f = lyr.GetNextFeature() f.SetField("myprop", "another_value") @@ -1795,14 +1783,14 @@ def test_ogr_geojson_48(): lyr.SetFeature(f) ds = None - fp = gdal.VSIFOpenL("/vsimem/ogr_geojson_48.json", "rb") + fp = gdal.VSIFOpenL(tmp_vsimem / "ogr_geojson_48.json", "rb") if fp is not None: data = gdal.VSIFReadL(1, 10000, fp).decode("ascii") gdal.VSIFCloseL(fp) else: data = None - gdal.Unlink("/vsimem/ogr_geojson_48.json") + gdal.Unlink(tmp_vsimem / "ogr_geojson_48.json") # we don't want crs if there's no in the source assert ( @@ -1818,16 +1806,16 @@ def test_ogr_geojson_48(): # Test ARRAY_AS_STRING -def test_ogr_geojson_49(): +def test_ogr_geojson_49(tmp_vsimem): gdal.FileFromMemBuffer( - "/vsimem/ogr_geojson_49.json", + tmp_vsimem / "ogr_geojson_49.json", """{ "type": "Feature", "properties": { "foo": ["bar"] }, "geometry": null }""", ) # Test read support ds = gdal.OpenEx( - "/vsimem/ogr_geojson_49.json", open_options=["ARRAY_AS_STRING=YES"] + tmp_vsimem / "ogr_geojson_49.json", open_options=["ARRAY_AS_STRING=YES"] ) lyr = ds.GetLayer(0) assert lyr.GetLayerDefn().GetFieldDefn(0).GetType() == ogr.OFTString @@ -1837,16 +1825,16 @@ def test_ogr_geojson_49(): pytest.fail() ds = None - gdal.Unlink("/vsimem/ogr_geojson_49.json") - 
############################################################################### # Test that we serialize floating point values with enough significant figures -def test_ogr_geojson_50(): +def test_ogr_geojson_50(tmp_vsimem): - ds = ogr.GetDriverByName("GeoJSON").CreateDataSource("/vsimem/ogr_geojson_50.json") + ds = ogr.GetDriverByName("GeoJSON").CreateDataSource( + tmp_vsimem / "ogr_geojson_50.json" + ) lyr = ds.CreateLayer("test") lyr.CreateField(ogr.FieldDefn("val", ogr.OFTReal)) f = ogr.Feature(lyr.GetLayerDefn()) @@ -1859,17 +1847,19 @@ def test_ogr_geojson_50(): f = None ds = None - fp = gdal.VSIFOpenL("/vsimem/ogr_geojson_50.json", "rb") + fp = gdal.VSIFOpenL(tmp_vsimem / "ogr_geojson_50.json", "rb") data = gdal.VSIFReadL(1, 10000, fp).decode("ascii") gdal.VSIFCloseL(fp) - gdal.Unlink("/vsimem/ogr_geojson_50.json") + gdal.Unlink(tmp_vsimem / "ogr_geojson_50.json") assert "1.23456789012456" in data or "5268.813 " in data # If SIGNIFICANT_FIGURES is explicitly specified, and COORDINATE_PRECISION not, # then it also applies to coordinates - ds = ogr.GetDriverByName("GeoJSON").CreateDataSource("/vsimem/ogr_geojson_50.json") + ds = ogr.GetDriverByName("GeoJSON").CreateDataSource( + tmp_vsimem / "ogr_geojson_50.json" + ) lyr = ds.CreateLayer("test", options=["SIGNIFICANT_FIGURES=17"]) lyr.CreateField(ogr.FieldDefn("val", ogr.OFTReal)) f = ogr.Feature(lyr.GetLayerDefn()) @@ -1878,17 +1868,19 @@ def test_ogr_geojson_50(): f = None ds = None - fp = gdal.VSIFOpenL("/vsimem/ogr_geojson_50.json", "rb") + fp = gdal.VSIFOpenL(tmp_vsimem / "ogr_geojson_50.json", "rb") data = gdal.VSIFReadL(1, 10000, fp).decode("ascii") gdal.VSIFCloseL(fp) - gdal.Unlink("/vsimem/ogr_geojson_50.json") + gdal.Unlink(tmp_vsimem / "ogr_geojson_50.json") assert "1.23456789012456" in data or "-5" in data # If SIGNIFICANT_FIGURES is explicitly specified, and COORDINATE_PRECISION too, # then SIGNIFICANT_FIGURES only applies to non-coordinates floating point values. 
- ds = ogr.GetDriverByName("GeoJSON").CreateDataSource("/vsimem/ogr_geojson_50.json") + ds = ogr.GetDriverByName("GeoJSON").CreateDataSource( + tmp_vsimem / "ogr_geojson_50.json" + ) lyr = ds.CreateLayer( "test", options=["COORDINATE_PRECISION=15", "SIGNIFICANT_FIGURES=17"] ) @@ -1900,11 +1892,11 @@ def test_ogr_geojson_50(): f = None ds = None - fp = gdal.VSIFOpenL("/vsimem/ogr_geojson_50.json", "rb") + fp = gdal.VSIFOpenL(tmp_vsimem / "ogr_geojson_50.json", "rb") data = gdal.VSIFReadL(1, 10000, fp).decode("ascii") gdal.VSIFCloseL(fp) - gdal.Unlink("/vsimem/ogr_geojson_50.json") + gdal.Unlink(tmp_vsimem / "ogr_geojson_50.json") assert "0.00001234" in data and "1.23456789012456" in data @@ -1913,9 +1905,11 @@ def test_ogr_geojson_50(): # Test writing and reading empty geometries -def test_ogr_geojson_51(): +def test_ogr_geojson_51(tmp_vsimem): - ds = ogr.GetDriverByName("GeoJSON").CreateDataSource("/vsimem/ogr_geojson_51.json") + ds = ogr.GetDriverByName("GeoJSON").CreateDataSource( + tmp_vsimem / "ogr_geojson_51.json" + ) lyr = ds.CreateLayer("test") lyr.CreateField(ogr.FieldDefn("id", ogr.OFTInteger)) f = ogr.Feature(lyr.GetLayerDefn()) @@ -1949,7 +1943,7 @@ def test_ogr_geojson_51(): f = ogr.Feature(lyr.GetLayerDefn()) ds = None - fp = gdal.VSIFOpenL("/vsimem/ogr_geojson_51.json", "rb") + fp = gdal.VSIFOpenL(tmp_vsimem / "ogr_geojson_51.json", "rb") data = gdal.VSIFReadL(1, 10000, fp).decode("ascii") gdal.VSIFCloseL(fp) @@ -1984,14 +1978,12 @@ def test_ogr_geojson_51(): in data ) - ds = ogr.Open("/vsimem/ogr_geojson_51.json") + ds = ogr.Open(tmp_vsimem / "ogr_geojson_51.json") lyr = ds.GetLayer(0) for f in lyr: if f.GetFID() >= 2: assert f.GetGeometryRef().IsEmpty() - gdal.Unlink("/vsimem/ogr_geojson_51.json") - ############################################################################### # Test NULL type detection @@ -2022,20 +2014,22 @@ def test_ogr_geojson_52(): # Test that M is ignored (this is a test of OGRLayer::CreateFeature() actually) -def test_ogr_geojson_53(): +def test_ogr_geojson_53(tmp_vsimem): - ds = ogr.GetDriverByName("GeoJSON").CreateDataSource("/vsimem/ogr_geojson_53.json") + ds = ogr.GetDriverByName("GeoJSON").CreateDataSource( + tmp_vsimem / "ogr_geojson_53.json" + ) lyr = ds.CreateLayer("test") f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt("POINT ZM (1 2 3 4)")) lyr.CreateFeature(f) ds = None - fp = gdal.VSIFOpenL("/vsimem/ogr_geojson_53.json", "rb") + fp = gdal.VSIFOpenL(tmp_vsimem / "ogr_geojson_53.json", "rb") data = gdal.VSIFReadL(1, 10000, fp).decode("ascii") gdal.VSIFCloseL(fp) - gdal.Unlink("/vsimem/ogr_geojson_53.json") + gdal.Unlink(tmp_vsimem / "ogr_geojson_53.json") assert '{ "type": "Point", "coordinates": [ 1.0, 2.0, 3.0 ] }' in data @@ -2088,11 +2082,11 @@ def read_file(filename): return content -def test_ogr_geojson_55(): +def test_ogr_geojson_55(tmp_vsimem): # Basic test for standard bbox and coordinate truncation gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", """{ "type": "FeatureCollection", "features": [ @@ -2103,8 +2097,8 @@ def test_ogr_geojson_55(): options="-f GeoJSON -lco RFC7946=YES -lco WRITE_BBOX=YES -preserve_fid", ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "bbox": [ 2.1234568, 49.0000000, 3.0000000, 50.0000000 ], @@ -2118,7 +2112,7 @@ def test_ogr_geojson_55(): # Test polygon winding order gdal.VectorTranslate( - 
"/vsimem/out.json", + tmp_vsimem / "out.json", """{ "type": "FeatureCollection", "features": [ @@ -2131,8 +2125,8 @@ def test_ogr_geojson_55(): layerCreationOptions=["RFC7946=YES", "WRITE_BBOX=YES"], ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "bbox": [ 2.0000000, 49.0000000, 3.0000000, 50.0000000 ], @@ -2179,14 +2173,14 @@ def test_ogr_geojson_55(): open_options=["NATIVE_DATA=YES"], ) gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", src_ds, format="GeoJSON", layerCreationOptions=["RFC7946=YES"], ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "valid": "should be in output", @@ -2207,11 +2201,11 @@ def test_ogr_geojson_55(): @pytest.mark.require_geos -def test_ogr_geojson_56(): +def test_ogr_geojson_56(tmp_vsimem): # Test offsetting longitudes beyond antimeridian gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", """{ "type": "FeatureCollection", "features": [ @@ -2227,8 +2221,8 @@ def test_ogr_geojson_56(): layerCreationOptions=["RFC7946=YES", "WRITE_BBOX=YES"], ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "bbox": [ -178.0000000, 47.0000000, 178.0000000, 52.0000000 ], @@ -2246,7 +2240,7 @@ def test_ogr_geojson_56(): # Test geometries across the antimeridian gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", """{ "type": "FeatureCollection", "features": [ @@ -2262,8 +2256,8 @@ def test_ogr_geojson_56(): layerCreationOptions=["RFC7946=YES", "WRITE_BBOX=YES"], ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "bbox": [172.0, 47.0, -162.0, 52.0], @@ -2298,7 +2292,7 @@ def test_ogr_geojson_56(): # Test geometries that defeats antimeridian heuristics gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", """{ "type": "FeatureCollection", "features": [ @@ -2312,8 +2306,8 @@ def test_ogr_geojson_56(): layerCreationOptions=["RFC7946=YES", "WRITE_BBOX=YES"], ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "bbox": [-163.0, 10.0, 173.0, 52.0], @@ -2346,11 +2340,11 @@ def test_ogr_geojson_56(): @pytest.mark.require_geos -def test_ogr_geojson_56_world(): +def test_ogr_geojson_56_world(tmp_vsimem): # Test polygon geometry that covers the whole world (#2833) gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", """{ "type": "FeatureCollection", "features": [ @@ -2361,8 +2355,8 @@ def test_ogr_geojson_56_world(): layerCreationOptions=["RFC7946=YES", "WRITE_BBOX=YES"], ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "bbox": [ -180.0000000, -90.0000000, 180.0000000, 90.0000000 ], @@ -2375,11 +2369,11 @@ def test_ogr_geojson_56_world(): @pytest.mark.require_geos -def test_ogr_geojson_56_next(): +def 
test_ogr_geojson_56_next(tmp_vsimem): # Test polygon geometry with one longitude at +/- 180deg (#6250) gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", """{ "type": "FeatureCollection", "features": [ @@ -2390,8 +2384,8 @@ def test_ogr_geojson_56_next(): layerCreationOptions=["RFC7946=YES", "WRITE_BBOX=YES"], ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "bbox": [ 179.5000000, 40.0000000, 180.0000000, 50.0000000 ], @@ -2414,14 +2408,14 @@ def test_ogr_geojson_56_next(): # Test WRAPDATELINE=NO (#6250) gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", """{"type":"LineString","coordinates":[[179,50],[-179,50]]}""", format="GeoJSON", layerCreationOptions=["RFC7946=YES", "WRITE_BBOX=YES", "WRAPDATELINE=NO"], ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "features": [ @@ -2434,7 +2428,7 @@ def test_ogr_geojson_56_next(): # Test line geometry with one longitude at +/- 180deg (#8645) gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", """{ "type": "FeatureCollection", "features": [ @@ -2445,8 +2439,8 @@ def test_ogr_geojson_56_next(): layerCreationOptions=["RFC7946=YES", "WRITE_BBOX=YES"], ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "bbox": [ 179.0000000, 0.0000000, -179.0000000, 0.0000000 ], @@ -2459,7 +2453,7 @@ def test_ogr_geojson_56_next(): # Test line geometry with one longitude at +/- 180deg (#8645) gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", """{ "type": "FeatureCollection", "features": [ @@ -2470,8 +2464,8 @@ def test_ogr_geojson_56_next(): layerCreationOptions=["RFC7946=YES", "WRITE_BBOX=YES"], ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "bbox": [ 179.0000000, 0.0000000, -179.0000000, 0.0000000 ], @@ -2488,7 +2482,7 @@ def test_ogr_geojson_56_next(): @pytest.mark.require_geos -def test_ogr_geojson_57(): +def test_ogr_geojson_57(tmp_vsimem): # Standard case: EPSG:32662: WGS 84 / Plate Carre src_ds = gdal.GetDriverByName("Memory").Create("", 0, 0, 0) @@ -2506,14 +2500,14 @@ def test_ogr_geojson_57(): lyr.CreateFeature(f) gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", src_ds, format="GeoJSON", layerCreationOptions=["WRITE_NAME=NO", "RFC7946=YES", "WRITE_BBOX=YES"], ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "bbox": [ -17.9663057, -17.9663057, 17.9663057, 17.9663057 ], @@ -2547,14 +2541,14 @@ def test_ogr_geojson_57(): lyr.CreateFeature(f) gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", src_ds, format="GeoJSON", layerCreationOptions=["WRITE_NAME=NO", "RFC7946=YES", "WRITE_BBOX=YES"], ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "bbox": [ 
-180.0000000, 64.3861643, 180.0000000, 90.0000000 ], @@ -2595,14 +2589,14 @@ def test_ogr_geojson_57(): lyr.CreateFeature(f) gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", src_ds, format="GeoJSON", layerCreationOptions=["WRITE_NAME=NO", "RFC7946=YES", "WRITE_BBOX=YES"], ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "bbox": [ -135.0000000, 64.3861643, -45.0000000, 90.0000000 ], @@ -2629,14 +2623,14 @@ def test_ogr_geojson_57(): lyr.CreateFeature(f) gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", src_ds, format="GeoJSON", layerCreationOptions=["WRITE_NAME=NO", "RFC7946=YES", "WRITE_BBOX=YES"], ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "bbox": [ 45.0000000, 64.3861643, 135.0000000, 90.0000000 ], @@ -2663,14 +2657,14 @@ def test_ogr_geojson_57(): lyr.CreateFeature(f) gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", src_ds, format="GeoJSON", layerCreationOptions=["WRITE_NAME=NO", "RFC7946=YES", "WRITE_BBOX=YES"], ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "bbox": [ 135.0000000, 88.6984598, -135.0000000, 90.0000000 ], @@ -2721,14 +2715,14 @@ def test_ogr_geojson_57(): lyr.CreateFeature(f) gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", src_ds, format="GeoJSON", layerCreationOptions=["WRITE_NAME=NO", "RFC7946=YES", "WRITE_BBOX=YES"], ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "bbox": [ -180.0000000, -90.0000000, 180.0000000, -64.3861643 ], @@ -2768,14 +2762,14 @@ def test_ogr_geojson_57(): lyr.CreateFeature(f) gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", src_ds, format="GeoJSON", layerCreationOptions=["WRITE_NAME=NO", "RFC7946=YES", "WRITE_BBOX=YES"], ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "bbox": [ 178.5275649, 0.0000000, -179.0681936, 37.0308258 ], @@ -2819,14 +2813,14 @@ def test_ogr_geojson_57(): lyr.CreateFeature(f) gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", src_ds, format="GeoJSON", layerCreationOptions=["WRITE_NAME=NO", "RFC7946=YES", "WRITE_BBOX=YES"], ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") expected = """{ "type": "FeatureCollection", "bbox": [ 178.8892102, 36.1240958, 179.2483693, 37.0308258 ], @@ -2842,7 +2836,7 @@ def test_ogr_geojson_57(): # Test using the name member of FeatureCollection -def test_ogr_geojson_58(): +def test_ogr_geojson_58(tmp_vsimem): ds = ogr.Open( '{ "type": "FeatureCollection", "name": "layer_name", "features": []}' @@ -2853,20 +2847,21 @@ def test_ogr_geojson_58(): assert lyr is not None, "Missing layer called layer_name" ds = None - ds = ogr.GetDriverByName("GeoJSON").CreateDataSource("/vsimem/ogr_geojson_58.json") + ds = 
ogr.GetDriverByName("GeoJSON").CreateDataSource( + tmp_vsimem / "ogr_geojson_58.json" + ) lyr = ds.CreateLayer("foo") ds = None - ds = ogr.Open("/vsimem/ogr_geojson_58.json") + ds = ogr.Open(tmp_vsimem / "ogr_geojson_58.json") assert ds.GetLayerByName("foo") is not None, "Missing layer called foo" ds = None - gdal.Unlink("/vsimem/ogr_geojson_58.json") ############################################################################### # Test using the description member of FeatureCollection -def test_ogr_geojson_59(): +def test_ogr_geojson_59(tmp_vsimem): ds = ogr.Open( '{ "type": "FeatureCollection", "description": "my_description", "features": []}' @@ -2879,21 +2874,22 @@ def test_ogr_geojson_59(): ), "Did not get DESCRIPTION" ds = None - ds = ogr.GetDriverByName("GeoJSON").CreateDataSource("/vsimem/ogr_geojson_59.json") + ds = ogr.GetDriverByName("GeoJSON").CreateDataSource( + tmp_vsimem / "ogr_geojson_59.json" + ) lyr = ds.CreateLayer("foo", options=["DESCRIPTION=my desc"]) ds = None - ds = ogr.Open("/vsimem/ogr_geojson_59.json") + ds = ogr.Open(tmp_vsimem / "ogr_geojson_59.json") lyr = ds.GetLayerByName("foo") assert lyr.GetMetadataItem("DESCRIPTION") == "my desc", "Did not get DESCRIPTION" ds = None - gdal.Unlink("/vsimem/ogr_geojson_59.json") ############################################################################### # Test null vs unset field -def test_ogr_geojson_60(): +def test_ogr_geojson_60(tmp_vsimem): ds = gdal.OpenEx( """{ "type": "FeatureCollection", "features": [ @@ -2916,13 +2912,13 @@ def test_ogr_geojson_60(): pytest.fail() # Test writing side - gdal.VectorTranslate("/vsimem/ogr_geojson_60.json", ds, format="GeoJSON") + gdal.VectorTranslate(tmp_vsimem / "ogr_geojson_60.json", ds, format="GeoJSON") - fp = gdal.VSIFOpenL("/vsimem/ogr_geojson_60.json", "rb") + fp = gdal.VSIFOpenL(tmp_vsimem / "ogr_geojson_60.json", "rb") data = gdal.VSIFReadL(1, 10000, fp).decode("ascii") gdal.VSIFCloseL(fp) - gdal.Unlink("/vsimem/ogr_geojson_60.json") + gdal.Unlink(tmp_vsimem / "ogr_geojson_60.json") assert ( '"properties": { "foo": "bar" }' in data and '"properties": { "foo": null }' in data @@ -2934,16 +2930,16 @@ def test_ogr_geojson_60(): # Test corner cases -def test_ogr_geojson_61(): +def test_ogr_geojson_61(tmp_vsimem): # Invalid JSon gdal.FileFromMemBuffer( - "/vsimem/ogr_geojson_61.json", + tmp_vsimem / "ogr_geojson_61.json", """{ "type": "FeatureCollection", "features": [""", ) with pytest.raises(Exception): - ds = gdal.OpenEx("/vsimem/ogr_geojson_61.json") - gdal.Unlink("/vsimem/ogr_geojson_61.json") + ds = gdal.OpenEx(tmp_vsimem / "ogr_geojson_61.json") + gdal.Unlink(tmp_vsimem / "ogr_geojson_61.json") # Invalid single geometry with pytest.raises(Exception): @@ -2951,15 +2947,15 @@ def test_ogr_geojson_61(): # Empty property name gdal.FileFromMemBuffer( - "/vsimem/ogr_geojson_61.json", + tmp_vsimem / "ogr_geojson_61.json", """{ "type": "FeatureCollection", "features": [ { "type": "Feature", "properties": {"": 1}, "geometry": null }] }""", ) - ds = gdal.OpenEx("/vsimem/ogr_geojson_61.json") + ds = gdal.OpenEx(tmp_vsimem / "ogr_geojson_61.json") lyr = ds.GetLayer(0) f = lyr.GetNextFeature() assert f.GetField("") == 1 ds = None - gdal.Unlink("/vsimem/ogr_geojson_61.json") + gdal.Unlink(tmp_vsimem / "ogr_geojson_61.json") ############################################################################### @@ -3218,194 +3214,194 @@ def test_ogr_geojson_67(): ############################################################################### -def test_ogr_geojson_id_field_and_id_type(): +def 
test_ogr_geojson_id_field_and_id_type(tmp_vsimem): gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", "data/poly.shp", options="-f GeoJSON -lco ID_TYPE=String -preserve_fid -limit 1 -fid 2", ) - got = read_file("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") assert ( '"id": "2", "properties": { "AREA": 261752.781, "EAS_ID": 171, "PRFEDEA": "35043414" }' in got ) gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", "data/poly.shp", options="-f GeoJSON -lco ID_TYPE=Integer -preserve_fid -limit 1 -fid 2", ) - got = read_file("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") assert ( '"id": 2, "properties": { "AREA": 261752.781, "EAS_ID": 171, "PRFEDEA": "35043414" }' in got ) gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", "data/poly.shp", format="GeoJSON", layerCreationOptions=["ID_FIELD=EAS_ID"], limit=1, ) - got = read_file("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") assert ( '"id": 168, "properties": { "AREA": 215229.266, "PRFEDEA": "35043411" }' in got ) - src_ds = gdal.OpenEx("/vsimem/out.json", open_options=["NATIVE_DATA=YES"]) - gdal.VectorTranslate("/vsimem/out2.json", src_ds, format="GeoJSON") + src_ds = gdal.OpenEx(tmp_vsimem / "out.json", open_options=["NATIVE_DATA=YES"]) + gdal.VectorTranslate(tmp_vsimem / "out2.json", src_ds, format="GeoJSON") src_ds = None - got = read_file("/vsimem/out2.json") - gdal.Unlink("/vsimem/out2.json") + got = read_file(tmp_vsimem / "out2.json") + gdal.Unlink(tmp_vsimem / "out2.json") assert ( '"id": 168, "properties": { "AREA": 215229.266, "PRFEDEA": "35043411" }' in got ) - src_ds = gdal.OpenEx("/vsimem/out.json", open_options=["NATIVE_DATA=YES"]) + src_ds = gdal.OpenEx(tmp_vsimem / "out.json", open_options=["NATIVE_DATA=YES"]) gdal.VectorTranslate( - "/vsimem/out2.json", + tmp_vsimem / "out2.json", src_ds, format="GeoJSON", layerCreationOptions=["ID_TYPE=String"], ) src_ds = None - got = read_file("/vsimem/out2.json") - gdal.Unlink("/vsimem/out2.json") + got = read_file(tmp_vsimem / "out2.json") + gdal.Unlink(tmp_vsimem / "out2.json") assert ( '"id": "168", "properties": { "AREA": 215229.266, "PRFEDEA": "35043411" }' in got ) - src_ds = gdal.OpenEx("/vsimem/out.json", open_options=["NATIVE_DATA=YES"]) + src_ds = gdal.OpenEx(tmp_vsimem / "out.json", open_options=["NATIVE_DATA=YES"]) gdal.VectorTranslate( - "/vsimem/out2.json", + tmp_vsimem / "out2.json", src_ds, format="GeoJSON", layerCreationOptions=["ID_TYPE=Integer"], ) src_ds = None - got = read_file("/vsimem/out2.json") - gdal.Unlink("/vsimem/out2.json") + got = read_file(tmp_vsimem / "out2.json") + gdal.Unlink(tmp_vsimem / "out2.json") assert ( '"id": 168, "properties": { "AREA": 215229.266, "PRFEDEA": "35043411" }' in got ) - gdal.Unlink("/vsimem/out.json") + gdal.Unlink(tmp_vsimem / "out.json") gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", "data/poly.shp", format="GeoJSON", layerCreationOptions=["ID_FIELD=EAS_ID", "ID_TYPE=String"], limit=1, ) - got = read_file("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") assert ( '"id": "168", "properties": { "AREA": 215229.266, "PRFEDEA": "35043411" }' in got ) - src_ds = gdal.OpenEx("/vsimem/out.json", open_options=["NATIVE_DATA=YES"]) - gdal.VectorTranslate("/vsimem/out2.json", src_ds, format="GeoJSON") + src_ds = gdal.OpenEx(tmp_vsimem / "out.json", open_options=["NATIVE_DATA=YES"]) + gdal.VectorTranslate(tmp_vsimem / "out2.json", src_ds, format="GeoJSON") src_ds = None - got = 
read_file("/vsimem/out2.json") - gdal.Unlink("/vsimem/out2.json") + got = read_file(tmp_vsimem / "out2.json") + gdal.Unlink(tmp_vsimem / "out2.json") assert ( '"id": "168", "properties": { "AREA": 215229.266, "PRFEDEA": "35043411" }' in got ) - src_ds = gdal.OpenEx("/vsimem/out.json", open_options=["NATIVE_DATA=YES"]) + src_ds = gdal.OpenEx(tmp_vsimem / "out.json", open_options=["NATIVE_DATA=YES"]) gdal.VectorTranslate( - "/vsimem/out2.json", + tmp_vsimem / "out2.json", src_ds, format="GeoJSON", layerCreationOptions=["ID_TYPE=String"], ) src_ds = None - got = read_file("/vsimem/out2.json") - gdal.Unlink("/vsimem/out2.json") + got = read_file(tmp_vsimem / "out2.json") + gdal.Unlink(tmp_vsimem / "out2.json") assert ( '"id": "168", "properties": { "AREA": 215229.266, "PRFEDEA": "35043411" }' in got ) - src_ds = gdal.OpenEx("/vsimem/out.json", open_options=["NATIVE_DATA=YES"]) + src_ds = gdal.OpenEx(tmp_vsimem / "out.json", open_options=["NATIVE_DATA=YES"]) gdal.VectorTranslate( - "/vsimem/out2.json", + tmp_vsimem / "out2.json", src_ds, format="GeoJSON", layerCreationOptions=["ID_TYPE=Integer"], ) src_ds = None - got = read_file("/vsimem/out2.json") - gdal.Unlink("/vsimem/out2.json") + got = read_file(tmp_vsimem / "out2.json") + gdal.Unlink(tmp_vsimem / "out2.json") assert ( '"id": 168, "properties": { "AREA": 215229.266, "PRFEDEA": "35043411" }' in got ) - gdal.Unlink("/vsimem/out.json") + gdal.Unlink(tmp_vsimem / "out.json") gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", "data/poly.shp", format="GeoJSON", layerCreationOptions=["ID_FIELD=PRFEDEA"], limit=1, ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") assert ( '"id": "35043411", "properties": { "AREA": 215229.266, "EAS_ID": 168 }' in got ) gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", "data/poly.shp", format="GeoJSON", layerCreationOptions=["ID_FIELD=PRFEDEA", "ID_TYPE=Integer"], limit=1, ) - got = read_file("/vsimem/out.json") - gdal.Unlink("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") + gdal.Unlink(tmp_vsimem / "out.json") assert '"id": 35043411, "properties": { "AREA": 215229.266, "EAS_ID": 168 }' in got gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", "data/poly.shp", format="GeoJSON", layerCreationOptions=["ID_GENERATE=YES"], limit=1, ) - got = read_file("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") assert ( '"id": 0, "properties": { "AREA": 215229.266, "EAS_ID": 168, "PRFEDEA": "35043411" }' in got ) gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", "data/poly.shp", format="GeoJSON", layerCreationOptions=["ID_GENERATE=YES", "ID_TYPE=Integer"], limit=1, ) - got = read_file("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") assert ( '"id": 0, "properties": { "AREA": 215229.266, "EAS_ID": 168, "PRFEDEA": "35043411" }' in got ) gdal.VectorTranslate( - "/vsimem/out.json", + tmp_vsimem / "out.json", "data/poly.shp", format="GeoJSON", layerCreationOptions=["ID_GENERATE=YES", "ID_TYPE=String"], limit=1, ) - got = read_file("/vsimem/out.json") + got = read_file(tmp_vsimem / "out.json") assert ( '"id": "0", "properties": { "AREA": 215229.266, "EAS_ID": 168, "PRFEDEA": "35043411" }' in got @@ -3474,9 +3470,9 @@ def test_ogr_geojson_starting_with_crs(): # Test we properly flush the file in SyncToDisk() in append situations -def test_ogr_geojson_append_flush(): +def 
test_ogr_geojson_append_flush(tmp_path): - tmpfilename = "tmp/ogr_geojson_append_flush.json" + tmpfilename = tmp_path / "ogr_geojson_append_flush.json" f = gdal.VSIFOpenL(tmpfilename, "wb") content = """{ "type": "FeatureCollection", @@ -3500,7 +3496,6 @@ def test_ogr_geojson_append_flush(): ds = None ds2 = None - gdal.Unlink(tmpfilename) ############################################################################### @@ -3543,9 +3538,9 @@ def test_ogr_geojson_read_fields_with_different_case(): @pytest.mark.require_geos -def test_ogr_geojson_clip_geometries_rfc7946(): +def test_ogr_geojson_clip_geometries_rfc7946(tmp_vsimem): - tmpfilename = "/vsimem/out.json" + tmpfilename = tmp_vsimem / "out.json" gdal.VectorTranslate( tmpfilename, """{ @@ -3582,14 +3577,12 @@ def test_ogr_geojson_clip_geometries_rfc7946(): ds = None - gdal.Unlink(tmpfilename) - ############################################################################### # Test bugfix for https://github.com/OSGeo/gdal/issues/1109 -def test_ogr_geojson_non_finite(): +def test_ogr_geojson_non_finite(tmp_vsimem): json_content = """{ "type": "FeatureCollection", @@ -3618,7 +3611,7 @@ def test_ogr_geojson_non_finite(): pytest.fail(str(f["nan_prop"])) ds = None - tmpfilename = "/vsimem/out.json" + tmpfilename = tmp_vsimem / "out.json" with gdal.quiet_errors(): gdal.VectorTranslate(tmpfilename, json_content, options="-f GeoJSON") @@ -3645,14 +3638,12 @@ def test_ogr_geojson_non_finite(): pytest.fail(str(f["nan_prop"])) ds = None - gdal.Unlink(tmpfilename) - ############################################################################### # Test writing fields with and without automatic JSON interpretation -def test_ogr_geojson_json_string_autodetect(): +def test_ogr_geojson_json_string_autodetect(tmp_vsimem): json_content = """{ "type": "FeatureCollection", @@ -3669,7 +3660,7 @@ def test_ogr_geojson_json_string_autodetect(): assert f["jsonish"] == "[5]" ds = None - tmpfilename = "/vsimem/out.json" + tmpfilename = tmp_vsimem / "out.json" with gdal.quiet_errors(): gdal.VectorTranslate(tmpfilename, json_content, options="-f GeoJSON") @@ -3693,13 +3684,12 @@ def test_ogr_geojson_json_string_autodetect(): f = lyr.GetNextFeature() assert f["jsonish"] == "[5]" ds = None - gdal.Unlink(tmpfilename) ############################################################################### -def test_ogr_geojson_random_reading_with_id(): +def test_ogr_geojson_random_reading_with_id(tmp_vsimem): json_content = """{ "type": "FeatureCollection", @@ -3708,7 +3698,7 @@ def test_ogr_geojson_random_reading_with_id(): { "type": "Feature", "id": 2, "properties": { "a": "bc" }, "geometry": null } ] }""" - tmpfilename = "/vsimem/temp.json" + tmpfilename = tmp_vsimem / "temp.json" gdal.FileFromMemBuffer(tmpfilename, json_content) ds = ogr.Open(tmpfilename) lyr = ds.GetLayer(0) @@ -3720,13 +3710,12 @@ def test_ogr_geojson_random_reading_with_id(): assert f2.Equal(f2_ref) assert not lyr.GetFeature(3) ds = None - gdal.Unlink(tmpfilename) ############################################################################### -def test_ogr_geojson_random_reading_without_id(): +def test_ogr_geojson_random_reading_without_id(tmp_vsimem): json_content = """{ "type": "FeatureCollection", @@ -3735,7 +3724,7 @@ def test_ogr_geojson_random_reading_without_id(): { "type": "Feature", "properties": { "a": "bc" }, "geometry": null } ] }""" - tmpfilename = "/vsimem/temp.json" + tmpfilename = tmp_vsimem / "temp.json" gdal.FileFromMemBuffer(tmpfilename, json_content) ds = ogr.Open(tmpfilename) lyr = 
ds.GetLayer(0) @@ -3747,18 +3736,17 @@ def test_ogr_geojson_random_reading_without_id(): assert f2.Equal(f2_ref) assert not lyr.GetFeature(2) ds = None - gdal.Unlink(tmpfilename) ############################################################################### -def test_ogr_geojson_single_feature_random_reading_with_id(): +def test_ogr_geojson_single_feature_random_reading_with_id(tmp_vsimem): json_content = """ { "type": "Feature", "id": 1, "properties": { "a": "a" }, "geometry": null } }""" - tmpfilename = "/vsimem/temp.json" + tmpfilename = tmp_vsimem / "temp.json" gdal.FileFromMemBuffer(tmpfilename, json_content) ds = ogr.Open(tmpfilename) lyr = ds.GetLayer(0) @@ -3766,7 +3754,6 @@ def test_ogr_geojson_single_feature_random_reading_with_id(): f1 = lyr.GetFeature(1) assert f1.Equal(f1_ref) ds = None - gdal.Unlink(tmpfilename) ############################################################################### @@ -3805,9 +3792,9 @@ def test_ogr_geojson_3D_geom_type(): ############################################################################### -def test_ogr_geojson_update_in_loop(): +def test_ogr_geojson_update_in_loop(tmp_vsimem): - tmpfilename = "/vsimem/temp.json" + tmpfilename = tmp_vsimem / "temp.json" # No explicit id gdal.FileFromMemBuffer( @@ -3852,32 +3839,28 @@ def test_ogr_geojson_update_in_loop(): assert fids == [1, 3] ds = None - gdal.Unlink(tmpfilename) - ############################################################################### # Test fix for https://github.com/OSGeo/gdal/issues/2720 -def test_ogr_geojson_starting_with_coordinates(): +def test_ogr_geojson_starting_with_coordinates(tmp_vsimem): - tmpfilename = "/vsimem/temp.json" + tmpfilename = tmp_vsimem / "temp.json" gdal.FileFromMemBuffer( tmpfilename, '{ "coordinates": [' + (" " * 10000) + '2,49], "type": "Point"}' ) ds = gdal.OpenEx(tmpfilename, gdal.OF_VECTOR) assert ds is not None - gdal.Unlink(tmpfilename) - ############################################################################### # Test fix for https://github.com/OSGeo/gdal/issues/2787 -def test_ogr_geojson_starting_with_geometry_coordinates(): +def test_ogr_geojson_starting_with_geometry_coordinates(tmp_vsimem): - tmpfilename = "/vsimem/temp.json" + tmpfilename = tmp_vsimem / "temp.json" gdal.FileFromMemBuffer( tmpfilename, '{ "geometry": {"coordinates": [' @@ -3887,18 +3870,16 @@ def test_ogr_geojson_starting_with_geometry_coordinates(): ds = gdal.OpenEx(tmpfilename, gdal.OF_VECTOR) assert ds is not None - gdal.Unlink(tmpfilename) - ############################################################################### # Test serialization of Float32 values -def test_ogr_geojson_write_float32(): +def test_ogr_geojson_write_float32(tmp_vsimem): def cast_as_float(x): return struct.unpack("f", struct.pack("f", x))[0] - filename = "/vsimem/test_ogr_geojson_write_float32.json" + filename = tmp_vsimem / "test_ogr_geojson_write_float32.json" ds = ogr.GetDriverByName("GeoJSON").CreateDataSource(filename) lyr = ds.CreateLayer("foo") @@ -3944,9 +3925,9 @@ def cast_as_float(x): # Test bugfix for #3172 -def test_ogr_geojson_write_float_exponential_without_dot(): +def test_ogr_geojson_write_float_exponential_without_dot(tmp_vsimem): - filename = "/vsimem/test_ogr_geojson_write_float_exponential_without_dot.json" + filename = tmp_vsimem / "test_ogr_geojson_write_float_exponential_without_dot.json" ds = ogr.GetDriverByName("GeoJSON").CreateDataSource(filename) lyr = ds.CreateLayer("foo") @@ -3968,8 +3949,6 @@ def test_ogr_geojson_write_float_exponential_without_dot(): data = 
gdal.VSIFReadL(1, 10000, fp).decode("ascii") gdal.VSIFCloseL(fp) - gdal.Unlink(filename) - # Check that the json can be parsed json.loads(data) @@ -3978,15 +3957,14 @@ def test_ogr_geojson_write_float_exponential_without_dot(): # Test bugfix for #3280 -def test_ogr_geojson_feature_starting_with_big_properties(): +def test_ogr_geojson_feature_starting_with_big_properties(tmp_vsimem): - filename = "/vsimem/test_ogr_geojson_feature_starting_with_big_properties.json" + filename = tmp_vsimem / "test_ogr_geojson_feature_starting_with_big_properties.json" gdal.FileFromMemBuffer( filename, '{"properties":{"foo":"%s"},"type":"Feature","geometry":null}' % ("x" * 10000), ) assert ogr.Open(filename) is not None - gdal.Unlink(filename) ############################################################################### @@ -4082,7 +4060,7 @@ def test_ogr_geojson_crs_4979(filename): ############################################################################### -def test_ogr_geojson_write_rfc7946_from_3D_crs(): +def test_ogr_geojson_write_rfc7946_from_3D_crs(tmp_vsimem): srs_4979 = osr.SpatialReference() srs_4979.ImportFromEPSG(4979) @@ -4097,7 +4075,7 @@ def test_ogr_geojson_write_rfc7946_from_3D_crs(): lon, lat, z = ct.TransformPoint(2, 49, ellipsoidal_height) # If we have the egm96 grid, then z should be different from 100 - filename = "/vsimem/out.geojson" + filename = tmp_vsimem / "out.geojson" ds = ogr.GetDriverByName("GeoJSON").CreateDataSource(filename) lyr = ds.CreateLayer("out", srs=srs_4326_5773, options=["RFC7946=YES"]) f = ogr.Feature(lyr.GetLayerDefn()) @@ -4119,9 +4097,9 @@ def test_ogr_geojson_write_rfc7946_from_3D_crs(): @gdaltest.disable_exceptions() -def test_ogr_geojson_feature_large(): +def test_ogr_geojson_feature_large(tmp_vsimem): - filename = "/vsimem/test_ogr_geojson_feature_large.json" + filename = tmp_vsimem / "test_ogr_geojson_feature_large.json" gdal.FileFromMemBuffer( filename, '{"type":"FeatureCollection","features":[{"type":"Feature","properties":{},"geometry":{"type":"LineString","coordinates":[%s]}}]}' @@ -4178,9 +4156,9 @@ def test_ogr_geojson_read_from_http(): # Test ogr2ogr -nln with a input dataset being a GeoJSON file with a name -def test_ogr_geojson_ogr2ogr_nln_with_input_dataset_having_name(): +def test_ogr_geojson_ogr2ogr_nln_with_input_dataset_having_name(tmp_vsimem): - filename = "/vsimem/test_ogr_geojson_feature_large.geojson" + filename = tmp_vsimem / "test_ogr_geojson_feature_large.geojson" gdal.VectorTranslate( filename, '{"type":"FeatureCollection","name":"to_be_overriden","features":[]}', @@ -4189,7 +4167,6 @@ def test_ogr_geojson_ogr2ogr_nln_with_input_dataset_having_name(): ds = ogr.Open(filename) assert ds.GetLayer(0).GetName() == "new_name" ds = None - gdal.Unlink(filename) ############################################################################### @@ -4319,9 +4296,9 @@ def test_ogr_geojson_test_ogrsf(): [{"a_field": "a_value"}, "a_string", 42], ], ) -def test_ogr_geojson_mixed_type_promotion(properties): +def test_ogr_geojson_mixed_type_promotion(tmp_vsimem, properties): - tmpfilename = "/vsimem/temp.json" + tmpfilename = tmp_vsimem / "temp.json" jdata = {"type": "FeatureCollection", "features": []} @@ -4342,16 +4319,14 @@ def test_ogr_geojson_mixed_type_promotion(properties): assert fld_def.GetTypeName() == "String" assert fld_def.GetSubType() == ogr.OFSTJSON - gdal.Unlink(tmpfilename) - ############################################################################### # Test fix for https://github.com/OSGeo/gdal/issues/7319 -def 
test_ogr_geojson_coordinate_precision(): +def test_ogr_geojson_coordinate_precision(tmp_vsimem): - filename = "/vsimem/test_ogr_geojson_coordinate_precision.json" + filename = tmp_vsimem / "test_ogr_geojson_coordinate_precision.json" ds = ogr.GetDriverByName("GeoJSON").CreateDataSource(filename) lyr = ds.CreateLayer("foo", options=["COORDINATE_PRECISION=1", "WRITE_BBOX=YES"]) @@ -4375,16 +4350,14 @@ def test_ogr_geojson_coordinate_precision(): prec = geom_fld.GetCoordinatePrecision() assert prec.GetXYResolution() == 1e-1 - gdal.Unlink(filename) - ############################################################################### # Test fix for https://github.com/OSGeo/gdal/issues/7319 -def test_ogr_geojson_field_types(): +def test_ogr_geojson_field_types(tmp_vsimem): - filename = "/vsimem/test_ogr_geojson_field_types.json" + filename = tmp_vsimem / "test_ogr_geojson_field_types.json" test_data = """{"type":"FeatureCollection","name":"My Collection","features":[ { "type": "Feature", "properties": { "prop0": 42 }, "geometry": { "type": "Point", "coordinates": [ 102.0, 0.5 ] } }, From 48b29463d3ea2b49accb592b01ec6e4f61810715 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Fri, 5 Apr 2024 22:24:27 -0400 Subject: [PATCH 071/230] autotest: ogr_geoconcept.py: use tmp_path --- autotest/ogr/ogr_geoconcept.py | 17 +++-------------- 1 file changed, 3 insertions(+), 14 deletions(-) diff --git a/autotest/ogr/ogr_geoconcept.py b/autotest/ogr/ogr_geoconcept.py index 7b9559aaf27a..42290916a6e8 100755 --- a/autotest/ogr/ogr_geoconcept.py +++ b/autotest/ogr/ogr_geoconcept.py @@ -29,8 +29,6 @@ # DEALINGS IN THE SOFTWARE. ############################################################################### -import os - import ogrtest import pytest @@ -44,10 +42,6 @@ @pytest.fixture(autouse=True, scope="module") def startup_and_cleanup(): yield - try: - os.remove("tmp/tmp.gxt") - except OSError: - pass ############################################################################### @@ -142,21 +136,16 @@ def test_ogr_gxt_2(): # Read a GXT file containing 2 points, duplicate it, and check the newly written file -def test_ogr_gxt_3(): +def test_ogr_gxt_3(tmp_path): ds = None src_ds = ogr.Open("data/geoconcept/points.gxt") - try: - os.remove("tmp/tmp.gxt") - except OSError: - pass - # Duplicate all the points from the source GXT src_lyr = src_ds.GetLayerByName("points.points") - ds = ogr.GetDriverByName("Geoconcept").CreateDataSource("tmp/tmp.gxt") + ds = ogr.GetDriverByName("Geoconcept").CreateDataSource(tmp_path / "tmp.gxt") srs = osr.SpatialReference() srs.SetWellKnownGeogCS("WGS84") @@ -181,7 +170,7 @@ def test_ogr_gxt_3(): ds = None # Read the newly written GXT file and check its features and geometries - ds = ogr.Open("tmp/tmp.gxt") + ds = ogr.Open(tmp_path / "tmp.gxt") gxt_lyr = ds.GetLayerByName("points.points") assert gxt_lyr.GetSpatialRef().IsSame( From f25e798a27bebd5662e0909a910e2f7b6fcce29d Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Fri, 5 Apr 2024 22:25:05 -0400 Subject: [PATCH 072/230] autotest: ogr_dxf.py: use tmp_path --- autotest/ogr/ogr_dxf.py | 94 +++++++++++++++-------------------------- 1 file changed, 35 insertions(+), 59 deletions(-) diff --git a/autotest/ogr/ogr_dxf.py b/autotest/ogr/ogr_dxf.py index edfb4fee7994..1e11b9d389e8 100644 --- a/autotest/ogr/ogr_dxf.py +++ b/autotest/ogr/ogr_dxf.py @@ -30,8 +30,6 @@ # DEALINGS IN THE SOFTWARE. 
############################################################################### -import os - import gdaltest import ogrtest import pytest @@ -445,9 +443,9 @@ def test_ogr_dxf_11(): # Write a simple file with a polygon and a line, and read back. -def test_ogr_dxf_12(): +def test_ogr_dxf_12(tmp_path): - ds = ogr.GetDriverByName("DXF").CreateDataSource("tmp/dxf_11.dxf") + ds = ogr.GetDriverByName("DXF").CreateDataSource(tmp_path / "dxf_11.dxf") lyr = ds.CreateLayer("entities") assert lyr.GetDataset().GetDescription() == ds.GetDescription() @@ -482,7 +480,7 @@ def test_ogr_dxf_12(): ds = None # Read back. - ds = ogr.Open("tmp/dxf_11.dxf") + ds = ogr.Open(tmp_path / "dxf_11.dxf") lyr = ds.GetLayer(0) # Check first feature @@ -529,8 +527,6 @@ def test_ogr_dxf_12(): ds = None ds = None - os.unlink("tmp/dxf_11.dxf") - ############################################################################### # Check smoothed polyline. @@ -645,10 +641,10 @@ def test_ogr_dxf_14(): # layer '0'. -def test_ogr_dxf_15(): +def test_ogr_dxf_15(tmp_path): ds = ogr.GetDriverByName("DXF").CreateDataSource( - "tmp/dxf_14.dxf", ["FIRST_ENTITY=80"] + tmp_path / "dxf_14.dxf", ["FIRST_ENTITY=80"] ) lyr = ds.CreateLayer("entities") @@ -668,7 +664,7 @@ def test_ogr_dxf_15(): ds = None # Read back. - ds = ogr.Open("tmp/dxf_14.dxf") + ds = ogr.Open(tmp_path / "dxf_14.dxf") lyr = ds.GetLayer(0) # Check first feature @@ -705,7 +701,7 @@ def test_ogr_dxf_15(): # that will be different is the layer name is 'abc' instead of '0' # and the entity id. - outdxf = open("tmp/dxf_14.dxf").read() + outdxf = open(tmp_path / "dxf_14.dxf").read() start_1 = outdxf.find(" 0\nLAYER") start_2 = outdxf.find(" 0\nLAYER", start_1 + 10) @@ -723,8 +719,6 @@ def test_ogr_dxf_15(): handseed = outdxf[start_seed + 10 + 4 : start_seed + 10 + 4 + 8] assert handseed == "00000053", "Did not get expected HANDSEED, got %s." % handseed - os.unlink("tmp/dxf_14.dxf") - ############################################################################### # Test reading without DXF blocks inlined. @@ -819,10 +813,10 @@ def test_ogr_dxf_16(): # Write a file with blocks defined from a source blocks layer. -def test_ogr_dxf_17(): +def test_ogr_dxf_17(tmp_path): ds = ogr.GetDriverByName("DXF").CreateDataSource( - "tmp/dxf_17.dxf", ["HEADER=data/dxf/header_extended.dxf"] + tmp_path / "dxf_17.dxf", ["HEADER=data/dxf/header_extended.dxf"] ) blyr = ds.CreateLayer("blocks") @@ -904,7 +898,7 @@ def test_ogr_dxf_17(): # Reopen and check contents. - ds = ogr.Open("tmp/dxf_17.dxf") + ds = ogr.Open(tmp_path / "dxf_17.dxf") lyr = ds.GetLayer(0) @@ -966,18 +960,16 @@ def test_ogr_dxf_17(): lyr = None ds = None - os.unlink("tmp/dxf_17.dxf") - ############################################################################### # Write a file with line patterns, and make sure corresponding Linetypes are # created. -def test_ogr_dxf_18(): +def test_ogr_dxf_18(tmp_path): ds = ogr.GetDriverByName("DXF").CreateDataSource( - "tmp/dxf_18.dxf", ["HEADER=data/dxf/header_extended.dxf"] + tmp_path / "dxf_18.dxf", ["HEADER=data/dxf/header_extended.dxf"] ) lyr = ds.CreateLayer("entities") @@ -1018,7 +1010,7 @@ def test_ogr_dxf_18(): # Reopen and check contents. 
- ds = ogr.Open("tmp/dxf_18.dxf") + ds = ogr.Open(tmp_path / "dxf_18.dxf") lyr = ds.GetLayer(0) @@ -1074,18 +1066,16 @@ def test_ogr_dxf_18(): lyr = None ds = None - os.unlink("tmp/dxf_18.dxf") - ############################################################################### # Test writing a file using references to blocks defined entirely in the # template - no blocks layer transferred. -def test_ogr_dxf_19(): +def test_ogr_dxf_19(tmp_path): ds = ogr.GetDriverByName("DXF").CreateDataSource( - "tmp/dxf_19.dxf", ["HEADER=data/dxf/header_extended.dxf"] + tmp_path / "dxf_19.dxf", ["HEADER=data/dxf/header_extended.dxf"] ) lyr = ds.CreateLayer("entities") @@ -1101,7 +1091,7 @@ def test_ogr_dxf_19(): # Reopen and check contents. - ds = ogr.Open("tmp/dxf_19.dxf") + ds = ogr.Open(tmp_path / "dxf_19.dxf") lyr = ds.GetLayer(0) @@ -1121,8 +1111,6 @@ def test_ogr_dxf_19(): lyr = None ds = None - os.unlink("tmp/dxf_19.dxf") - ############################################################################### # SPLINE @@ -1173,7 +1161,7 @@ def test_ogr_dxf_21(): # TEXT -def test_ogr_dxf_22(): +def test_ogr_dxf_22(tmp_vsimem): # Read MTEXT feature ds = ogr.Open("data/dxf/text.dxf") @@ -1190,7 +1178,7 @@ def test_ogr_dxf_22(): ogrtest.check_feature_geometry(feat, "POINT(1 2 3)"), "bad geometry" # Write text feature - out_ds = ogr.GetDriverByName("DXF").CreateDataSource("/vsimem/ogr_dxf_22.dxf") + out_ds = ogr.GetDriverByName("DXF").CreateDataSource(tmp_vsimem / "ogr_dxf_22.dxf") out_lyr = out_ds.CreateLayer("entities") out_feat = ogr.Feature(out_lyr.GetLayerDefn()) out_feat.SetStyleString(style) @@ -1203,7 +1191,7 @@ def test_ogr_dxf_22(): ds = None # Check written file - ds = ogr.Open("/vsimem/ogr_dxf_22.dxf") + ds = ogr.Open(tmp_vsimem / "ogr_dxf_22.dxf") lyr = ds.GetLayer(0) feat = lyr.GetNextFeature() @@ -1213,8 +1201,6 @@ def test_ogr_dxf_22(): ds = None - gdal.Unlink("/vsimem/ogr_dxf_22.dxf") - # Now try reading in the MTEXT feature without translating escape sequences with gdal.config_option("DXF_TRANSLATE_ESCAPE_SEQUENCES", "FALSE"): ds = ogr.Open("data/dxf/text.dxf") @@ -1233,10 +1219,10 @@ def test_ogr_dxf_22(): # POLYGON with hole -def test_ogr_dxf_23(): +def test_ogr_dxf_23(tmp_vsimem): # Write polygon - out_ds = ogr.GetDriverByName("DXF").CreateDataSource("/vsimem/ogr_dxf_23.dxf") + out_ds = ogr.GetDriverByName("DXF").CreateDataSource(tmp_vsimem / "ogr_dxf_23.dxf") out_lyr = out_ds.CreateLayer("entities") out_feat = ogr.Feature(out_lyr.GetLayerDefn()) out_feat.SetStyleString("BRUSH(fc:#ff0000)") @@ -1250,7 +1236,7 @@ def test_ogr_dxf_23(): ds = None # Check written file - ds = ogr.Open("/vsimem/ogr_dxf_23.dxf") + ds = ogr.Open(tmp_vsimem / "ogr_dxf_23.dxf") lyr = ds.GetLayer(0) feat = lyr.GetNextFeature() @@ -1260,8 +1246,6 @@ def test_ogr_dxf_23(): ds = None - gdal.Unlink("/vsimem/ogr_dxf_23.dxf") - ############################################################################### # HATCH @@ -1335,17 +1319,15 @@ def test_ogr_dxf_26(): # Test reading a DXF file without .dxf extensions (#5994) -def test_ogr_dxf_27(): +def test_ogr_dxf_27(tmp_vsimem): gdal.FileFromMemBuffer( - "/vsimem/a_dxf_without_extension", open("data/dxf/solid.dxf").read() + tmp_vsimem / "a_dxf_without_extension", open("data/dxf/solid.dxf").read() ) - ds = ogr.Open("/vsimem/a_dxf_without_extension") + ds = ogr.Open(tmp_vsimem / "a_dxf_without_extension") assert ds is not None - gdal.Unlink("/vsimem/a_dxf_without_extension") - ############################################################################### # Test reading a ELLIPSE 
with Z extrusion axis value of -1.0 (#5075) @@ -2552,8 +2534,8 @@ def test_ogr_dxf_33(): # Writing Triangle geometry and checking if it is written properly -def test_ogr_dxf_34(): - ds = ogr.GetDriverByName("DXF").CreateDataSource("tmp/triangle_test.dxf") +def test_ogr_dxf_34(tmp_path): + ds = ogr.GetDriverByName("DXF").CreateDataSource(tmp_path / "triangle_test.dxf") lyr = ds.CreateLayer("entities") dst_feat = ogr.Feature(feature_def=lyr.GetLayerDefn()) dst_feat.SetGeometryDirectly( @@ -2567,7 +2549,7 @@ def test_ogr_dxf_34(): ds = None # Read back. - ds = ogr.Open("tmp/triangle_test.dxf") + ds = ogr.Open(tmp_path / "triangle_test.dxf") lyr = ds.GetLayer(0) # Check first feature @@ -2581,8 +2563,6 @@ def test_ogr_dxf_34(): ) ds = None - gdal.Unlink("tmp/triangle_test.dxf") - ############################################################################### # Test reading hatch with elliptical harts @@ -2806,9 +2786,9 @@ def test_ogr_dxf_36(): # Create a blocks layer only -def test_ogr_dxf_37(): +def test_ogr_dxf_37(tmp_vsimem): - ds = ogr.GetDriverByName("DXF").CreateDataSource("/vsimem/ogr_dxf_37.dxf") + ds = ogr.GetDriverByName("DXF").CreateDataSource(tmp_vsimem / "ogr_dxf_37.dxf") lyr = ds.CreateLayer("blocks") @@ -2822,7 +2802,7 @@ def test_ogr_dxf_37(): # Read back. with gdal.config_option("DXF_INLINE_BLOCKS", "FALSE"): - ds = ogr.Open("/vsimem/ogr_dxf_37.dxf") + ds = ogr.Open(tmp_vsimem / "ogr_dxf_37.dxf") lyr = ds.GetLayerByName("blocks") # Check first feature @@ -2830,8 +2810,6 @@ def test_ogr_dxf_37(): assert feat is not None ds = None - gdal.Unlink("/vsimem/ogr_dxf_37.dxf") - ############################################################################### # Test degenerated cases of SOLID (#7038) @@ -3935,9 +3913,9 @@ def test_ogr_dxf_insert_too_many_errors(): ############################################################################### -def test_ogr_dxf_write_geometry_collection_of_unsupported_type(): +def test_ogr_dxf_write_geometry_collection_of_unsupported_type(tmp_vsimem): - tmpfile = "/vsimem/ogr_dxf_write_geometry_collection_of_unsupported_type.dxf" + tmpfile = tmp_vsimem / "ogr_dxf_write_geometry_collection_of_unsupported_type.dxf" ds = ogr.GetDriverByName("DXF").CreateDataSource(tmpfile) lyr = ds.CreateLayer("test") f = ogr.Feature(lyr.GetLayerDefn()) @@ -3946,7 +3924,6 @@ def test_ogr_dxf_write_geometry_collection_of_unsupported_type(): ret = lyr.CreateFeature(f) assert ret != 0 ds = None - gdal.Unlink(tmpfile) ############################################################################### @@ -3971,9 +3948,9 @@ def test_ogr_dxf_very_close_neg_to_zero_knot(): ############################################################################### -def test_ogr_dxf_polygon_3D(): +def test_ogr_dxf_polygon_3D(tmp_vsimem): - tmpfile = "/vsimem/test_ogr_dxf_polygon_3D.dxf" + tmpfile = tmp_vsimem / "test_ogr_dxf_polygon_3D.dxf" ds = ogr.GetDriverByName("DXF").CreateDataSource(tmpfile) lyr = ds.CreateLayer("test") f = ogr.Feature(lyr.GetLayerDefn()) @@ -3986,7 +3963,6 @@ def test_ogr_dxf_polygon_3D(): f = lyr.GetNextFeature() got_g = f.GetGeometryRef() assert got_g.Equals(g) - gdal.Unlink(tmpfile) ############################################################################### From 003eed032551a54f895bca6e12d560d7659fb22e Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Fri, 5 Apr 2024 22:25:51 -0400 Subject: [PATCH 073/230] autotest: ogr_gpx.py: use tmp_vsimem --- autotest/ogr/ogr_gpx.py | 48 +++++++++++------------------------------ 1 file changed, 13 
insertions(+), 35 deletions(-) diff --git a/autotest/ogr/ogr_gpx.py b/autotest/ogr/ogr_gpx.py index 7671d7579aad..0d6d156fc99c 100755 --- a/autotest/ogr/ogr_gpx.py +++ b/autotest/ogr/ogr_gpx.py @@ -28,8 +28,6 @@ # DEALINGS IN THE SOFTWARE. ############################################################################### -import os - import ogrtest import pytest @@ -49,11 +47,6 @@ def startup_and_cleanup(): yield - try: - os.remove("tmp/gpx.gpx") - except OSError: - pass - ############################################################################### # Test waypoints gpx layer. @@ -208,13 +201,8 @@ def test_ogr_gpx_5(): # Copy our small gpx file to a new gpx file. -def test_ogr_gpx_6(): +def test_ogr_gpx_6(tmp_path): gpx_ds = ogr.Open("data/gpx/test.gpx") - try: - with gdal.quiet_errors(): - ogr.GetDriverByName("CSV").DeleteDataSource("tmp/gpx.gpx") - except Exception: - pass co_opts = [] @@ -222,7 +210,7 @@ def test_ogr_gpx_6(): gpx_lyr = gpx_ds.GetLayerByName("waypoints") gpx2_ds = ogr.GetDriverByName("GPX").CreateDataSource( - "tmp/gpx.gpx", options=co_opts + tmp_path / "gpx.gpx", options=co_opts ) gpx2_lyr = gpx2_ds.CreateLayer("waypoints", geom_type=ogr.wkbPoint) @@ -275,21 +263,18 @@ def test_ogr_gpx_6(): # Output extra fields as <extensions>. -def test_ogr_gpx_7(): +def test_ogr_gpx_7(tmp_path): bna_ds = ogr.Open("data/gpx/csv_for_gpx.csv") - try: - os.remove("tmp/gpx.gpx") - except OSError: - pass - co_opts = ["GPX_USE_EXTENSIONS=yes"] # Duplicate waypoints bna_lyr = bna_ds.GetLayerByName("csv_for_gpx") - gpx_ds = ogr.GetDriverByName("GPX").CreateDataSource("tmp/gpx.gpx", options=co_opts) + gpx_ds = ogr.GetDriverByName("GPX").CreateDataSource( + tmp_path / "gpx.gpx", options=co_opts + ) gpx_lyr = gpx_ds.CreateLayer("waypoints", geom_type=ogr.wkbPoint) @@ -311,7 +296,7 @@ def test_ogr_gpx_7(): gpx_ds = None # Now check that the extensions fields have been well written - gpx_ds = ogr.Open("tmp/gpx.gpx") + gpx_ds = ogr.Open(tmp_path / "gpx.gpx") gpx_lyr = gpx_ds.GetLayerByName("waypoints") expect = ["PID1", "PID2"] @@ -335,15 +320,10 @@ def test_ogr_gpx_7(): # Output extra fields as <extensions>. 
-def test_ogr_gpx_8(): - - try: - os.remove("tmp/gpx.gpx") - except OSError: - pass +def test_ogr_gpx_8(tmp_path): gpx_ds = ogr.GetDriverByName("GPX").CreateDataSource( - "tmp/gpx.gpx", options=["LINEFORMAT=LF"] + tmp_path / "gpx.gpx", options=["LINEFORMAT=LF"] ) lyr = gpx_ds.CreateLayer("route_points", geom_type=ogr.wkbPoint) @@ -410,7 +390,7 @@ def test_ogr_gpx_8(): gpx_ds = None - f = open("tmp/gpx.gpx", "rb") + f = open(tmp_path / "gpx.gpx", "rb") f_ref = open("data/gpx/ogr_gpx_8_ref.txt", "rb") f_content = f.read() f_ref_content = f_ref.read() @@ -469,7 +449,7 @@ def test_ogr_gpx_metadata_read(): # Test writing metadata -def test_ogr_gpx_metadata_write(): +def test_ogr_gpx_metadata_write(tmp_vsimem): md = { "AUTHOR_EMAIL": "foo@example.com", @@ -497,18 +477,16 @@ def test_ogr_gpx_metadata_write(): options.append("METADATA_" + key + "=" + md[key]) gpx_ds = ogr.GetDriverByName("GPX").CreateDataSource( - "/vsimem/gpx.gpx", options=options + tmp_vsimem / "gpx.gpx", options=options ) assert gpx_ds is not None gpx_ds = None - ds = ogr.Open("/vsimem/gpx.gpx") + ds = ogr.Open(tmp_vsimem / "gpx.gpx") # print(ds.GetMetadata()) assert ds.GetMetadata() == md ds = None - gdal.Unlink("/vsimem/gpx.gpx") - ############################################################################### # Test CREATOR option From 666bb38014a0d70eca756597a2eb06114325deb5 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Fri, 5 Apr 2024 22:26:34 -0400 Subject: [PATCH 074/230] autotest: ogr_index_test.py: use tmp_path --- autotest/ogr/ogr_index_test.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/autotest/ogr/ogr_index_test.py b/autotest/ogr/ogr_index_test.py index 467646f3951c..a96af7e7c72b 100755 --- a/autotest/ogr/ogr_index_test.py +++ b/autotest/ogr/ogr_index_test.py @@ -44,9 +44,6 @@ def startup_and_cleanup(): for filename in ["join_t.idm", "join_t.ind"]: assert not os.path.exists(filename) - ogr.GetDriverByName("ESRI Shapefile").DeleteDataSource("tmp/ogr_index_10.shp") - ogr.GetDriverByName("ESRI Shapefile").DeleteDataSource("tmp/ogr_index_11.dbf") - @contextlib.contextmanager def create_index_p_test_file(): @@ -313,9 +310,11 @@ def test_ogr_index_creating_index_in_separate_steps_works(): # Test fix for #4326 -def test_ogr_index_10(): +def test_ogr_index_10(tmp_path): - ds = ogr.GetDriverByName("ESRI Shapefile").CreateDataSource("tmp/ogr_index_10.shp") + ds = ogr.GetDriverByName("ESRI Shapefile").CreateDataSource( + tmp_path / "ogr_index_10.shp" + ) lyr = ds.CreateLayer("ogr_index_10") lyr.CreateField(ogr.FieldDefn("intfield", ogr.OFTInteger)) lyr.CreateField(ogr.FieldDefn("realfield", ogr.OFTReal)) @@ -430,9 +429,11 @@ def ogr_index_11_check(lyr, expected_fids): assert feat.GetFID() == expected_fid -def test_ogr_index_11(): +def test_ogr_index_11(tmp_path): - ds = ogr.GetDriverByName("ESRI Shapefile").CreateDataSource("tmp/ogr_index_11.dbf") + ds = ogr.GetDriverByName("ESRI Shapefile").CreateDataSource( + tmp_path / "ogr_index_11.dbf" + ) lyr = ds.CreateLayer("ogr_index_11", geom_type=ogr.wkbNone) lyr.CreateField(ogr.FieldDefn("intfield", ogr.OFTInteger)) lyr.CreateField(ogr.FieldDefn("strfield", ogr.OFTString)) From f7068cae99330182848c5d6087ac19f874c82395 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 18 Apr 2024 18:18:57 +0200 Subject: [PATCH 075/230] Update pci_datum.txt and pci_ellips.txt (fixes #8034) --- ogr/data/pci_datum.txt | 377 +++++++++++++++++++++++----------------- ogr/data/pci_ellips.txt | 128 
++++++++++---- 2 files changed, 312 insertions(+), 193 deletions(-) diff --git a/ogr/data/pci_datum.txt b/ogr/data/pci_datum.txt index 8c96b0e1246f..bc4d4cea4c3d 100644 --- a/ogr/data/pci_datum.txt +++ b/ogr/data/pci_datum.txt @@ -1,67 +1,76 @@ ! -! By email on December 2nd, 2010: +! From https://github.com/OSGeo/gdal/issues/8034, June 30, 2023 ! -! I, Louis Burry, on behalf of PCI Geomatics agree to allow the ellips.txt +! I, Michael Goldberg, on behalf of PCI Geomatics agree to allow the ellips.txt ! and datum.txt file to be distributed under the GDAL open source license. ! -! Louis Burry -! VP Technology & Delivery +! Michael Goldberg +! Development Manager ! PCI Geomatics ! -! NOTE: The range of "D900" to "D998" is set aside for +! +! NOTE: The range of "D950" to "D998" is set aside for ! the use of local customer development. ! ! And the range of "D-90" to "D-98" is set aside for ! the use of local customer development. ! +!For datums using a grid shift file entries are: +!DatumNumber,DatumName,EllipsoidNumber,Location,GridShiftTo,GridShiftFile,GridShiftFile +!If GridShiftTo is negative the shift is reversed +!For datums not using a grid shift file converting to WGS84 using coordinate frame rotation +! (EPSG:9607 which is opposite rotation to EPSG TOWGS84) entries are: +!DatumNumber,DatumName,EllipsoidNumber,XOffset,YOffset,ZOffset,Location,XSigma,YSigma,ZSigma,Doppler,XRotate,YRotate,ZRotate,Scale "DoD World Geodetic System 1984, DMA TR 8350.2" "4 JUL 1997, Third Printing, Includes 3 JAN 2000 Updates" -"D-01","NAD27 (USA, NADCON)","E000","Conterminous U.S.","conus.los","conus.las" -"D-02","NAD83 (USA, NADCON)","E008","Conterminous U.S.","conus.los","conus.las" -"D-03","NAD27 (Canada, NTv1)","E000","Canada","grid.dac" -"D-04","NAD83 (Canada, NTv1)","E008","Canada","grid.dac" -"D-07","NAD27 (USA, NADCON)","E000","Alaska","alaska.los","alaska.las" -"D-08","NAD83 (USA, NADCON)","E008","Alaska","alaska.los","alaska.las" -"D-09","NAD27 (USA, NADCON)","E000","St. George","stgeorge.los","stgeorge.las" -"D-10","NAD83 (USA, NADCON)","E008","St. George","stgeorge.los","stgeorge.las" -"D-11","NAD27 (USA, NADCON)","E000","St. Lawrence","stlrnc.los","stlrnc.las" -"D-12","NAD83 (USA, NADCON)","E008","St. Lawrence","stlrnc.los","stlrnc.las" -"D-13","NAD27 (USA, NADCON)","E000","St. Paul","stpaul.los","stpaul.las" -"D-14","NAD83 (USA, NADCON)","E008","St. 
Paul","stpaul.los","stpaul.las" -"D-15","Old Hawaiian (USA, NADCON)","E000","Hawaii","hawaii.los","hawaii.las" -"D-16","NAD83 (USA, NADCON)","E008","Hawaii","hawaii.los","hawaii.las" -"D-17","NAD27 (USA, NADCON)","E000","Puerto Rico Virgin Islands","prvi.los","prvi.las" -"D-18","NAD83 (USA, NADCON)","E008","Puerto Rico Virgin Islands","prvi.los","prvi.las" -!"D-19","AGD66 (NTv2)","E014","Australia","A66 National (13.09.01).gsb" -!"D-20","AGD84 (NTv2)","E014","Australia","National 84 (02.07.01).gsb" -!"D-21","GDA94 (from AGD66, NTv2)","E008","Australia","A66 National (13.09.01).gsb" -!"D-22","GDA94 (from AGD84, NTv2)","E008","Australia","National 84 (02.07.01).gsb" -!"D-23","NZGD49 (NTv2)","E004","New Zealand","nzgd2kgrid0005.gsb" -!"D-24","NZGD2000 (NTv2)","E008","New Zealand","nzgd2kgrid0005.gsb" -!"D-66","NAD27 (NTv2)","E000","Quebec","na27scrs.gsb" -!"D-67","NAD83 (SCRS) (NTv2)","E008","Quebec","na27scrs.gsb" -!"D-68","NAD27 (NTv2)","E000","Quebec","na27na83.gsb" -!"D-69","NAD83 (NTv2)","E008","Quebec","na27na83.gsb" -!"D-70","NAD27 (CGQ77) (NTv2)","E000","Quebec","cq77scrs.gsb" -!"D-71","NAD83 (SCRS) (NTv2)","E008","Quebec","cq77scrs.gsb" -!"D-72","NAD27 (CGQ77) (NTv2)","E000","Quebec","cq77na83.gsb" -!"D-73","NAD83 (NTv2)","E008","Quebec","cq77na83.gsb" -!"D-74","NAD83 (NTv2)","E008","Quebec","na83scrs.gsb" -!"D-75","NAD83 (SCRS) (NTv2)","E008","Quebec","na83scrs.gsb" -!"D-76","NAD27 (NTv2)","E000","Saskatchewan","sk27-98.gsb" -!"D-77","NAD83 (CSRS98) (NTv2)","E008","Saskatchewan","sk27-98.gsb" -!"D-78","NAD83 (NTv2)","E008","Saskatchewan","sk83-98.gsb" -!"D-79","NAD83 (CSRS98) (NTv2)","E008","Saskatchewan","sk83-98.gsb" -!"D-80","ATS77 (NTv2)","E910","Nova Scotia","ns778301.gsb" -!"D-81","NAD83 (CSRS98) (NTv2)","E008","Nova Scotia","ns778301.gsb" -!"D-82","ATS77 (NTv2)","E910","Prince Edward Island","pe7783v2.gsb" -!"D-83","NAD83 (CSRS98) (NTv2)","E008","Prince Edward Island","pe7783v2.gsb" -!"D-84","ATS77 (NTv2)","E910","New Brunswick","nb7783v2.gsb" -!"D-85","NAD83 (CSRS98) (NTv2)","E008","New Brunswick","nb7783v2.gsb" -!"D-86","NAD27 (NTv2)","E000","Canada","ntv2_0.gsb" -!"D-87","NAD83 (NTv2)","E008","Canada","ntv2_0.gsb" -!"D-88","NAD27 (1976) (NTv2)","E000","Ontario","may76v20.gsb" -!"D-89","NAD83 (NTv2)","E008","Ontario","may76v20.gsb" +"D-01","NAD27 (USA, NADCON)","E000","Conterminous U.S.","D122","conus.los","conus.las" +"D-02","NAD83 (Deprecated - use D122)","E008",0,0,0,"Conterminous U.S.",2,2,2,354 +"D-03","NAD27 (Canada, NTv1)","E000","Canada","D122","grid.dac" +"D-04","NAD83 (Deprecated - use D122)","E008",0,0,0,"Canada",2,2,2,354 +"D-07","NAD27 (USA, NADCON)","E000","Alaska","D122","alaska.los","alaska.las" +"D-08","NAD83 (Deprecated - use D122)","E008",0,0,0,"Alaska",2,2,2,354 +"D-09","NAD27 (USA, NADCON)","E000","St. George","D122","stgeorge.los","stgeorge.las" +"D-10","NAD83 (Deprecated - use D122)","E008",0,0,0,"St. George",2,2,2,354 +"D-11","NAD27 (USA, NADCON)","E000","St. Lawrence","D122","stlrnc.los","stlrnc.las" +"D-12","NAD83 (Deprecated - use D122)","E008",0,0,0,"St. Lawrence",2,2,2,354 +"D-13","NAD27 (USA, NADCON)","E000","St. Paul","D122","stpaul.los","stpaul.las" +"D-14","NAD83 (Deprecated - use D122)","E008",0,0,0,"St. 
Paul",2,2,2,354 +"D-15","Old Hawaiian (USA, NADCON)","E000","Hawaii","D122","hawaii.los","hawaii.las" +"D-16","NAD83 (Deprecated - use D122)","E008",0,0,0,"Hawaii",2,2,2,354 +"D-17","NAD27 (USA, NADCON)","E000","Puerto Rico Virgin Islands","D122","prvi.los","prvi.las" +"D-18","NAD83 (Deprecated - use D122)","E008",0,0,0,"Puerto Rico Virgin Islands",2,2,2,354 +"D-21","GDA94 (from AGD66, NTv2)","E008","Australia","D029","A66_National_13_09_01_.gsb" +"D-22","GDA94 (from AGD84, NTv2)","E008","Australia","D030","National_84_02.07.01.gsb" +"D-24","NZGD2000 (NTv2)","E008","New Zealand","D510","nzgd2kgrid0005.gsb" +"D-25","GDA2020 (conformal, from GDA94, NTv2)","E008","Australia","D536","GDA94_GDA2020_conformal.gsb" +"D-26","GDA2020 (conformal and distortion, from GDA94, NTv2)","E008","Australia","D536","GDA94_GDA2020_conformal_and_distortion.gsb" +"D-27","GDA2020 (conformal, from GDA94, NTv2)","E008","Australia (Christmas Island)","D536","GDA94_GDA2020_conformal_christmas_island.gsb" +"D-28","GDA2020 (conformal, from GDA94, NTv2)","E008","Australia (Cocos Islands)","D536","GDA94_GDA2020_conformal_cocos_island.gsb" +"D-55","NAD83 (CSRS 2002) (NTv2)","E008","British Columbia","D122","BC_93_05.gsb" +"D-56","NAD27 (NTv2)","E000","British Columbia","-D-55","BC_27_05.gsb" +"D-57","NAD83 (CSRS) (NTv2)","E008","BC (CRD)","D122","CRD93_00.gsb" +"D-58","NAD27 (NTv2)","E000","BC (CRD)","-D-57","CRD27_00.gsb" +"D-59","NAD83 (CSRS) (NTv2)","E008","BC (Vancouver Island)","D122","NVI93_05.gsb" +"D-62","NAD27 (NTv2)","E000","Ontario (Toronto)","-D-65","TO27CSv1.gsb" +"D-63","NAD27 (NTv2)","E000","Ontario","-D-65","ON27CSv1.gsb" +"D-64","NAD27 (1976) (NTv2)","E000","Ontario","-D-65","ON76CSv1.gsb" +"D-65","NAD83 (CSRS98) (NTv2)","E008","Ontario","D122","ON83CSv1.gsb" +"D-67","NAD83 (SCRS) (NTv2)","E008","Quebec","D-68","na27scrs.gsb" +"D-68","NAD27 (NTv2)","E000","Quebec","-D122","na27na83.gsb" +"D-71","NAD83 (SCRS) (NTv2)","E008","Quebec","D-72","cq77scrs.gsb" +"D-72","NAD27 (CGQ77) (NTv2)","E000","Quebec","D122","cq77na83.gsb" +"D-75","NAD83 (SCRS) (NTv2)","E008","Quebec","D122","na83scrs.gsb" +"D-76","NAD27 (NTv2)","E000","Saskatchewan","-D-79","sk27-98.gsb" +"D-77","NAD27 (NTv2)","E000","Saskatchewan","-D122","sk27-83.gsb" +"D-79","NAD83 (CSRS98) (NTv2)","E008","Saskatchewan","-D122","sk83-98.gsb" +"D-81","NAD83 (CSRS98) (NTv2)","E008","Nova Scotia","D895","ns778301.gsb" +"D-82","ATS77 (NTv2)","E910","Nova Scotia","-D122","GS7783.GSB" +"D-83","NAD83 (CSRS98) (NTv2)","E008","Prince Edward Island","D895","pe7783v2.gsb" +"D-84","NAD83 (CSRS98) (NTv2)","E008","New Brunswick","D122","nb2783v2.gsb" +"D-85","NAD83 (CSRS98) (NTv2)","E008","New Brunswick","D895","nb7783v2.gsb" +"D-86","NAD27 (NTv2)","E000","Canada","-D122","ntv2_0.gsb" +"D-87","NAD83 (CSRS98) (NTv2)","E008","Alberta","D122","ABCSRSV4.DAC" +"D-88","NAD27 (1976) (NTv2)","E000","Ontario","D122","may76v20.gsb" "D800","Normal Sphere","E019",0,0,0,"",0,0,0,0 "D000","WGS 1984","E012",0,0,0,"Global Definition",0,0,0,0 "D001","WGS 1972","E005",0,0,0,"Global Definition",3,3,3,1 @@ -215,7 +224,7 @@ "D149","Provisional S. American 1956","E004",-295,173,-371,"Venezuela",9,14,15,24 "D150","Provisional S. 
Chilean 1963","E004",16,196,93,"Chile (South, Near 53dS) (Hito XVIII)",25,25,25,2 "D151","Puerto Rico","E000",11,72,-101,"Puerto Rico, Virgin Islands",3,3,3,11 -"D152","Qatar National","E004",-128,-283,22,"Qatar",20,20,20,3 +"D152","Qatar National Datum 1995","E004",-127.78098,-283.37477,21.24081,"Qatar",20,20,20,3 "D153","Qornoq","E004",164,138,-189,"Greenland (South)",25,25,32,2 "D154","Reunion","E004",94,-948,-1262,"Mascarene Islands",25,25,25,1 "D155","Rome 1940","E004",-225,-65,9,"Italy (Sardinia)",25,25,25,1 @@ -225,7 +234,7 @@ "D159","Schwarzeck","E900",616,97,-251,"Namibia",20,20,20,3 "D160","Selvagem Grande 1938","E004",-289,-124,60,"Salvage Islands",25,25,25,1 "D161","SGS 85","E905",3,9,-9,"Soviet Geodetic System 1985",10,10,10,1 -"D162","South American 1969","E907",-57,1,-41,"MEAN Solution,",15,6,9,84 +"D162","South American 1969 (SAD69)","E907",-57,1,-41,"MEAN Solution,",15,6,9,84 "D163","South American 1969","E907",-62,-1,-37,"Argentina",5,5,5,10 "D164","South American 1969","E907",-61,2,-48,"Bolivia",15,15,15,4 "D165","South American 1969 (old)","E907",-60,-2,-41,"Brazil",3,5,5,22 @@ -316,7 +325,7 @@ "D516","SL datum 1999","E006",-0.2933,766.9499,87.7131,"Sri Lanka",0,0,0,0,-0.1957040,-1.6950677,-3.4730161,-0.0393 "D517","Cape (Supercedes D040)","E205",-134.73,-110.92,-292.66,"South Africa",0,0,0,0 "D518","Hartebeesthoek94","E012",0,0,0,"South Africa",0,0,0,0 -"D519","Abidjan 1987","E001",-124.76,53,466.79,"C\uffffte d'Ivoire",0,0,0,0 +"D519","Abidjan 1987","E001",-124.76,53,466.79,"Cote d'Ivoire",0,0,0,0 "D520","Accra","E204",-199,32,322,"Ghana",0,0,0,0 "D521","Azores Central 1948","E004",-104,167,-38,"Azores",0,0,0,0 "D522","Azores Oriental 1940","E004",-203,141,53,"Azores",0,0,0,0 @@ -351,113 +360,171 @@ "D600","D-PAF (Orbits)","E600",0.082,-0.502,-0.224,"Satellite Orbits",0,0,0,0,0.30444,0.04424,0.00609,0.9999999937 "D601","Test Data Set 1","E601",0.071,-0.509,-0.166,"Test 1",0,0,0,0,0.0179,-0.0005,0.0067,0.999999983 "D602","Test Data Set 2","E602",580.0,80.9,399.8,"Test 2",0,0,0,0,0.35,0.1,3.026,1.0000113470025 +"D610","US Standard Datum (USA, NADCON5)","E000","Conterminous U.S.","D611","nadcon5.ussd.nad27.conus.lon.trn.20160901.b","nadcon5.ussd.nad27.conus.lat.trn.20160901.b" +"D611","NAD27 (USA, NADCON5)","E000","Conterminous U.S.","D122","nadcon5.nad27.nad83_1986.conus.lon.trn.20160901.b","nadcon5.nad27.nad83_1986.conus.lat.trn.20160901.b" +"D612","NAD83 (HARN) (USA, NADCON5)","E008","Conterminous U.S.","-D122","nadcon5.nad83_1986.nad83_harn.conus.lon.trn.20160901.b","nadcon5.nad83_1986.nad83_harn.conus.lat.trn.20160901.b" +"D613","NAD83 (FBN) (USA, NADCON5)","E008","Conterminous U.S.","-D612","nadcon5.nad83_harn.nad83_fbn.conus.lon.trn.20160901.b","nadcon5.nad83_harn.nad83_fbn.conus.lat.trn.20160901.b","nadcon5.nad83_harn.nad83_fbn.conus.eht.trn.20160901.b" +"D614","NAD83 (NSRS 2007) (USA, NADCON5)","E008","Conterminous U.S.","-D613","nadcon5.nad83_fbn.nad83_2007.conus.lon.trn.20160901.b","nadcon5.nad83_fbn.nad83_2007.conus.lat.trn.20160901.b","nadcon5.nad83_fbn.nad83_2007.conus.eht.trn.20160901.b" +"D615","NAD83 (2011) (USA, NADCON5)","E008","Conterminous U.S.","-D614","nadcon5.nad83_2007.nad83_2011.conus.lon.trn.20160901.b","nadcon5.nad83_2007.nad83_2011.conus.lat.trn.20160901.b","nadcon5.nad83_2007.nad83_2011.conus.eht.trn.20160901.b" +"D620","Puerto Rico Datum, adjustment of 1940 (USA, NADCON5)","E000","Puerto Rico, Virgin Islands","D122","nadcon5.pr40.nad83_1986.prvi.lon.trn.20160901.b","nadcon5.pr40.nad83_1986.prvi.lat.trn.20160901.b" +"D621","NAD83 (1993) 
(USA, NADCON5)","E008","Puerto Rico, Virgin Islands","-D122","nadcon5.nad83_1986.nad83_1993.prvi.lon.trn.20160901.b","nadcon5.nad83_1986.nad83_1993.prvi.lat.trn.20160901.b" +"D622","NAD83 (1997) (USA, NADCON5)","E008","Puerto Rico, Virgin Islands","-D621","nadcon5.nad83_1993.nad83_1997.prvi.lon.trn.20160901.b","nadcon5.nad83_1993.nad83_1997.prvi.lat.trn.20160901.b","nadcon5.nad83_1993.nad83_1997.prvi.eht.trn.20160901.b" +"D623","NAD83 (2002) (USA, NADCON5)","E008","Puerto Rico, Virgin Islands","-D622","nadcon5.nad83_1997.nad83_2002.prvi.lon.trn.20160901.b","nadcon5.nad83_1997.nad83_2002.prvi.lat.trn.20160901.b","nadcon5.nad83_1997.nad83_2002.prvi.eht.trn.20160901.b" +"D624","NAD83 (NSRS 2007) (USA, NADCON5)","E008","Puerto Rico, Virgin Islands","-D623","nadcon5.nad83_2002.nad83_2007.prvi.lon.trn.20160901.b","nadcon5.nad83_2002.nad83_2007.prvi.lat.trn.20160901.b","nadcon5.nad83_2002.nad83_2007.prvi.eht.trn.20160901.b" +"D625","NAD83 (2011) (USA, NADCON5)","E008","Puerto Rico, Virgin Islands","-D624","nadcon5.nad83_2007.nad83_2011.prvi.lon.trn.20160901.b","nadcon5.nad83_2007.nad83_2011.prvi.lat.trn.20160901.b","nadcon5.nad83_2007.nad83_2011.prvi.eht.trn.20160901.b" +"D630","Old Hawaiian Datum (USA, NADCON5)","E000","Hawaii","D122","nadcon5.ohd.nad83_1986.hawaii.lon.trn.20160901.b","nadcon5.ohd.nad83_1986.hawaii.lat.trn.20160901.b" +"D631","NAD83 (1993) (USA, NADCON5)","E008","Hawaii","-D122","nadcon5.nad83_1986.nad83_1993.hawaii.lon.trn.20160901.b","nadcon5.nad83_1986.nad83_1993.hawaii.lat.trn.20160901.b" +"D632","NAD83 (PA11) (USA, NADCON5)","E008","Hawaii","-D631","nadcon5.nad83_1993.nad83_pa11.hawaii.lon.trn.20160901.b","nadcon5.nad83_1993.nad83_pa11.hawaii.lat.trn.20160901.b","nadcon5.nad83_1993.nad83_pa11.hawaii.eht.trn.20160901.b" +"D640","NAD27 (USA, NADCON5)","E000","Alaska","D122","nadcon5.nad27.nad83_1986.alaska.lon.trn.20160901.b","nadcon5.nad27.nad83_1986.alaska.lat.trn.20160901.b" +"D641","NAD83 (1992) (USA, NADCON5)","E008","Alaska","-D122","nadcon5.nad83_1986.nad83_1992.alaska.lon.trn.20160901.b","nadcon5.nad83_1986.nad83_1992.alaska.lat.trn.20160901.b" +"D642","NAD83 (NSRS 2007) (USA, NADCON5)","E008","Alaska","-D641","nadcon5.nad83_1992.nad83_2007.alaska.lon.trn.20160901.b","nadcon5.nad83_1992.nad83_2007.alaska.lat.trn.20160901.b","nadcon5.nad83_1992.nad83_2007.alaska.eht.trn.20160901.b" +"D643","NAD83 (2011) (USA, NADCON5)","E008","Alaska","-D642","nadcon5.nad83_2007.nad83_2011.alaska.lon.trn.20160901.b","nadcon5.nad83_2007.nad83_2011.alaska.lat.trn.20160901.b","nadcon5.nad83_2007.nad83_2011.alaska.eht.trn.20160901.b" +"D650","St. Paul 1897 (USA, NADCON5)","E000","St. Paul, Alaska","D651","nadcon5.sp1897.sp1952.stpaul.lon.trn.20160901.b","nadcon5.sp1897.sp1952.stpaul.lat.trn.20160901.b" +"D651","St. Paul 1952 (USA, NADCON5)","E000","St. Paul, Alaska","D122","nadcon5.sp1952.nad83_1986.stpaul.lon.trn.20160901.b","nadcon5.sp1952.nad83_1986.stpaul.lat.trn.20160901.b" +"D652","St. George 1897 (USA, NADCON5)","E000","St. George, Alaska","D653","nadcon5.sg1897.sg1952.stgeorge.lon.trn.20160901.b","nadcon5.sg1897.sg1952.stgeorge.lat.trn.20160901.b" +"D653","St. George 1952 (USA, NADCON5)","E000","St. George, Alaska","D122","nadcon5.sg1952.nad83_1986.stgeorge.lon.trn.20160901.b","nadcon5.sg1952.nad83_1986.stgeorge.lat.trn.20160901.b" +"D654","St. Lawrence 1952 (USA, NADCON5)","E000","St. 
Lawrence, Alaska","D122","nadcon5.sl1952.nad83_1986.stlawrence.lon.trn.20160901.b","nadcon5.sl1952.nad83_1986.stlawrence.lat.trn.20160901.b" +"D660","American Samoa 1962 (USA, NADCON5)","E000","American Samoa","D122","nadcon5.as62.nad83_1993.as.lon.trn.20160901.b","nadcon5.as62.nad83_1993.as.lat.trn.20160901.b" +"D661","NAD83 (2002) (USA, NADCON5)","E008","American Samoa","-D122","nadcon5.nad83_1993.nad83_2002.as.lon.trn.20160901.b","nadcon5.nad83_1993.nad83_2002.as.lat.trn.20160901.b","nadcon5.nad83_1993.nad83_2002.as.eht.trn.20160901.b" +"D662","NAD83 (PA11) (USA, NADCON5)","E008","American Samoa","-D661","nadcon5.nad83_2002.nad83_pa11.as.lon.trn.20160901.b","nadcon5.nad83_2002.nad83_pa11.as.lat.trn.20160901.b","nadcon5.nad83_2002.nad83_pa11.as.eht.trn.20160901.b" +"D670","Guam 1963 (USA, NADCON5)","E000","Guam and the Commonwealth of the Northern Mariana Islands","D122","nadcon5.gu63.nad83_1993.guamcnmi.lon.trn.20160901.b","nadcon5.gu63.nad83_1993.guamcnmi.lat.trn.20160901.b" +"D671","NAD83 (2002) (USA, NADCON5)","E008","Guam and the Commonwealth of the Northern Mariana Islands","-D122","nadcon5.nad83_1993.nad83_2002.guamcnmi.lon.trn.20160901.b","nadcon5.nad83_1993.nad83_2002.guamcnmi.lat.trn.20160901.b","nadcon5.nad83_1993.nad83_2002.guamcnmi.eht.trn.20160901.b" +"D672","NAD83 (MA11) (USA, NADCON5)","E008","Guam and the Commonwealth of the Northern Mariana Islands","-D671","nadcon5.nad83_2002.nad83_ma11.guamcnmi.lon.trn.20160901.b","nadcon5.nad83_2002.nad83_ma11.guamcnmi.lat.trn.20160901.b","nadcon5.nad83_2002.nad83_ma11.guamcnmi.eht.trn.20160901.b" "D700","MODIS","E700",0,0,0,"Global Definition",0,0,0,0 -"D701","NAD83 (USA, NADCON)","E008","Alabama","alhpgn.los","alhpgn.las" -"D702","NAD83 HARN (USA, NADCON)","E008","Alabama","alhpgn.los","alhpgn.las" -"D703","NAD83 (USA, NADCON)","E008","Arkansas","arhpgn.los","arhpgn.las" -"D704","NAD83 HARN (USA, NADCON)","E008","Arkansas","arhpgn.los","arhpgn.las" -"D705","NAD83 (USA, NADCON)","E008","Arizona","azhpgn.los","azhpgn.las" -"D706","NAD83 HARN (USA, NADCON)","E008","Arizona","azhpgn.los","azhpgn.las" -"D707","NAD83 (USA, NADCON)","E008","California (North of 37dN)","cnhpgn.los","cnhpgn.las" -"D708","NAD83 HARN (USA, NADCON)","E008","California (North of 37dN)","cnhpgn.los","cnhpgn.las" -"D709","NAD83 (USA, NADCON)","E008","California (South of 37dN)","cshpgn.los","cshpgn.las" -"D710","NAD83 HARN (USA, NADCON)","E008","California (South of 37dN)","cshpgn.los","cshpgn.las" -"D711","NAD83 (USA, NADCON)","E008","Colorado","cohpgn.los","cohpgn.las" -"D712","NAD83 HARN (USA, NADCON)","E008","Colorado","cohpgn.los","cohpgn.las" -"D713","NAD83 (USA, NADCON)","E008","Florida","flhpgn.los","flhpgn.las" -"D714","NAD83 HARN (USA, NADCON)","E008","Florida","flhpgn.los","flhpgn.las" -"D715","NAD83 (USA, NADCON)","E008","Georgia","gahpgn.los","gahpgn.las" -"D716","NAD83 HARN (USA, NADCON)","E008","Georgia","gahpgn.los","gahpgn.las" -"D717","Guam 1963 (USA, NADCON)","E000","Guam","guhpgn.los","guhpgn.las" -"D718","NAD83 HARN (USA, NADCON)","E008","Guam","guhpgn.los","guhpgn.las" -"D719","NAD83 (USA, NADCON)","E008","Hawaii","hihpgn.los","hihpgn.las" -"D720","NAD83 HARN (USA, NADCON)","E008","Hawaii","hihpgn.los","hihpgn.las" -"D721","NAD83 (USA, NADCON)","E008","Idaho-Montana (East of 113dW)","emhpgn.los","emhpgn.las" -"D722","NAD83 HARN (USA, NADCON)","E008","Idaho-Montana (East of 113dW)","emhpgn.los","emhpgn.las" -"D723","NAD83 (USA, NADCON)","E008","Idaho-Montana (West of 113dW)","wmhpgn.los","wmhpgn.las" -"D724","NAD83 HARN (USA, 
NADCON)","E008","Idaho-Montana (West of 113dW)","wmhpgn.los","wmhpgn.las" -"D725","NAD83 (USA, NADCON)","E008","Iowa","iahpgn.los","iahpgn.las" -"D726","NAD83 HARN (USA, NADCON)","E008","Iowa","iahpgn.los","iahpgn.las" -"D727","NAD83 (USA, NADCON)","E008","Illinois","ilhpgn.los","ilhpgn.las" -"D728","NAD83 HARN (USA, NADCON)","E008","Illinois","ilhpgn.los","ilhpgn.las" -"D729","NAD83 (USA, NADCON)","E008","Indiana","inhpgn.los","inhpgn.las" -"D730","NAD83 HARN (USA, NADCON)","E008","Indiana","inhpgn.los","inhpgn.las" -"D731","NAD83 (USA, NADCON)","E008","Kansas","kshpgn.los","kshpgn.las" -"D732","NAD83 HARN (USA, NADCON)","E008","Kansas","kshpgn.los","kshpgn.las" -"D733","NAD83 (USA, NADCON)","E008","Kentucky","kyhpgn.los","kyhpgn.las" -"D734","NAD83 HARN (USA, NADCON)","E008","Kentucky","kyhpgn.los","kyhpgn.las" -"D735","NAD83 (USA, NADCON)","E008","Louisiana","lahpgn.los","lahpgn.las" -"D736","NAD83 HARN (USA, NADCON)","E008","Louisiana","lahpgn.los","lahpgn.las" -"D737","NAD83 (USA, NADCON)","E008","Maryland-Delaware","mdhpgn.los","mdhpgn.las" -"D738","NAD83 HARN (USA, NADCON)","E008","Maryland-Delaware","mdhpgn.los","mdhpgn.las" -"D739","NAD83 (USA, NADCON)","E008","Maine","mehpgn.los","mehpgn.las" -"D740","NAD83 HARN (USA, NADCON)","E008","Maine","mehpgn.los","mehpgn.las" -"D741","NAD83 (USA, NADCON)","E008","Michigan","mihpgn.los","mihpgn.las" -"D742","NAD83 HARN (USA, NADCON)","E008","Michigan","mihpgn.los","mihpgn.las" -"D743","NAD83 (USA, NADCON)","E008","Minnesota","mnhpgn.los","mnhpgn.las" -"D744","NAD83 HARN (USA, NADCON)","E008","Minnesota","mnhpgn.los","mnhpgn.las" -"D745","NAD83 (USA, NADCON)","E008","Mississippi","mshpgn.los","mshpgn.las" -"D746","NAD83 HARN (USA, NADCON)","E008","Mississippi","mshpgn.los","mshpgn.las" -"D747","NAD83 (USA, NADCON)","E008","Missouri","mohpgn.los","mohpgn.las" -"D748","NAD83 HARN (USA, NADCON)","E008","Missouri","mohpgn.los","mohpgn.las" -"D749","NAD83 (USA, NADCON)","E008","Nebraska","nbhpgn.los","nbhpgn.las" -"D750","NAD83 HARN (USA, NADCON)","E008","Nebraska","nbhpgn.los","nbhpgn.las" -"D751","NAD83 (USA, NADCON)","E008","Nevada","nvhpgn.los","nvhpgn.las" -"D752","NAD83 HARN (USA, NADCON)","E008","Nevada","nvhpgn.los","nvhpgn.las" -"D753","NAD83 (USA, NADCON)","E008","New England (CT,MA,NH,RI,VT)","nehpgn.los","nehpgn.las" -"D754","NAD83 HARN (USA, NADCON)","E008","New England (CT,MA,NH,RI,VT)","nehpgn.los","nehpgn.las" -"D755","NAD83 (USA, NADCON)","E008","New Jersey","njhpgn.los","njhpgn.las" -"D756","NAD83 HARN (USA, NADCON)","E008","New Jersey","njhpgn.los","njhpgn.las" -"D757","NAD83 (USA, NADCON)","E008","New Mexico","nmhpgn.los","nmhpgn.las" -"D758","NAD83 HARN (USA, NADCON)","E008","New Mexico","nmhpgn.los","nmhpgn.las" -"D759","NAD83 (USA, NADCON)","E008","New York","nyhpgn.los","nyhpgn.las" -"D760","NAD83 HARN (USA, NADCON)","E008","New York","nyhpgn.los","nyhpgn.las" -"D761","NAD83 (USA, NADCON)","E008","North Carolina","nchpgn.los","nchpgn.las" -"D762","NAD83 HARN (USA, NADCON)","E008","North Carolina","nchpgn.los","nchpgn.las" -"D763","NAD83 (USA, NADCON)","E008","North Dakota","ndhpgn.los","ndhpgn.las" -"D764","NAD83 HARN (USA, NADCON)","E008","North Dakota","ndhpgn.los","ndhpgn.las" -"D765","NAD83 (USA, NADCON)","E008","Ohio","ohhpgn.los","ohhpgn.las" -"D766","NAD83 HARN (USA, NADCON)","E008","Ohio","ohhpgn.los","ohhpgn.las" -"D767","NAD83 (USA, NADCON)","E008","Oklahoma","okhpgn.los","okhpgn.las" -"D768","NAD83 HARN (USA, NADCON)","E008","Oklahoma","okhpgn.los","okhpgn.las" -"D769","NAD83 (USA, 
NADCON)","E008","Pennsylvania","pahpgn.los","pahpgn.las" -"D770","NAD83 HARN (USA, NADCON)","E008","Pennsylvania","pahpgn.los","pahpgn.las" -"D771","NAD83 (USA, NADCON)","E008","Puerto Rico-Virgin Is","pvhpgn.los","pvhpgn.las" -"D772","NAD83 HARN (USA, NADCON)","E008","Puerto Rico-Virgin Is","pvhpgn.los","pvhpgn.las" -"D773","American Samoa 1962 (USA, NADCON)","E000","Samoa (Eastern Islands)","eshpgn.los","eshpgn.las" -"D774","NAD83 HARN (USA, NADCON)","E008","Samoa (Eastern Islands)","eshpgn.los","eshpgn.las" -"D775","American Samoa 1962 (USA, NADCON)","E000","Samoa (Western Islands)","wshpgn.los","wshpgn.las" -"D776","NAD83 HARN (USA, NADCON)","E008","Samoa (Western Islands)","wshpgn.los","wshpgn.las" -"D777","NAD83 (USA, NADCON)","E008","South Carolina","schpgn.los","schpgn.las" -"D778","NAD83 HARN (USA, NADCON)","E008","South Carolina","schpgn.los","schpgn.las" -"D779","NAD83 (USA, NADCON)","E008","South Dakota","sdhpgn.los","sdhpgn.las" -"D780","NAD83 HARN (USA, NADCON)","E008","South Dakota","sdhpgn.los","sdhpgn.las" -"D781","NAD83 (USA, NADCON)","E008","Tennessee","tnhpgn.los","tnhpgn.las" -"D782","NAD83 HARN (USA, NADCON)","E008","Tennessee","tnhpgn.los","tnhpgn.las" -"D783","NAD83 (USA, NADCON)","E008","Texas (East of 100dW)","ethpgn.los","ethpgn.las" -"D784","NAD83 HARN (USA, NADCON)","E008","Texas (East of 100dW)","ethpgn.los","ethpgn.las" -"D785","NAD83 (USA, NADCON)","E008","Texas (West of 100dW)","wthpgn.los","wthpgn.las" -"D786","NAD83 HARN (USA, NADCON)","E008","Texas (West of 100dW)","wthpgn.los","wthpgn.las" -"D787","NAD83 (USA, NADCON)","E008","Utah","uthpgn.los","uthpgn.las" -"D788","NAD83 HARN (USA, NADCON)","E008","Utah","uthpgn.los","uthpgn.las" -"D789","NAD83 (USA, NADCON)","E008","Virginia","vahpgn.los","vahpgn.las" -"D790","NAD83 HARN (USA, NADCON)","E008","Virginia","vahpgn.los","vahpgn.las" -"D791","NAD83 (USA, NADCON)","E008","Washington-Oregon","wohpgn.los","wohpgn.las" -"D792","NAD83 HARN (USA, NADCON)","E008","Washington-Oregon","wohpgn.los","wohpgn.las" -"D793","NAD83 (USA, NADCON)","E008","West Virginia","wvhpgn.los","wvhpgn.las" -"D794","NAD83 HARN (USA, NADCON)","E008","West Virginia","wvhpgn.los","wvhpgn.las" -"D795","NAD83 (USA, NADCON)","E008","Wisconsin","wihpgn.los","wihpgn.las" -"D796","NAD83 HARN (USA, NADCON)","E008","Wisconsin","wihpgn.los","wihpgn.las" -"D797","NAD83 (USA, NADCON)","E008","Wyoming","wyhpgn.los","wyhpgn.las" -"D798","NAD83 HARN (USA, NADCON)","E008","Wyoming","wyhpgn.los","wyhpgn.las" -"D886","Reseau Geodesique Francais 1993","E899",-752,-358,-179,"Taiwan",0,0,0,0,-0.0000011698,0.0000018398,0.0000009822,0.00002329 -"D887","Reseau National Belge 1972","E899",-752,-358,-179,"Taiwan",0,0,0,0,-0.0000011698,0.0000018398,0.0000009822,0.00002329 +"D701","NAD83 (Deprecated - use D122)","E008",0,0,0,"Alabama",2,2,2,354 +"D702","NAD83 HARN (USA, NADCON)","E008","Alabama","D122","alhpgn.los","alhpgn.las" +"D703","NAD83 (Deprecated - use D122)","E008",0,0,0,"Arkansas",2,2,2,354 +"D704","NAD83 HARN (USA, NADCON)","E008","Arkansas","D122","arhpgn.los","arhpgn.las" +"D705","NAD83 (Deprecated - use D122)","E008",0,0,0,"Arizona",2,2,2,354 +"D706","NAD83 HARN (USA, NADCON)","E008","Arizona","D122","azhpgn.los","azhpgn.las" +"D707","NAD83 (Deprecated - use D122)","E008",0,0,0,"California (North of 37dN)",2,2,2,354 +"D708","NAD83 HARN (USA, NADCON)","E008","California (North of 37dN)","D122","cnhpgn.los","cnhpgn.las" +"D709","NAD83 (Deprecated - use D122)","E008",0,0,0,"California (South of 37dN)",2,2,2,354 +"D710","NAD83 HARN (USA, 
NADCON)","E008","California (South of 37dN)","D122","cshpgn.los","cshpgn.las" +"D711","NAD83 (Deprecated - use D122)","E008",0,0,0,"Colorado",2,2,2,354 +"D712","NAD83 HARN (USA, NADCON)","E008","Colorado","D122","cohpgn.los","cohpgn.las" +"D713","NAD83 (Deprecated - use D122)","E008",0,0,0,"Florida",2,2,2,354 +"D714","NAD83 HARN (USA, NADCON)","E008","Florida","D122","flhpgn.los","flhpgn.las" +"D715","NAD83 (Deprecated - use D122)","E008",0,0,0,"Georgia",2,2,2,354 +"D716","NAD83 HARN (USA, NADCON)","E008","Georgia","D122","gahpgn.los","gahpgn.las" +"D717","Guam 1963 (Deprecated - use D068)","E000",-100,-248,259,"Guam",3,3,3,5 +"D718","NAD83 HARN (USA, NADCON)","E008","Guam","D068","guhpgn.los","guhpgn.las" +"D719","NAD83 (Deprecated - use D122)","E008",0,0,0,"Hawaii",2,2,2,354 +"D720","NAD83 HARN (USA, NADCON)","E008","Hawaii","D122","hihpgn.los","hihpgn.las" +"D721","NAD83 (Deprecated - use D122)","E008",0,0,0,"Idaho-Montana (East of 113dW)",2,2,2,354 +"D722","NAD83 HARN (USA, NADCON)","E008","Idaho-Montana (East of 113dW)","D122","emhpgn.los","emhpgn.las" +"D723","NAD83 (Deprecated - use D122)","E008",0,0,0,"Idaho-Montana (West of 113dW)",2,2,2,354 +"D724","NAD83 HARN (USA, NADCON)","E008","Idaho-Montana (West of 113dW)","D122","wmhpgn.los","wmhpgn.las" +"D725","NAD83 (Deprecated - use D122)","E008",0,0,0,"Iowa",2,2,2,354 +"D726","NAD83 HARN (USA, NADCON)","E008","Iowa","D122","iahpgn.los","iahpgn.las" +"D727","NAD83 (Deprecated - use D122)","E008",0,0,0,"Illinois",2,2,2,354 +"D728","NAD83 HARN (USA, NADCON)","E008","Illinois","D122","ilhpgn.los","ilhpgn.las" +"D729","NAD83 (Deprecated - use D122)","E008",0,0,0,"Indiana",2,2,2,354 +"D730","NAD83 HARN (USA, NADCON)","E008","Indiana","D122","inhpgn.los","inhpgn.las" +"D731","NAD83 (Deprecated - use D122)","E008",0,0,0,"Kansas",2,2,2,354 +"D732","NAD83 HARN (USA, NADCON)","E008","Kansas","D122","kshpgn.los","kshpgn.las" +"D733","NAD83 (Deprecated - use D122)","E008",0,0,0,"Kentucky",2,2,2,354 +"D734","NAD83 HARN (USA, NADCON)","E008","Kentucky","D122","kyhpgn.los","kyhpgn.las" +"D735","NAD83 (Deprecated - use D122)","E008",0,0,0,"Louisiana",2,2,2,354 +"D736","NAD83 HARN (USA, NADCON)","E008","Louisiana","D122","lahpgn.los","lahpgn.las" +"D737","NAD83 (Deprecated - use D122)","E008",0,0,0,"Maryland-Delaware",2,2,2,354 +"D738","NAD83 HARN (USA, NADCON)","E008","Maryland-Delaware","D122","mdhpgn.los","mdhpgn.las" +"D739","NAD83 (Deprecated - use D122)","E008",0,0,0,"Maine",2,2,2,354 +"D740","NAD83 HARN (USA, NADCON)","E008","Maine","D122","mehpgn.los","mehpgn.las" +"D741","NAD83 (Deprecated - use D122)","E008",0,0,0,"Michigan",2,2,2,354 +"D742","NAD83 HARN (USA, NADCON)","E008","Michigan","D122","mihpgn.los","mihpgn.las" +"D743","NAD83 (Deprecated - use D122)","E008",0,0,0,"Minnesota",2,2,2,354 +"D744","NAD83 HARN (USA, NADCON)","E008","Minnesota","D122","mnhpgn.los","mnhpgn.las" +"D745","NAD83 (Deprecated - use D122)","E008",0,0,0,"Mississippi",2,2,2,354 +"D746","NAD83 HARN (USA, NADCON)","E008","Mississippi","D122","mshpgn.los","mshpgn.las" +"D747","NAD83 (Deprecated - use D122)","E008",0,0,0,"Missouri",2,2,2,354 +"D748","NAD83 HARN (USA, NADCON)","E008","Missouri","D122","mohpgn.los","mohpgn.las" +"D749","NAD83 (Deprecated - use D122)","E008",0,0,0,"Nebraska",2,2,2,354 +"D750","NAD83 HARN (USA, NADCON)","E008","Nebraska","D122","nbhpgn.los","nbhpgn.las" +"D751","NAD83 (Deprecated - use D122)","E008",0,0,0,"Nevada",2,2,2,354 +"D752","NAD83 HARN (USA, NADCON)","E008","Nevada","D122","nvhpgn.los","nvhpgn.las" +"D753","NAD83 (Deprecated - use 
D122)","E008",0,0,0,"New England (CT,MA,NH,RI,VT",2,2,2,354 +"D754","NAD83 HARN (USA, NADCON)","E008","New England (CT,MA,NH,RI,VT)","D122","nehpgn.los","nehpgn.las" +"D755","NAD83 (Deprecated - use D122)","E008",0,0,0,"New Jersey",2,2,2,354 +"D756","NAD83 HARN (USA, NADCON)","E008","New Jersey","D122","njhpgn.los","njhpgn.las" +"D757","NAD83 (Deprecated - use D122)","E008",0,0,0,"New Mexico",2,2,2,354 +"D758","NAD83 HARN (USA, NADCON)","E008","New Mexico","D122","nmhpgn.los","nmhpgn.las" +"D759","NAD83 (Deprecated - use D122)","E008",0,0,0,"New York",2,2,2,354 +"D760","NAD83 HARN (USA, NADCON)","E008","New York","D122","nyhpgn.los","nyhpgn.las" +"D761","NAD83 (Deprecated - use D122)","E008",0,0,0,"North Carolina",2,2,2,354 +"D762","NAD83 HARN (USA, NADCON)","E008","North Carolina","D122","nchpgn.los","nchpgn.las" +"D763","NAD83 (Deprecated - use D122)","E008",0,0,0,"North Dakota",2,2,2,354 +"D764","NAD83 HARN (USA, NADCON)","E008","North Dakota","D122","ndhpgn.los","ndhpgn.las" +"D765","NAD83 (Deprecated - use D122)","E008",0,0,0,"Ohio",2,2,2,354 +"D766","NAD83 HARN (USA, NADCON)","E008","Ohio","D122","ohhpgn.los","ohhpgn.las" +"D767","NAD83 (Deprecated - use D122)","E008",0,0,0,"Oklahoma",2,2,2,354 +"D768","NAD83 HARN (USA, NADCON)","E008","Oklahoma","D122","okhpgn.los","okhpgn.las" +"D769","NAD83 (Deprecated - use D122)","E008",0,0,0,"Pennsylvania",2,2,2,354 +"D770","NAD83 HARN (USA, NADCON)","E008","Pennsylvania","D122","pahpgn.los","pahpgn.las" +"D771","NAD83 (Deprecated - use D122)","E008",0,0,0,"Puerto Rico-Virgin Is",2,2,2,354 +"D772","NAD83 HARN (USA, NADCON)","E008","Puerto Rico-Virgin Is","D122","pvhpgn.los","pvhpgn.las" +"D773","American Samoa 1962 (Deprecated - use D189)","E000",-115,118,426,"Samoa (Eastern Islands)",25,25,25,2 +"D774","NAD83 HARN (USA, NADCON)","E008","Samoa (Eastern Islands)","D189","eshpgn.los","eshpgn.las" +"D775","American Samoa 1962 (Deprecated - use D189)","E000",-115,118,426,"Samoa (Western Islands)",25,25,25,2 +"D776","NAD83 HARN (USA, NADCON)","E008","Samoa (Western Islands)","D189","wshpgn.los","wshpgn.las" +"D777","NAD83 (Deprecated - use D122)","E008",0,0,0,"South Carolina",2,2,2,354 +"D778","NAD83 HARN (USA, NADCON)","E008","South Carolina","D122","schpgn.los","schpgn.las" +"D779","NAD83 (Deprecated - use D122)","E008",0,0,0,"South Dakota",2,2,2,354 +"D780","NAD83 HARN (USA, NADCON)","E008","South Dakota","D122","sdhpgn.los","sdhpgn.las" +"D781","NAD83 (Deprecated - use D122)","E008",0,0,0,"Tennessee",2,2,2,354 +"D782","NAD83 HARN (USA, NADCON)","E008","Tennessee","D122","tnhpgn.los","tnhpgn.las" +"D783","NAD83 (Deprecated - use D122)","E008",0,0,0,"Texas (East of 100dW)",2,2,2,354 +"D784","NAD83 HARN (USA, NADCON)","E008","Texas (East of 100dW)","D122","ethpgn.los","ethpgn.las" +"D785","NAD83 (Deprecated - use D122)","E008",0,0,0,"Texas (West of 100dW)",2,2,2,354 +"D786","NAD83 HARN (USA, NADCON)","E008","Texas (West of 100dW)","D122","wthpgn.los","wthpgn.las" +"D787","NAD83 (Deprecated - use D122)","E008",0,0,0,"Utah",2,2,2,354 +"D788","NAD83 HARN (USA, NADCON)","E008","Utah","D122","uthpgn.los","uthpgn.las" +"D789","NAD83 (Deprecated - use D122)","E008",0,0,0,"Virginia",2,2,2,354 +"D790","NAD83 HARN (USA, NADCON)","E008","Virginia","D122","vahpgn.los","vahpgn.las" +"D791","NAD83 (Deprecated - use D122)","E008",0,0,0,"Washington-Oregon",2,2,2,354 +"D792","NAD83 HARN (USA, NADCON)","E008","Washington-Oregon","D122","wohpgn.los","wohpgn.las" +"D793","NAD83 (Deprecated - use D122)","E008",0,0,0,"West Virginia",2,2,2,354 +"D794","NAD83 HARN (USA, 
NADCON)","E008","West Virginia","D122","wvhpgn.los","wvhpgn.las" +"D795","NAD83 (Deprecated - use D122)","E008",0,0,0,"Wisconsin",2,2,2,354 +"D796","NAD83 HARN (USA, NADCON)","E008","Wisconsin","D122","wihpgn.los","wihpgn.las" +"D797","NAD83 (Deprecated - use D122)","E008",0,0,0,"Wyoming",2,2,2,354 +"D798","NAD83 HARN (USA, NADCON)","E008","Wyoming","D122","wyhpgn.los","wyhpgn.las" "D888","Lebanon Stereographic","E012",154.2668777,107.2190767,-263.01161212,"Lebanon",0,0,0,0,0.310716,0.218736,0.191232,0.99999913 "D889","Lebanon Lambert","E202",190.9999,133.32473,-232.8391,"Lebanon",0,0,0,0,0.307836,0.216756,0.189036,0.9995341 "D890","Luxembourg (LUREF)","E004",-192.986,13.673,-39.309,"Luxembourg",0,0,0,0,0.409900,2.933200,-2.688100,1.00000043 "D891","Datum 73","E004",-223.237,110.193,36.649,"Portugal",0,0,0,0 "D892","Datum Lisboa","E004",-304.046,-60.576,103.640,"Portugal",0,0,0,0 "D893","PDO Survey Datum 1993","E001",-180.624,-225.516,173.919,"Oman",0,0,0,0,0.80970,1.89755,-8.33604,16.71006 -"D894","WGS 1984 semi-major","E020",0,0,0,"WGS 1984 Auxiliary Sphere semi-major axis",0,0,0,0 "D898","TWD97","E008",0,0,0,"Taiwan",0,0,0,0,0.0,0.0,0.0,0.0 "D899","TWD67","E899",-752,-358,-179,"Taiwan",0,0,0,0,-0.0000011698,0.0000018398,0.0000009822,0.00002329 +"D886", "Reseau Geodesique Francais 1993", "E899",-752,-358,-179,"France",0,0,0,0,-0.0000011698,0.0000018398,0.0000009822,0.00002329 +"D887", "Reseau National Belge 1972", "E899",-752,-358,-179,"Belgium",0,0,0,0,-0.0000011698,0.0000018398,0.0000009822,0.00002329 +"D819", "Xian 1980", "E224",0,0,0,"China",0,0,0,0,0,0,0,0 +"D820","Korea 2000","E008",0.0,0.0,0.0,"South Korea",0,0,0,0 +"D821","Pulkovo 1995","E015",24.47,-130.89,-81.56,"Russian Federation",0,0,0,0,0,0,-0.13,-0.22 +"D822","Beijing 1954","E015",15.8,-154.4,-82.3,"China",0,0,0,0 +"D823","Stockholm 1938 (RT38)","E002",0.0,0.0,0.0,"Sweden",0,0,0,0 +"D824","Greenland 1996 (GR96)","E008",0.0,0.0,0.0,"Greenland",0,0,0,0 +"D825","Libyan Geodetic Datum 2006 (LGD2006)","E004",-208.406,-109.878,-2.5764,"Libya",0,0,0,0 +"D826","Reseau Geodesique de la Polynesie Francaise (RGPF)","E008",0.072,-0.507,-0.245,"French Polynesia",0,0,0,0,0.0183,-0.0003,0.007,-0.0093 +"D827","IGC 1962 6th Parallel South","E001",0.0,0.0,0.0,"Democratic Republic of the Congo - adjacent to 6th parallel south",0,0,0,0 +"D828","Geodetic Datum of Malaysia (GDM)","E008",0.0,0.0,0.0,"Malaysia",0,0,0,0 +"D829","New Beijing","E015",0.0,0.0,0.0,"China",0,0,0,0 +"D830","Turkish National Reference Frame (TUKREF)","E008",0.0,0.0,0.0,"Turkey",0,0,0,0 +"D831","Bhutan National Geodetic Datum (DRUKREF)","E008",0.0,0.0,0.0,"Bhutan",0,0,0,0 +"D832","Ukraine 2000","E015",0.0,0.0,0.0,"Ukraine",0,0,0,0 +"D833","Japanese Geodetic Datum 2011 (JGD2011)","E008",0.0,0.0,0.0,"Japan",0,0,0,0 +"D834","Posiciones Geodesicas Argentinas 1998 (POSGAR 98)","E008",0.0,0.0,0.0,"Argentina",0,0,0,0 +"D835","Posiciones Geodesicas Argentinas 1994 (POSGAR 94)","E012",0.0,0.0,0.0,"Argentina",0,0,0,0 +"D836","Posiciones Geodesicas Argentinas 2007 (POSGAR 07)","E008",0.0,0.0,0.0,"Argentina",0,0,0,0 +"D837","Datum Geodesi Nasional 1995 (DGN95)","E012",0.0,0.0,0.0,"Indonesia",0,0,0,0 +"D838","Korea 1995","E012",0.0,0.0,0.0,"South Korea",0,0,0,0 +"D839","Institut Geographique du Congo Belge (IGCB) 1955","E001",-79.9,-158,-168.9,"The Democratic Republic of the Congo (Zaire) - Lower Congo",0,0,0,0 +"D894", "WGS 1984 semi-major","E020",0,0,0,"WGS 1984 Auxiliary Sphere semi-major axis",0,0,0,0 +"D895","ATS77","E910",-95.323,166.098,-69.942,"Maritime 
Provinces",0,0,0,0,0.215,1.031,-0.047,1.922 +"D896","GosatCAIL1B+ EarthRadius","E025",0,0,0,"GosatCAIL1B+ EarthRadius",0,0,0,0 +"D897","Myanmar","E227",247,785,277,"Myanmar",0,0,0,0 +"D900","China 2000","E231",0,0,0,"China 2000",0,0,0,0 +"D901","Nouvelle Triangulation Francaise (grid shift)","E202","France","-D350","ntf_r93.gsb" +"D902","PRS92","E000",-127.62153,-67.24339,-47.04738,"Philippines Reference System 1992",0,0,0,0,3.06803,-4.90297,-1.57807,-1.06002 +"D903","North American 1983 2011","E008",0,0,0,"Alaska, Canada, CONUS, Central America, Mexico",2,2,2,354 diff --git a/ogr/data/pci_ellips.txt b/ogr/data/pci_ellips.txt index d0cc64645fc4..71f54fb2e080 100644 --- a/ogr/data/pci_ellips.txt +++ b/ogr/data/pci_ellips.txt @@ -1,57 +1,69 @@ ! -! By email on December 2nd, 2010: +! From https://github.com/OSGeo/gdal/issues/8034, June 30, 2023 ! -! I, Louis Burry, on behalf of PCI Geomatics agree to allow the ellips.txt +! I, Michael Goldberg, on behalf of PCI Geomatics agree to allow the ellips.txt ! and datum.txt file to be distributed under the GDAL open source license. ! -! Louis Burry -! VP Technology & Delivery +! Michael Goldberg +! Development Manager ! PCI Geomatics ! +! PCI Ellipsoid Database +! ---------------------- +! This file lists the different reference ellipsoids that may +! be used by PCI coordinate systems. Ellipsoid entries in datum.txt +! refer to entries in this file. +! +! Each ellipsoid is listed on a single line. The format of each record +! is as follows: +! +! Ellipsoid_code, Description_string, Semimajor_axis_m, Semiminor_axis_m [,extra comments] +! +! Ellipsoid_code is the code that uniquely identifies the ellipsoid +! within PCI software +! Description_string is a short description that helps users to identify +! the ellipsoid. It may be listed, for example, in a dropdown list in +! a PCI dialog box. +! Semimajor_axis_m is the ellipsoid semi-major (equatorial) axis length in metres. +! Semiminor_axis_m is the ellipsoid semi-minor (polar) axis length in metres. +! +! Any extra fields may be added after these four elements if desired; they will +! not be read by PCI software but may be helpful for the user. ! ! NOTE: The range of "E908" to "E998" is set aside for ! the use of local customer development. ! -"E009","Airy 1830",6377563.396,6356256.91 -"E011","Modified Airy",6377340.189,6356034.448 -"E910","ATS77",6378135.0,6356750.304922 -"E014","Australian National 1965",6378160.,6356774.719 -"E002","Bessel 1841",6377397.155,6356078.96284 -"E900","Bessel 1841 (Namibia)",6377483.865,6356165.382966 -"E333","Bessel 1841 (Japan By Law)",6377397.155,6356078.963 "E000","Clarke 1866",6378206.4,6356583.8 "E001","Clarke 1880 (RGS)",6378249.145,6356514.86955 -"E202","Clarke 1880 (IGN, France)",6378249.2,6356515.0 +"E002","Bessel 1841",6377397.155,6356078.96284 +"E003","New International 1967",6378157.5,6356772.2 +"E004","International 1924",6378388.,6356911.94613 +"E005","WGS 72",6378135.,6356750.519915 "E006","Everest (India 1830)",6377276.3452,6356075.4133 +"E007","WGS 66",6378145.,6356759.769356 +"E008","GRS 1980",6378137.,6356752.31414 +"E009","Airy 1830",6377563.396,6356256.91 "E010","Everest (W. Malaysia and Singapore 1948)",6377304.063,6356103.039 -"E901","Everest (India 1956)",6377301.243,6356100.228368 -"E902","Everest (W. Malaysia 1969)",6377295.664,6356094.667915 -"E903","Everest (E. 
Malaysia and Brunei)",6377298.556,6356097.550301 -"E201","Everest (Pakistan)",6377309.613,6356108.570542 -"E017","Fischer 1960",6378166.,6356784.283666 +"E011","Modified Airy",6377340.189,6356034.448 +"E012","WGS 84",6378137.,6356752.314245 "E013","Modified Fischer 1960",6378155.,6356773.3205 -"E018","Fischer 1968",6378150.,6356768.337303 -"E008","GRS 1980",6378137.,6356752.31414 -"E904","Helmert 1906",6378200.,6356818.169628 -"E016","Hough 1960",6378270.,6356794.343479 -"E200","Indonesian 1974",6378160.,6356774.504086 -"E004","International 1924",6378388.,6356911.94613 -"E203","IUGG 67",6378160.,6356774.516090714 +"E014","Australian National 1965",6378160.,6356774.719 "E015","Krassovsky 1940",6378245.,6356863.0188 -"E700","MODIS (Sphere from WGS84)",6371007.181,6371007.181 -"E003","New International 1967",6378157.5,6356772.2 +"E016","Hough 1960",6378270.,6356794.343479 +"E017","Fischer 1960",6378166.,6356784.283666 +"E018","Fischer 1968",6378150.,6356768.337303 "E019","Normal Sphere",6370997.,6370997. -"E905","SGS 85",6378136.,6356751.301569 -"E907","South American 1969",6378160.,6356774.719 -"E906","WGS 60",6378165.,6356783.286959 -"E007","WGS 66",6378145.,6356759.769356 -"E005","WGS 72",6378135.,6356750.519915 -"E012","WGS 84",6378137.,6356752.314245 "E020","WGS 84 semimajor axis",6378137.,6378137. -"E600","D-PAF (Orbits)",6378144.0,6356759.0 -"E601","Test Data Set 1",6378144.0,6356759.0 -"E602","Test Data Set 2",6377397.2,6356079.0 -"E204","War Office",6378300.583,6356752.270 +"E021","WGS 84 semiminor axis",6356752.314245,6356752.314245 +"E022", "Clarke 1866 Authalic Sphere", 6370997.000000, 6370997.000000 +"E023", "GRS 1980 Authalic Sphere", 6371007.000000, 6371007.000000 +"E024", "International 1924 Authalic Sphere", 6371228.000000, 6371228.000000 +"E025","GosatCAIL1B+ EarthRadius",6371008.77138,6371008.77138 +"E200","Indonesian 1974",6378160.,6356774.504086 +"E201","Everest (Pakistan)",6377309.613,6356108.570542 +"E202","Clarke 1880 (IGN, France)",6378249.2,6356515.0 +"E203","IUGG 67",6378160.,6356774.516090714 +"E204","War Office",6378300.000,6356751.689189 "E205","Clarke 1880 Arc",6378249.145,6356514.966 "E206","Bessel Modified",6377492.018,6356173.5087 "E207","Clarke 1858",6378293.639,6356617.98149 @@ -61,17 +73,57 @@ "E211","Everest Modified",6377304.063,6356103.039 "E212","Modified Everest 1969",6377295.664,6356094.668 "E213","Everest (1967 Definition)",6377298.556,6356097.550 -"E214","Clarke 1880 (Benoit)",6378300.79,6356566.43 +"E214","Clarke 1880 (Benoit)",6378300.789000,6356566.435000 "E215","Clarke 1880 (SGA)",6378249.2,6356515.0 "E216","Everest (1975 Definition)",6377299.151,6356098.1451 "E217","GEM 10C",6378137,6356752.31414 "E218","OSU 86F",6378136.2,6356751.516672 "E219","OSU 91A",6378136.3,6356751.6163367 "E220","Sphere",6371000,6371000 -"E221","Struve 1860",6378297,6356655.847 +"E221","Struve 1860",6378298.300000,6356657.142670 "E222","Walbeck",6376896,6355834.847 "E223","Plessis 1817",6376523,6355862.933 "E224","Xian 1980",6378140.0,6356755.288 "E225","EMEP Sphere",6370000,6370000 "E226","Everest (India and Nepal)",6377301.243,6356100.228368 +"E227", "Everest (1830 Definition)", 6377299.365595, 6356098.359005, "EPSG:7042" +"E228", "Danish 1876", 6377019.270000, 6355762.539100 +"E229", "Bessel Namibia (GLM)", 6377483.865280, 6356165.383246 +"E230", "PZ-90", 6378136.000000, 6356751.361746 +"E231", "CGCS2000", 6378137.000000, 6356752.314140 +"E232", "IAG 1975", 6378140.000000, 6356755.288158 +"E233", "NWL 9D", 6378145.000000, 6356759.769489 +"E234", "Hughes 1980", 
6378273.000000, 6356889.449000 +"E235", "Clarke 1880 (international foot)", 6378306.369600, 6356571.996000 +"E236", "Clarke 1866 Michigan", 6378450.047549, 6356826.621488 +"E237", "APL 4.5 (1968)", 6378144.000000, 6356757.338698 +"E238", "Airy (War Office)", 6377542.178, 6356235.764 +"E239", "Clarke 1858 (DIGEST)", 6378235.600, 6356560.140 +"E240", "Clarke 1880 (Palestine)", 6378300.782, 6356566.427 +"E241", "Clarke 1880 (Syria)", 6378247.842, 6356513.671 +"E242", "Clarke 1880 (Fiji)", 6378301.000, 6356566.548 +"E243", "Andrae", 6377104.430, 6355847.415 +"E244", "Delambre 1810", 6376985.228, 6356323.664 +"E245", "Delambre (Carte de France)", 6376985.000, 6356323.436 +"E246", "Germaine (Djibouti)", 6378284.000, 6356589.156 +"E247", "Hayford 1909", 6378388.000, 6356909.000 +"E248", "Krayenhoff 1827", 6376950.400, 6356356.341 +"E249", "Plessis Reconstituted", 6376523.994, 6355862.907 +"E250", "GRS 1967", 6378160.000, 6356774.516 +"E251", "Svanberg", 6376797.000, 6355837.971 +"E252", "Walbeck 1819 (Planheft 1942)", 6376895.000, 6355834.000 +"E333","Bessel 1841 (Japan By Law)",6377397.155,6356078.963 +"E600","D-PAF (Orbits)",6378144.0,6356759.0 +"E601","Test Data Set 1",6378144.0,6356759.0 +"E602","Test Data Set 2",6377397.2,6356079.0 +"E700","MODIS (Sphere from WGS84)",6371007.181,6371007.181 "E899","GRS 1967 Modified",6378160.,6356774.719195306 +"E900","Bessel 1841 (Namibia)",6377483.865,6356165.382966 +"E901","Everest (India 1956)",6377301.243,6356100.228368 +"E902","Everest (W. Malaysia 1969)",6377295.664,6356094.667915 +"E903","Everest (E. Malaysia and Brunei)",6377298.556,6356097.550301 +"E904","Helmert 1906",6378200.,6356818.169628 +"E905","SGS 85",6378136.,6356751.301569 +"E906","WGS 60",6378165.,6356783.286959 +"E907","South American 1969",6378160.,6356774.719 +"E910","ATS77",6378135.0,6356750.304922 From 6734edbc3bc138ab25b2ab7d45a62ca36fb217a8 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Thu, 18 Apr 2024 12:19:38 -0400 Subject: [PATCH 076/230] autotest ogr_sql_test.py: use tmp_path --- autotest/ogr/ogr_sql_test.py | 100 ++++++++++++++++------------------- 1 file changed, 47 insertions(+), 53 deletions(-) diff --git a/autotest/ogr/ogr_sql_test.py b/autotest/ogr/ogr_sql_test.py index 96838eb953d7..626b1f7e6db4 100755 --- a/autotest/ogr/ogr_sql_test.py +++ b/autotest/ogr/ogr_sql_test.py @@ -49,77 +49,71 @@ def module_disable_exceptions(): @pytest.mark.parametrize("use_gdal", [True, False]) -def test_ogr_sql_execute_sql(use_gdal): +def test_ogr_sql_execute_sql(tmp_path, use_gdal): - shutil.copy("data/poly.shp", "tmp/test_ogr_sql_execute_sql.shp") - shutil.copy("data/poly.shx", "tmp/test_ogr_sql_execute_sql.shx") + shutil.copy("data/poly.shp", tmp_path / "test_ogr_sql_execute_sql.shp") + shutil.copy("data/poly.shx", tmp_path / "test_ogr_sql_execute_sql.shx") - try: - - def get_dataset(): - return ( - gdal.OpenEx("tmp/test_ogr_sql_execute_sql.shp") - if use_gdal - else ogr.Open("tmp/test_ogr_sql_execute_sql.shp") - ) - - def check_historic_way(): - ds = get_dataset() - - # "Manual" / historic way of using ExecuteSQL() / ReleaseResultSet() - lyr = ds.ExecuteSQL("SELECT * FROM test_ogr_sql_execute_sql") - assert lyr.GetFeatureCount() == 10 - ds.ReleaseResultSet(lyr) - - # lyr invalidated - with pytest.raises(Exception): - lyr.GetName() - - # lyr invalidated - with pytest.raises(Exception): - ds.ReleaseResultSet(lyr) - - ds = None + def get_dataset(): + return ( + gdal.OpenEx(tmp_path / "test_ogr_sql_execute_sql.shp") + if use_gdal + else ogr.Open(tmp_path / 
"test_ogr_sql_execute_sql.shp") + ) - check_historic_way() + def check_historic_way(): + ds = get_dataset() - def check_context_manager(): - ds = get_dataset() + # "Manual" / historic way of using ExecuteSQL() / ReleaseResultSet() + lyr = ds.ExecuteSQL("SELECT * FROM test_ogr_sql_execute_sql") + assert lyr.GetFeatureCount() == 10 + ds.ReleaseResultSet(lyr) - # ExecuteSQL() as context manager - with ds.ExecuteSQL("SELECT * FROM test_ogr_sql_execute_sql") as lyr: - assert lyr.GetFeatureCount() == 10 + # lyr invalidated + with pytest.raises(Exception): + lyr.GetName() - # lyr invalidated - with pytest.raises(Exception): - lyr.GetName() + # lyr invalidated + with pytest.raises(Exception): + ds.ReleaseResultSet(lyr) - ds = None + ds = None - check_context_manager() + check_historic_way() - # ExecuteSQL() with keep_ref_on_ds=True - def get_lyr(): - return get_dataset().ExecuteSQL( - "SELECT * FROM test_ogr_sql_execute_sql", keep_ref_on_ds=True - ) + def check_context_manager(): + ds = get_dataset() - with get_lyr() as lyr: + # ExecuteSQL() as context manager + with ds.ExecuteSQL("SELECT * FROM test_ogr_sql_execute_sql") as lyr: assert lyr.GetFeatureCount() == 10 # lyr invalidated with pytest.raises(Exception): lyr.GetName() - assert get_lyr().GetFeatureCount() == 10 + ds = None + + check_context_manager() + + # ExecuteSQL() with keep_ref_on_ds=True + def get_lyr(): + return get_dataset().ExecuteSQL( + "SELECT * FROM test_ogr_sql_execute_sql", keep_ref_on_ds=True + ) + + with get_lyr() as lyr: + assert lyr.GetFeatureCount() == 10 + + # lyr invalidated + with pytest.raises(Exception): + lyr.GetName() - # Check that we can actually remove the files (i.e. references on dataset have been dropped) - os.unlink("tmp/test_ogr_sql_execute_sql.shp") - os.unlink("tmp/test_ogr_sql_execute_sql.shx") + assert get_lyr().GetFeatureCount() == 10 - except Exception: - os.unlink("tmp/test_ogr_sql_execute_sql.shp") - os.unlink("tmp/test_ogr_sql_execute_sql.shx") + # Check that we can actually remove the files (i.e. 
references on dataset have been dropped) + os.unlink(tmp_path / "test_ogr_sql_execute_sql.shp") + os.unlink(tmp_path / "test_ogr_sql_execute_sql.shx") @pytest.mark.require_driver("SQLite") From d70c895bbb3e26b3ee631650cfb37134f30cd149 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 18 Apr 2024 18:22:24 +0200 Subject: [PATCH 077/230] pci_datum.txt/pci_ellips.txt: remove erroneous extra spaces --- ogr/data/pci_datum.txt | 20 ++++++------- ogr/data/pci_ellips.txt | 62 ++++++++++++++++++++--------------------- 2 files changed, 41 insertions(+), 41 deletions(-) diff --git a/ogr/data/pci_datum.txt b/ogr/data/pci_datum.txt index bc4d4cea4c3d..68507ea0d720 100644 --- a/ogr/data/pci_datum.txt +++ b/ogr/data/pci_datum.txt @@ -458,13 +458,13 @@ "D765","NAD83 (Deprecated - use D122)","E008",0,0,0,"Ohio",2,2,2,354 "D766","NAD83 HARN (USA, NADCON)","E008","Ohio","D122","ohhpgn.los","ohhpgn.las" "D767","NAD83 (Deprecated - use D122)","E008",0,0,0,"Oklahoma",2,2,2,354 -"D768","NAD83 HARN (USA, NADCON)","E008","Oklahoma","D122","okhpgn.los","okhpgn.las" +"D768","NAD83 HARN (USA, NADCON)","E008","Oklahoma","D122","okhpgn.los","okhpgn.las" "D769","NAD83 (Deprecated - use D122)","E008",0,0,0,"Pennsylvania",2,2,2,354 "D770","NAD83 HARN (USA, NADCON)","E008","Pennsylvania","D122","pahpgn.los","pahpgn.las" "D771","NAD83 (Deprecated - use D122)","E008",0,0,0,"Puerto Rico-Virgin Is",2,2,2,354 -"D772","NAD83 HARN (USA, NADCON)","E008","Puerto Rico-Virgin Is","D122","pvhpgn.los","pvhpgn.las" +"D772","NAD83 HARN (USA, NADCON)","E008","Puerto Rico-Virgin Is","D122","pvhpgn.los","pvhpgn.las" "D773","American Samoa 1962 (Deprecated - use D189)","E000",-115,118,426,"Samoa (Eastern Islands)",25,25,25,2 -"D774","NAD83 HARN (USA, NADCON)","E008","Samoa (Eastern Islands)","D189","eshpgn.los","eshpgn.las" +"D774","NAD83 HARN (USA, NADCON)","E008","Samoa (Eastern Islands)","D189","eshpgn.los","eshpgn.las" "D775","American Samoa 1962 (Deprecated - use D189)","E000",-115,118,426,"Samoa (Western Islands)",25,25,25,2 "D776","NAD83 HARN (USA, NADCON)","E008","Samoa (Western Islands)","D189","wshpgn.los","wshpgn.las" "D777","NAD83 (Deprecated - use D122)","E008",0,0,0,"South Carolina",2,2,2,354 @@ -474,7 +474,7 @@ "D781","NAD83 (Deprecated - use D122)","E008",0,0,0,"Tennessee",2,2,2,354 "D782","NAD83 HARN (USA, NADCON)","E008","Tennessee","D122","tnhpgn.los","tnhpgn.las" "D783","NAD83 (Deprecated - use D122)","E008",0,0,0,"Texas (East of 100dW)",2,2,2,354 -"D784","NAD83 HARN (USA, NADCON)","E008","Texas (East of 100dW)","D122","ethpgn.los","ethpgn.las" +"D784","NAD83 HARN (USA, NADCON)","E008","Texas (East of 100dW)","D122","ethpgn.los","ethpgn.las" "D785","NAD83 (Deprecated - use D122)","E008",0,0,0,"Texas (West of 100dW)",2,2,2,354 "D786","NAD83 HARN (USA, NADCON)","E008","Texas (West of 100dW)","D122","wthpgn.los","wthpgn.las" "D787","NAD83 (Deprecated - use D122)","E008",0,0,0,"Utah",2,2,2,354 @@ -488,7 +488,7 @@ "D795","NAD83 (Deprecated - use D122)","E008",0,0,0,"Wisconsin",2,2,2,354 "D796","NAD83 HARN (USA, NADCON)","E008","Wisconsin","D122","wihpgn.los","wihpgn.las" "D797","NAD83 (Deprecated - use D122)","E008",0,0,0,"Wyoming",2,2,2,354 -"D798","NAD83 HARN (USA, NADCON)","E008","Wyoming","D122","wyhpgn.los","wyhpgn.las" +"D798","NAD83 HARN (USA, NADCON)","E008","Wyoming","D122","wyhpgn.los","wyhpgn.las" "D888","Lebanon Stereographic","E012",154.2668777,107.2190767,-263.01161212,"Lebanon",0,0,0,0,0.310716,0.218736,0.191232,0.99999913 "D889","Lebanon 
Lambert","E202",190.9999,133.32473,-232.8391,"Lebanon",0,0,0,0,0.307836,0.216756,0.189036,0.9995341 "D890","Luxembourg (LUREF)","E004",-192.986,13.673,-39.309,"Luxembourg",0,0,0,0,0.409900,2.933200,-2.688100,1.00000043 @@ -497,9 +497,9 @@ "D893","PDO Survey Datum 1993","E001",-180.624,-225.516,173.919,"Oman",0,0,0,0,0.80970,1.89755,-8.33604,16.71006 "D898","TWD97","E008",0,0,0,"Taiwan",0,0,0,0,0.0,0.0,0.0,0.0 "D899","TWD67","E899",-752,-358,-179,"Taiwan",0,0,0,0,-0.0000011698,0.0000018398,0.0000009822,0.00002329 -"D886", "Reseau Geodesique Francais 1993", "E899",-752,-358,-179,"France",0,0,0,0,-0.0000011698,0.0000018398,0.0000009822,0.00002329 -"D887", "Reseau National Belge 1972", "E899",-752,-358,-179,"Belgium",0,0,0,0,-0.0000011698,0.0000018398,0.0000009822,0.00002329 -"D819", "Xian 1980", "E224",0,0,0,"China",0,0,0,0,0,0,0,0 +"D886","Reseau Geodesique Francais 1993","E899",-752,-358,-179,"France",0,0,0,0,-0.0000011698,0.0000018398,0.0000009822,0.00002329 +"D887","Reseau National Belge 1972","E899",-752,-358,-179,"Belgium",0,0,0,0,-0.0000011698,0.0000018398,0.0000009822,0.00002329 +"D819","Xian 1980","E224",0,0,0,"China",0,0,0,0,0,0,0,0 "D820","Korea 2000","E008",0.0,0.0,0.0,"South Korea",0,0,0,0 "D821","Pulkovo 1995","E015",24.47,-130.89,-81.56,"Russian Federation",0,0,0,0,0,0,-0.13,-0.22 "D822","Beijing 1954","E015",15.8,-154.4,-82.3,"China",0,0,0,0 @@ -520,10 +520,10 @@ "D837","Datum Geodesi Nasional 1995 (DGN95)","E012",0.0,0.0,0.0,"Indonesia",0,0,0,0 "D838","Korea 1995","E012",0.0,0.0,0.0,"South Korea",0,0,0,0 "D839","Institut Geographique du Congo Belge (IGCB) 1955","E001",-79.9,-158,-168.9,"The Democratic Republic of the Congo (Zaire) - Lower Congo",0,0,0,0 -"D894", "WGS 1984 semi-major","E020",0,0,0,"WGS 1984 Auxiliary Sphere semi-major axis",0,0,0,0 +"D894","WGS 1984 semi-major","E020",0,0,0,"WGS 1984 Auxiliary Sphere semi-major axis",0,0,0,0 "D895","ATS77","E910",-95.323,166.098,-69.942,"Maritime Provinces",0,0,0,0,0.215,1.031,-0.047,1.922 "D896","GosatCAIL1B+ EarthRadius","E025",0,0,0,"GosatCAIL1B+ EarthRadius",0,0,0,0 -"D897","Myanmar","E227",247,785,277,"Myanmar",0,0,0,0 +"D897","Myanmar","E227",247,785,277,"Myanmar",0,0,0,0 "D900","China 2000","E231",0,0,0,"China 2000",0,0,0,0 "D901","Nouvelle Triangulation Francaise (grid shift)","E202","France","-D350","ntf_r93.gsb" "D902","PRS92","E000",-127.62153,-67.24339,-47.04738,"Philippines Reference System 1992",0,0,0,0,3.06803,-4.90297,-1.57807,-1.06002 diff --git a/ogr/data/pci_ellips.txt b/ogr/data/pci_ellips.txt index 71f54fb2e080..ff6ae2984063 100644 --- a/ogr/data/pci_ellips.txt +++ b/ogr/data/pci_ellips.txt @@ -22,13 +22,13 @@ ! Ellipsoid_code is the code that uniquely identifies the ellipsoid ! within PCI software ! Description_string is a short description that helps users to identify -! the ellipsoid. It may be listed, for example, in a dropdown list in +! the ellipsoid. It may be listed, for example, in a dropdown list in ! a PCI dialog box. ! Semimajor_axis_m is the ellipsoid semi-major (equatorial) axis length in metres. ! Semiminor_axis_m is the ellipsoid semi-minor (polar) axis length in metres. ! ! Any extra fields may be added after these four elements if desired; they will -! not be read by PCI software but may be helpful for the user. +! not be read by PCI software but may be helpful for the user. ! ! NOTE: The range of "E908" to "E998" is set aside for ! the use of local customer development. @@ -55,9 +55,9 @@ "E019","Normal Sphere",6370997.,6370997. "E020","WGS 84 semimajor axis",6378137.,6378137. 
"E021","WGS 84 semiminor axis",6356752.314245,6356752.314245 -"E022", "Clarke 1866 Authalic Sphere", 6370997.000000, 6370997.000000 -"E023", "GRS 1980 Authalic Sphere", 6371007.000000, 6371007.000000 -"E024", "International 1924 Authalic Sphere", 6371228.000000, 6371228.000000 +"E022","Clarke 1866 Authalic Sphere", 6370997.000000, 6370997.000000 +"E023","GRS 1980 Authalic Sphere", 6371007.000000, 6371007.000000 +"E024","International 1924 Authalic Sphere", 6371228.000000, 6371228.000000 "E025","GosatCAIL1B+ EarthRadius",6371008.77138,6371008.77138 "E200","Indonesian 1974",6378160.,6356774.504086 "E201","Everest (Pakistan)",6377309.613,6356108.570542 @@ -86,32 +86,32 @@ "E224","Xian 1980",6378140.0,6356755.288 "E225","EMEP Sphere",6370000,6370000 "E226","Everest (India and Nepal)",6377301.243,6356100.228368 -"E227", "Everest (1830 Definition)", 6377299.365595, 6356098.359005, "EPSG:7042" -"E228", "Danish 1876", 6377019.270000, 6355762.539100 -"E229", "Bessel Namibia (GLM)", 6377483.865280, 6356165.383246 -"E230", "PZ-90", 6378136.000000, 6356751.361746 -"E231", "CGCS2000", 6378137.000000, 6356752.314140 -"E232", "IAG 1975", 6378140.000000, 6356755.288158 -"E233", "NWL 9D", 6378145.000000, 6356759.769489 -"E234", "Hughes 1980", 6378273.000000, 6356889.449000 -"E235", "Clarke 1880 (international foot)", 6378306.369600, 6356571.996000 -"E236", "Clarke 1866 Michigan", 6378450.047549, 6356826.621488 -"E237", "APL 4.5 (1968)", 6378144.000000, 6356757.338698 -"E238", "Airy (War Office)", 6377542.178, 6356235.764 -"E239", "Clarke 1858 (DIGEST)", 6378235.600, 6356560.140 -"E240", "Clarke 1880 (Palestine)", 6378300.782, 6356566.427 -"E241", "Clarke 1880 (Syria)", 6378247.842, 6356513.671 -"E242", "Clarke 1880 (Fiji)", 6378301.000, 6356566.548 -"E243", "Andrae", 6377104.430, 6355847.415 -"E244", "Delambre 1810", 6376985.228, 6356323.664 -"E245", "Delambre (Carte de France)", 6376985.000, 6356323.436 -"E246", "Germaine (Djibouti)", 6378284.000, 6356589.156 -"E247", "Hayford 1909", 6378388.000, 6356909.000 -"E248", "Krayenhoff 1827", 6376950.400, 6356356.341 -"E249", "Plessis Reconstituted", 6376523.994, 6355862.907 -"E250", "GRS 1967", 6378160.000, 6356774.516 -"E251", "Svanberg", 6376797.000, 6355837.971 -"E252", "Walbeck 1819 (Planheft 1942)", 6376895.000, 6355834.000 +"E227","Everest (1830 Definition)", 6377299.365595, 6356098.359005,"EPSG:7042" +"E228","Danish 1876", 6377019.270000, 6355762.539100 +"E229","Bessel Namibia (GLM)", 6377483.865280, 6356165.383246 +"E230","PZ-90", 6378136.000000, 6356751.361746 +"E231","CGCS2000", 6378137.000000, 6356752.314140 +"E232","IAG 1975", 6378140.000000, 6356755.288158 +"E233","NWL 9D", 6378145.000000, 6356759.769489 +"E234","Hughes 1980", 6378273.000000, 6356889.449000 +"E235","Clarke 1880 (international foot)", 6378306.369600, 6356571.996000 +"E236","Clarke 1866 Michigan", 6378450.047549, 6356826.621488 +"E237","APL 4.5 (1968)", 6378144.000000, 6356757.338698 +"E238","Airy (War Office)", 6377542.178, 6356235.764 +"E239","Clarke 1858 (DIGEST)", 6378235.600, 6356560.140 +"E240","Clarke 1880 (Palestine)", 6378300.782, 6356566.427 +"E241","Clarke 1880 (Syria)", 6378247.842, 6356513.671 +"E242","Clarke 1880 (Fiji)", 6378301.000, 6356566.548 +"E243","Andrae", 6377104.430, 6355847.415 +"E244","Delambre 1810", 6376985.228, 6356323.664 +"E245","Delambre (Carte de France)", 6376985.000, 6356323.436 +"E246","Germaine (Djibouti)", 6378284.000, 6356589.156 +"E247","Hayford 1909", 6378388.000, 6356909.000 +"E248","Krayenhoff 1827", 6376950.400, 6356356.341 +"E249","Plessis 
Reconstituted", 6376523.994, 6355862.907 +"E250","GRS 1967", 6378160.000, 6356774.516 +"E251","Svanberg", 6376797.000, 6355837.971 +"E252","Walbeck 1819 (Planheft 1942)", 6376895.000, 6355834.000 "E333","Bessel 1841 (Japan By Law)",6377397.155,6356078.963 "E600","D-PAF (Orbits)",6378144.0,6356759.0 "E601","Test Data Set 1",6378144.0,6356759.0 From 52bd7ca5451cde02e905179d435f04f799313a3c Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 18 Apr 2024 19:13:50 +0200 Subject: [PATCH 078/230] FlatGeobuf: CreateFeature(): error out if a string context is not valid UTF-8 (fixes #7458) --- ogr/ogrsf_frmts/flatgeobuf/ogrflatgeobuflayer.cpp | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/ogr/ogrsf_frmts/flatgeobuf/ogrflatgeobuflayer.cpp b/ogr/ogrsf_frmts/flatgeobuf/ogrflatgeobuflayer.cpp index 1cabdd37294d..76cb1163610c 100644 --- a/ogr/ogrsf_frmts/flatgeobuf/ogrflatgeobuflayer.cpp +++ b/ogr/ogrsf_frmts/flatgeobuf/ogrflatgeobuflayer.cpp @@ -2175,6 +2175,15 @@ OGRErr OGRFlatGeobufLayer::ICreateFeature(OGRFeature *poNewFeature) "ICreateFeature: String too long"); return OGRERR_FAILURE; } + if (!CPLIsUTF8(field->String, static_cast<int>(len))) + { + CPLError(CE_Failure, CPLE_AppDefined, + "ICreateFeature: String '%s' is not a valid UTF-8 " + "string", + field->String); + return OGRERR_FAILURE; + } + // Valid cast since feature_max_buffer_size is 2 GB uint32_t l_le = static_cast<uint32_t>(len); CPL_LSBPTR32(&l_le); From 81647093d888573da3cf51ea01fd1a25abe2485c Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 18 Apr 2024 19:21:17 +0200 Subject: [PATCH 079/230] CI: remove disabled mingw_w64 configuration (fixes #6335) --- .github/workflows/mingw_w64.yml.disabled | 39 ------ .github/workflows/mingw_w64/install-python.sh | 65 ---------- .github/workflows/mingw_w64/start.sh | 122 ------------------ 3 files changed, 226 deletions(-) delete mode 100644 .github/workflows/mingw_w64.yml.disabled delete mode 100755 .github/workflows/mingw_w64/install-python.sh delete mode 100755 .github/workflows/mingw_w64/start.sh diff --git a/.github/workflows/mingw_w64.yml.disabled b/.github/workflows/mingw_w64.yml.disabled deleted file mode 100644 index bd304e667f73..000000000000 --- a/.github/workflows/mingw_w64.yml.disabled +++ /dev/null @@ -1,39 +0,0 @@ -name: mingw_w64 build - -on: - push: - paths-ignore: - - 'doc/**' - branches-ignore: - - 'backport**' - pull_request: - paths-ignore: - - 'doc/**' - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} - cancel-in-progress: true - -permissions: - contents: read - -jobs: - - mingw_w64_build: - runs-on: ubuntu-latest - if: "!contains(github.event.head_commit.message, '[ci skip]') && !contains(github.event.head_commit.message, '[skip ci]')" - steps: - - name: Checkout - uses: actions/checkout@v3 - - - name: Cache - uses: actions/cache@v3 - id: cache - with: - path: | - ${{ github.workspace }}/ccache.tar.gz - key: ${{ runner.os }}-cache-mingw_w64-${{ github.run_id }} - restore-keys: ${{ runner.os }}-cache-mingw_w64- - - - name: Build - run: docker run --privileged=true -e CI -e WORK_DIR="$PWD" -v $PWD:$PWD -v /var/run/docker.sock:/var/run/docker.sock ubuntu:18.04 $PWD/.github/workflows/mingw_w64/start.sh diff --git a/.github/workflows/mingw_w64/install-python.sh b/.github/workflows/mingw_w64/install-python.sh deleted file mode 100755 index cb609440d06a..000000000000 --- a/.github/workflows/mingw_w64/install-python.sh +++ /dev/null @@ -1,65 +0,0 @@ -#!/bin/bash - -# From 
https://askubuntu.com/a/1200679 -# Licensed under https://creativecommons.org/licenses/by-sa/4.0/ - -set -e - -echo "---------------------------------" -echo "-------- setup wine prefix ------" -echo "---------------------------------" -# We need the developer version of wine. We need at least version 4.14 (see link). -# This is the earliest version I've seen reported to work with python3 well -# Without this, we'd have to run the embedded install of python which is riddled -# with annoying issues. - -# see: https://appdb.winehq.org/objectManager.php?sClass=version&iId=38187 - - -echo "------ Installing required apt packages ------" -apt update -apt install -y wget gnupg software-properties-common apt-utils --no-install-recommends - -echo "------ Add latest wine repo ------" -# Need at least wine 4.14 to install python 3.7 -dpkg --add-architecture i386 -wget -nc https://dl.winehq.org/wine-builds/winehq.key -apt-key add winehq.key -apt-add-repository 'deb https://dl.winehq.org/wine-builds/ubuntu/ bionic main' -apt update - -# Add repo for faudio package. Required for winedev -add-apt-repository -y ppa:cybermax-dexter/sdl2-backport - -echo "-------- Install wine-dev ------" - -apt install -y --no-install-recommends \ - winehq-devel=5.21~bionic \ - wine-devel=5.21~bionic \ - wine-devel-i386=5.21~bionic \ - wine-devel-amd64=5.21~bionic \ - winetricks \ - xvfb # This is for making a dummy X server display - -echo "------ Download python ------" -wget https://www.python.org/ftp/python/3.7.6/python-3.7.6-amd64.exe -#wget https://www.python.org/ftp/python/3.7.6/python-3.7.6.exe - -echo "------ Init wine prefix ------" -WINEPREFIX=~/.wine64 WINARCH=win64 winetricks \ - corefonts \ - win10 - -# Setup dummy screen -Xvfb :0 -screen 0 1024x768x16 & -jid=$! - -echo "------ Install python ------" -DISPLAY=:0.0 WINEPREFIX=~/.wine64 wine cmd /c \ - python-3.7.6-amd64.exe \ - /quiet \ - PrependPath=1 \ - && echo "Python Installation complete!" -# Display=:0.0 redirects wine graphical output to the dummy display. -# This is to avoid docker errors as the python installer requires a display, -# even when quiet install is specified. diff --git a/.github/workflows/mingw_w64/start.sh b/.github/workflows/mingw_w64/start.sh deleted file mode 100755 index c0cb5487f8ab..000000000000 --- a/.github/workflows/mingw_w64/start.sh +++ /dev/null @@ -1,122 +0,0 @@ -#!/bin/sh - -set -e - -SCRIPT_DIR=$(dirname "$0") -case $SCRIPT_DIR in - "/"*) - ;; - ".") - SCRIPT_DIR=$(pwd) - ;; - *) - SCRIPT_DIR=$(pwd)/$(dirname "$0") - ;; -esac - -# Emulate 'mingw_w64' Travis-CI target for the purpose of test skipping -TRAVIS=yes -export TRAVIS -TRAVIS_BRANCH=mingw_w64 -export TRAVIS_BRANCH - -apt-get update -y -DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ - sudo wget tzdata - -USER=root -export USER - -cd "$WORK_DIR" - -if test -f "$WORK_DIR/ccache.tar.gz"; then - echo "Restoring ccache..." 
- (cd $HOME && tar xzf "$WORK_DIR/ccache.tar.gz") -fi - -# Install python -sh $SCRIPT_DIR/install-python.sh -export WINEPREFIX=$HOME/.wine64 - -sudo apt-get install -y --no-install-recommends \ - ccache automake cmake \ - binutils-mingw-w64-x86-64 \ - gcc-mingw-w64-x86-64 \ - g++-mingw-w64-x86-64 \ - g++-mingw-w64 \ - mingw-w64-tools \ - make sqlite3 \ - curl - -export CCACHE_CPP2=yes -export CC="ccache x86_64-w64-mingw32-gcc" -export CXX="ccache x86_64-w64-mingw32-g++" - -# Select posix/pthread for std::mutex -update-alternatives --set x86_64-w64-mingw32-gcc /usr/bin/x86_64-w64-mingw32-gcc-posix -update-alternatives --set x86_64-w64-mingw32-g++ /usr/bin/x86_64-w64-mingw32-g++-posix - -ccache -M 1G -ccache -s - -ln -sf /usr/lib/gcc/x86_64-w64-mingw32/7.3-posix/libstdc++-6.dll $WINEPREFIX/drive_c/windows/ -ln -sf /usr/lib/gcc/x86_64-w64-mingw32/7.3-posix/libgcc_s_seh-1.dll $WINEPREFIX/drive_c/windows/ -ln -sf /usr/x86_64-w64-mingw32/lib/libwinpthread-1.dll $WINEPREFIX/drive_c/windows/ - -$SCRIPT_DIR/../common_install.sh - -# build sqlite3 -wget https://sqlite.org/2018/sqlite-autoconf-3250100.tar.gz -tar xzf sqlite-autoconf-3250100.tar.gz -(cd sqlite-autoconf-3250100 && CFLAGS="-O2 -DSQLITE_ENABLE_COLUMN_METADATA" ./configure --host=x86_64-w64-mingw32 --enable-rtree --prefix=/tmp/install && make -j$(nproc) && make install) - -# Build proj -(cd proj; ./autogen.sh && CFLAGS='-DPROJ_RENAME_SYMBOLS' CXXFLAGS='-DPROJ_RENAME_SYMBOLS' SQLITE3_CFLAGS='-I/tmp/install/include' SQLITE3_LIBS='-L/tmp/install/lib -lsqlite3' ./configure --disable-static --host=x86_64-w64-mingw32 --prefix=/tmp/install && make -j$(nproc)) -(cd proj; sudo make -j$(nproc) install) - -# build GDAL -mkdir build -cd build -cmake .. -DPROJ_INCLUDE_DIR=/tmp/install/include -DPROJ_LIBRARY=/tmp/install/lib/libproj.dll.a -DSQLITE3_INCLUDE_DIR=/tmp/install/include -DSQLITE3_LIBRARY=/tmp/install/lib/libsqlite3.dll.a -DCMAKE_C_FLAGS="-DPROJ_RENAME_SYMBOLS -Werror" -DCMAKE_CXX_FLAGS="-DPROJ_RENAME_SYMBOLS -Werror" -DBUILD_PYTHON_BINDINGS=OFF -DCMAKE_SYSTEM_NAME=Windows -DCMAKE_SYSTEM_VERSION=1 -DCMAKE_C_COMPILER=x86_64-w64-mingw32-gcc -DCMAKE_CXX_COMPILER=x86_64-w64-mingw32-g++ -DCMAKE_RC_COMPILER=86_64-w64-mingw32-windres -DCMAKE_RANLIB=x86_64-w64-mingw32-ranlib -DCMAKE_FIND_ROOT_PATH=/usr/x86_64-w64-mingw32 -DCMAKE_FIND_ROOT_PATH_MODE_PROGRAM=NEVER -DCMAKE_FIND_ROOT_PATH_MODE_LIBRARY=ONLY -DCMAKE_FIND_ROOT_PATH_MODE_INCLUDE=ONLY - -make -j$(nproc) -ln -sf $PWD/libgdal-*.dll $WINEPREFIX/drive_c/windows -ln -sf /tmp/install/bin/libproj-15.dll $WINEPREFIX/drive_c/windows -ln -sf /tmp/install/bin/libsqlite3-0.dll $WINEPREFIX/drive_c/windows -cd .. - -cd swig/python -ln -s "$WINEPREFIX/drive_c/users/root/Local Settings/Application Data/Programs/Python/Python37" $WINEPREFIX/drive_c/Python37 -gendef $WINEPREFIX/drive_c/Python37/python37.dll -x86_64-w64-mingw32-dlltool --dllname $WINEPREFIX/drive_c/Python37/python37.dll --input-def python37.def --output-lib $WINEPREFIX/drive_c/Python37/libs/libpython37.a -bash fallback_build_mingw32_under_unix_py37.sh -cd ../.. - -ccache -s - -echo "Saving ccache..." 
-rm -f "$WORK_DIR/ccache.tar.gz" -(cd $HOME && tar czf "$WORK_DIR/ccache.tar.gz" .ccache) - - -wine64 build/apps/gdalinfo.exe --version -rm -f swig/python/gdal-utils/osgeo_utils/samples/validate_gpkg.py # the sqlite3 lib of python lacks the rtree module -cd autotest -# Does not work under wine -rm -f gcore/rfc30.py -rm -f pyscripts/data/test_utf8* -rm -rf pyscripts/data/漢字 - -export PYTHON_DIR="$WINEPREFIX/drive_c/Python37" - -# install test dependencies -wine64 "$PYTHON_DIR/Scripts/pip.exe" install -U -r ./requirements.txt - -export PYTEST="wine64 $PYTHON_DIR/python.exe -m pytest -vv -p no:sugar --color=no" - - -# Run all the Python autotests -GDAL_DATA=$PWD/../data \ - PYTHONPATH=$PWD/../swig/python/build/lib.win-amd64-3.7 \ - PATH=$PWD/../gdal/build:$PWD/../build/apps:$PWD:$PATH \ - $PYTEST From c5db6df86a853c714813ec7faf8c6298d035a88e Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Thu, 18 Apr 2024 13:44:30 -0400 Subject: [PATCH 080/230] Python bindings: Avoid crash when accessing closed file --- autotest/gcore/vsifile.py | 25 ++++++++++++++++++++ swig/include/python/gdal_python.i | 38 +++++++++++++++++++++++++++++++ 2 files changed, 63 insertions(+) diff --git a/autotest/gcore/vsifile.py b/autotest/gcore/vsifile.py index 1054519c0cb7..adc724048e26 100755 --- a/autotest/gcore/vsifile.py +++ b/autotest/gcore/vsifile.py @@ -1316,3 +1316,28 @@ def test_vsifile_eof_cache_read(tmp_path): data = gdal.VSIFReadL(1, 75000, f) # reads past end of file gdal.VSIFCloseL(f) assert data == b"x" * 40000 + + +def test_vsifile_use_closed_file(tmp_path): + + f = gdal.VSIFOpenL(tmp_path / "file.txt", "wb") + assert gdal.VSIFWriteL("0123456789", 1, 10, f) == 10 + gdal.VSIFCloseL(f) + + with pytest.raises(ValueError, match="closed file"): + gdal.VSIFCloseL(f) + + with pytest.raises(ValueError, match="closed file"): + gdal.VSIFEofL(f) + + with pytest.raises(ValueError, match="closed file"): + gdal.VSIFSeekL(f, 0, 0) + + with pytest.raises(ValueError, match="closed file"): + gdal.VSIFTellL(f) + + with pytest.raises(ValueError, match="closed file"): + gdal.VSIFTruncateL(f, 0) + + with pytest.raises(ValueError, match="closed file"): + gdal.VSIFWriteL("0123456789", 1, 10, f) diff --git a/swig/include/python/gdal_python.i b/swig/include/python/gdal_python.i index e8713e76a783..c16c8b20563b 100644 --- a/swig/include/python/gdal_python.i +++ b/swig/include/python/gdal_python.i @@ -352,6 +352,44 @@ void wrapper_VSIGetMemFileBuffer(const char *utf8_path, GByte **out, vsi_l_offse %clear (GByte **out, vsi_l_offset *length); +%pythonappend VSIFCloseL %{ + args[0].this = None +%} + +%pythonprepend VSIFCloseL %{ + if args[0].this is None: + raise ValueError("I/O operation on closed file.") +%} + +%pythonprepend VSIFEofL %{ + if args[0].this is None: + raise ValueError("I/O operation on closed file.") +%} + +%pythonprepend VSIFFlushL %{ + if args[0].this is None: + raise ValueError("I/O operation on closed file.") +%} + +%pythonprepend VSIFSeekL %{ + if args[0].this is None: + raise ValueError("I/O operation on closed file.") +%} + +%pythonprepend VSIFTellL %{ + if args[0].this is None: + raise ValueError("I/O operation on closed file.") +%} + +%pythonprepend VSIFTruncateL %{ + if args[0].this is None: + raise ValueError("I/O operation on closed file.") +%} + +%pythonprepend wrapper_VSIFWriteL %{ + if args[3].this is None: + raise ValueError("I/O operation on closed file.") +%} /* -------------------------------------------------------------------- */ /* GDAL_GCP */ From 
7fbffb5394919b203f900afdc0d27f49ff1ceaba Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 18 Apr 2024 18:55:19 +0200 Subject: [PATCH 081/230] gdalwarp: allow passing a WKT geometry as -cutline value, and add -cutline_srs Fixes #7658 --- apps/gdalwarp_lib.cpp | 86 +++++++++++++++++++------ autotest/utilities/test_gdalwarp_lib.py | 44 ++++++++++++- doc/source/programs/gdalwarp.rst | 14 +++- swig/include/python/gdal_python.i | 14 +++- 4 files changed, 134 insertions(+), 24 deletions(-) diff --git a/apps/gdalwarp_lib.cpp b/apps/gdalwarp_lib.cpp index b409d2635da8..c2e82d772623 100644 --- a/apps/gdalwarp_lib.cpp +++ b/apps/gdalwarp_lib.cpp @@ -201,9 +201,13 @@ struct GDALWarpAppOptions ("NAME1=VALUE1","NAME2=VALUE2",...) */ CPLStringList aosTransformerOptions{}; - /*! enable use of a blend cutline from the name OGR support pszCutlineDSName + /*! enable use of a blend cutline from a vector dataset name or a WKT + * geometry */ - std::string osCutlineDSName{}; + std::string osCutlineDSNameOrWKT{}; + + /*! cutline SRS */ + std::string osCutlineSRS{}; /*! the named layer to be selected from the cutline datasource */ std::string osCLayer{}; @@ -250,11 +254,10 @@ struct GDALWarpAppOptions std::vector<int> anDstBands{}; }; -static CPLErr LoadCutline(const std::string &osCutlineDSName, - const std::string &oszCLayer, - const std::string &osCWHERE, - const std::string &osCSQL, - OGRGeometryH *phCutlineRet); +static CPLErr +LoadCutline(const std::string &osCutlineDSNameOrWKT, const std::string &osSRS, + const std::string &oszCLayer, const std::string &osCWHERE, + const std::string &osCSQL, OGRGeometryH *phCutlineRet); static CPLErr TransformCutlineToSource(GDALDataset *poSrcDS, OGRGeometry *poCutline, char ***ppapszWarpOptions, @@ -1277,7 +1280,8 @@ static GDALDatasetH GDALWarpIndirect(const char *pszDest, GDALDriverH hDriver, psOptions->dfMinY == 0 && psOptions->dfMaxX == 0 && psOptions->dfMaxY == 0 && psOptions->dfXRes == 0 && psOptions->dfYRes == 0 && psOptions->nForcePixels == 0 && - psOptions->nForceLines == 0 && psOptions->osCutlineDSName.empty() && + psOptions->nForceLines == 0 && + psOptions->osCutlineDSNameOrWKT.empty() && CanUseBuildVRT(nSrcCount, pahSrcDS)) { CPLStringList aosArgv; @@ -1649,10 +1653,11 @@ static bool ProcessCutlineOptions(int nSrcCount, GDALDatasetH *pahSrcDS, GDALWarpAppOptions *psOptions, OGRGeometryH &hCutline) { - if (!psOptions->osCutlineDSName.empty()) + if (!psOptions->osCutlineDSNameOrWKT.empty()) { CPLErr eError; - eError = LoadCutline(psOptions->osCutlineDSName, psOptions->osCLayer, + eError = LoadCutline(psOptions->osCutlineDSNameOrWKT, + psOptions->osCutlineSRS, psOptions->osCLayer, psOptions->osCWHERE, psOptions->osCSQL, &hCutline); if (eError == CE_Failure) { @@ -3325,21 +3330,40 @@ static bool ValidateCutline(const OGRGeometry *poGeom, bool bVerbose) /* Load blend cutline from OGR datasource. 
*/ /************************************************************************/ -static CPLErr LoadCutline(const std::string &osCutlineDSName, - const std::string &osCLayer, +static CPLErr LoadCutline(const std::string &osCutlineDSNameOrWKT, + const std::string &osSRS, const std::string &osCLayer, const std::string &osCWHERE, const std::string &osCSQL, OGRGeometryH *phCutlineRet) { + if (STARTS_WITH_CI(osCutlineDSNameOrWKT.c_str(), "POLYGON(") || + STARTS_WITH_CI(osCutlineDSNameOrWKT.c_str(), "POLYGON (") || + STARTS_WITH_CI(osCutlineDSNameOrWKT.c_str(), "MULTIPOLYGON(") || + STARTS_WITH_CI(osCutlineDSNameOrWKT.c_str(), "MULTIPOLYGON (")) + { + std::unique_ptr<OGRSpatialReference, OGRSpatialReferenceReleaser> poSRS; + if (!osSRS.empty()) + { + poSRS.reset(new OGRSpatialReference()); + poSRS->SetAxisMappingStrategy(OAMS_TRADITIONAL_GIS_ORDER); + poSRS->SetFromUserInput(osSRS.c_str()); + } + OGRGeometry *poGeom = nullptr; + OGRGeometryFactory::createFromWkt(osCutlineDSNameOrWKT.c_str(), + poSRS.get(), &poGeom); + *phCutlineRet = OGRGeometry::ToHandle(poGeom); + return *phCutlineRet ? CE_None : CE_Failure; + } + /* -------------------------------------------------------------------- */ /* Open source vector dataset. */ /* -------------------------------------------------------------------- */ auto poDS = std::unique_ptr<GDALDataset>( - GDALDataset::Open(osCutlineDSName.c_str(), GDAL_OF_VECTOR)); + GDALDataset::Open(osCutlineDSNameOrWKT.c_str(), GDAL_OF_VECTOR)); if (poDS == nullptr) { CPLError(CE_Failure, CPLE_AppDefined, "Cannot open %s.", - osCutlineDSName.c_str()); + osCutlineDSNameOrWKT.c_str()); return CE_Failure; } @@ -3412,7 +3436,18 @@ static CPLErr LoadCutline(const std::string &osCutlineDSName, /* -------------------------------------------------------------------- */ /* Ensure the coordinate system gets set on the geometry. 
*/ /* -------------------------------------------------------------------- */ - poMultiPolygon->assignSpatialReference(poLayer->GetSpatialRef()); + if (!osSRS.empty()) + { + std::unique_ptr<OGRSpatialReference, OGRSpatialReferenceReleaser> poSRS( + new OGRSpatialReference()); + poSRS->SetAxisMappingStrategy(OAMS_TRADITIONAL_GIS_ORDER); + poSRS->SetFromUserInput(osSRS.c_str()); + poMultiPolygon->assignSpatialReference(poSRS.get()); + } + else + { + poMultiPolygon->assignSpatialReference(poLayer->GetSpatialRef()); + } *phCutlineRet = OGRGeometry::ToHandle(poMultiPolygon.release()); @@ -5792,10 +5827,23 @@ GDALWarpAppOptionsGetParser(GDALWarpAppOptions *psOptions, .help(_("Mode resampling.")); argParser->add_argument("-cutline") - .metavar("<datasource>") - .store_into(psOptions->osCutlineDSName) - .help(_("Enable use of a blend cutline from the name OGR support " - "datasource.")); + .metavar("<datasource>|<WKT>") + .store_into(psOptions->osCutlineDSNameOrWKT) + .help(_("Enable use of a blend cutline from the name of a vector " + "dataset or a WKT geometry.")); + + argParser->add_argument("-cutline_srs") + .metavar("<srs_def>") + .action( + [psOptions](const std::string &s) + { + if (!IsValidSRS(s.c_str())) + { + throw std::invalid_argument("Invalid SRS for -cutline_srs"); + } + psOptions->osCutlineSRS = s; + }) + .help(_("Sets/overrides cutline SRS.")); argParser->add_argument("-cwhere") .metavar("<expression>") diff --git a/autotest/utilities/test_gdalwarp_lib.py b/autotest/utilities/test_gdalwarp_lib.py index 5a3e6b14d71e..c7b57d2e066c 100755 --- a/autotest/utilities/test_gdalwarp_lib.py +++ b/autotest/utilities/test_gdalwarp_lib.py @@ -455,7 +455,7 @@ def test_gdalwarp_lib_19(testgdalwarp_gcp_tif): @pytest.mark.require_driver("CSV") -def test_gdalwarp_lib_21(): +def test_gdalwarp_lib_cutline(): ds = gdal.Warp( "", @@ -471,6 +471,48 @@ def test_gdalwarp_lib_21(): ds = None +############################################################################### +# Test cutline from OGR datasource with cutlineSRS + + +@pytest.mark.require_driver("CSV") +def test_gdalwarp_lib_cutline_with_cutline_srs(): + + ds = gdal.Warp( + "", + "../gcore/data/utmsmall.tif", + format="MEM", + cutlineDSName="data/cutline.csv", + cutlineLayer="cutline", + cutlineSRS="EPSG:26711", + ) + assert ds is not None + + assert ds.GetRasterBand(1).Checksum() == 19139, "Bad checksum" + + ds = None + + +############################################################################### +# Test cutline from WKT + + +def test_gdalwarp_lib_cutline_WKT(): + + ds = gdal.Warp( + "", + "../gcore/data/utmsmall.tif", + format="MEM", + cutlineWKT="POLYGON ((445125 3748212,442222 3748212,442222 3750366,445125 3750366,445125 3748212))", + cutlineSRS="EPSG:26711", + ) + assert ds is not None + + assert ds.GetRasterBand(1).Checksum() == 19139, "Bad checksum" + + ds = None + + ############################################################################### # Test cutline with sourceCRS != targetCRS and targetCRS == cutlineCRS diff --git a/doc/source/programs/gdalwarp.rst b/doc/source/programs/gdalwarp.rst index f7bd97d69458..639fa780d9be 100644 --- a/doc/source/programs/gdalwarp.rst +++ b/doc/source/programs/gdalwarp.rst @@ -30,7 +30,7 @@ Synopsis [-order <1|2|3>] [-refine_gcps <tolerance> [<minimum_gcps>]] [-to <NAME>=<VALUE>]... 
[-et <err_threshold>] [-wm <memory_in_mb>] [-srcnodata <value>[ <value>...]] [-dstnodata <value>[ <value>...]] [-tap] [-wt Byte|Int8|[U]Int{16|32|64}|CInt{16|32}|[C]Float{32|64}] - [-cutline <datasource>] [-cwhere <expression>] + [-cutline <datasource>|<WKT>] [-cutline_srs <srs_def>] [-cwhere <expression>] [[-cl <layername>]|[-csql <query>]] [-cblend <distance>] [-crop_to_cutline] [-nomd] [-cvmd <meta_conflict_value>] [-setci] [-oo <NAME>=<VALUE>]... [-doo <NAME>=<VALUE>]... [-ovr <level>|AUTO|AUTO-<n>|NONE] @@ -402,9 +402,17 @@ with control information. .. include:: options/co.rst -.. option:: -cutline <datasource> +.. option:: -cutline <datasource>|<WKT> - Enable use of a blend cutline from the name OGR support datasource. + Enable use of a blend cutline from the name of a vector dataset. + Starting with GDAL 3.9, a WKT geometry string starting with POLYGON or + MULTIPOLYGON can also be specified. + +.. option:: -cutline_srs <srs_def> + + .. versionadded:: 3.9 + + Sets or overrides the SRS of the cutline. .. option:: -cl <layername> diff --git a/swig/include/python/gdal_python.i b/swig/include/python/gdal_python.i index e8713e76a783..844c5bbbaf2d 100644 --- a/swig/include/python/gdal_python.i +++ b/swig/include/python/gdal_python.i @@ -2588,6 +2588,8 @@ def WarpOptions(options=None, format=None, srcNodata=None, dstNodata=None, multithread = False, tps = False, rpc = False, geoloc = False, polynomialOrder=None, transformerOptions=None, cutlineDSName=None, + cutlineWKT=None, + cutlineSRS=None, cutlineLayer=None, cutlineWhere=None, cutlineSQL=None, cutlineBlend=None, cropToCutline = False, copyMetadata = True, metadataConflictValue=None, setColorInterpretation = False, @@ -2661,7 +2663,11 @@ def WarpOptions(options=None, format=None, transformerOptions: list or dict of transformer options cutlineDSName: - cutline dataset name + cutline dataset name (mutually exclusive with cutlineDSName) + cutlineWKT: + cutline WKT geometry (POLYGON or MULTIPOLYGON) (mutually exclusive with cutlineWKT) + cutlineSRS: + set/override cutline SRS cutlineLayer: cutline layer name cutlineWhere: @@ -2795,7 +2801,13 @@ def WarpOptions(options=None, format=None, for opt in transformerOptions: new_options += ['-to', opt] if cutlineDSName is not None: + if cutlineWKT is not None: + raise Exception("cutlineDSName and cutlineWKT are mutually exclusive") new_options += ['-cutline', str(cutlineDSName)] + if cutlineWKT is not None: + new_options += ['-cutline', str(cutlineWKT)] + if cutlineSRS is not None: + new_options += ['-cutline_srs', str(cutlineSRS)] if cutlineLayer is not None: new_options += ['-cl', str(cutlineLayer)] if cutlineWhere is not None: From 3a8d3e00c8046565abd7039fe35cb6073ed05392 Mon Sep 17 00:00:00 2001 From: Daniel Baston <dbaston@gmail.com> Date: Thu, 18 Apr 2024 15:12:34 -0400 Subject: [PATCH 082/230] autotest vsiadls.py: use module scope for setup/cleanup --- autotest/gcore/vsiadls.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autotest/gcore/vsiadls.py b/autotest/gcore/vsiadls.py index d816378ac009..bceeed06175d 100755 --- a/autotest/gcore/vsiadls.py +++ b/autotest/gcore/vsiadls.py @@ -53,7 +53,7 @@ def open_for_read(uri): ############################################################################### -@pytest.fixture(autouse=True, scope="function") +@pytest.fixture(autouse=True, scope="module") def startup_and_cleanup(): with gdaltest.config_option("CPL_AZURE_VM_API_ROOT_URL", "disabled"): From f107f86697200b05c7e9208ee0ef9883a156fa7f Mon Sep 17 00:00:00 2001 From: Even 
Rouault <even.rouault@spatialys.com> Date: Thu, 18 Apr 2024 21:29:01 +0200 Subject: [PATCH 083/230] ogrinfo: add -limit <nb_features> Fixes https://github.com/OSGeo/gdal/issues/3413 --- apps/ogrinfo_lib.cpp | 17 ++ autotest/utilities/test_ogrinfo_lib.py | 373 +------------------------ doc/source/programs/ogrinfo.rst | 8 +- swig/include/python/gdal_python.i | 5 + 4 files changed, 31 insertions(+), 372 deletions(-) diff --git a/apps/ogrinfo_lib.cpp b/apps/ogrinfo_lib.cpp index 9a2b36e31ec4..397a9c3d9d23 100644 --- a/apps/ogrinfo_lib.cpp +++ b/apps/ogrinfo_lib.cpp @@ -78,6 +78,9 @@ struct GDALVectorInfoOptions bool bStdoutOutput = false; // only set by ogrinfo_bin int nRepeatCount = 1; + /*! Maximum number of features, or -1 if no limit. */ + GIntBig nLimit = -1; + // Only used during argument parsing bool bSummaryParser = false; bool bFeaturesParser = false; @@ -1465,8 +1468,16 @@ static void ReportOnLayer(CPLString &osRet, CPLJSONObject &oLayer, : 0; if (bJson) oLayer.Add("features", oFeatures); + GIntBig nFeatureCount = 0; for (auto &poFeature : poLayer) { + if (psOptions->nLimit >= 0 && + nFeatureCount >= psOptions->nLimit) + { + break; + } + ++nFeatureCount; + if (bJson) { CPLJSONObject oFeature; @@ -2315,6 +2326,12 @@ static std::unique_ptr<GDALArgumentParser> GDALVectorInfoOptionsGetParser( .help(_("Enable listing of features")); } + argParser->add_argument("-limit") + .metavar("<nb_features>") + .action([psOptions](const std::string &s) + { psOptions->nLimit = CPLAtoGIntBig(s.c_str()); }) + .help(_("Limit the number of features per layer.")); + argParser->add_argument("-fields") .choices("YES", "NO") .metavar("YES|NO") diff --git a/autotest/utilities/test_ogrinfo_lib.py b/autotest/utilities/test_ogrinfo_lib.py index 632cd76edbf1..e46db5212df0 100755 --- a/autotest/utilities/test_ogrinfo_lib.py +++ b/autotest/utilities/test_ogrinfo_lib.py @@ -134,7 +134,7 @@ def test_ogrinfo_lib_json_features(): ds = gdal.OpenEx("../ogr/data/poly.shp") - ret = gdal.VectorInfo(ds, format="json", dumpFeatures=True) + ret = gdal.VectorInfo(ds, format="json", dumpFeatures=True, limit=1) del ret["description"] del ret["layers"][0]["geometryFields"][0]["coordinateSystem"]["wkt"] if "projjson" in ret["layers"][0]["geometryFields"][0]["coordinateSystem"]: @@ -220,376 +220,7 @@ def test_ogrinfo_lib_json_features(): ] ], }, - }, - { - "type": "Feature", - "properties": { - "AREA": 247328.172, - "EAS_ID": 179, - "PRFEDEA": "35043423", - }, - "fid": 1, - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [480035.34375, 4765558.5], - [480039.03125, 4765539.5], - [479730.375, 4765400.5], - [479647.0, 4765369.5], - [479690.1875, 4765259.5], - [479819.84375, 4765180.5], - [479779.84375, 4765109.5], - [479681.78125, 4764940.0], - [479468.0, 4764942.5], - [479411.4375, 4764940.5], - [479353.0, 4764939.5], - [479208.65625, 4764882.5], - [479196.8125, 4764879.0], - [479123.28125, 4765015.0], - [479046.53125, 4765117.0], - [479029.71875, 4765110.5], - [479014.9375, 4765147.5], - [479149.9375, 4765200.5], - [479639.625, 4765399.5], - [480035.34375, 4765558.5], - ] - ], - }, - }, - { - "type": "Feature", - "properties": { - "AREA": 261752.781, - "EAS_ID": 171, - "PRFEDEA": "35043414", - }, - "fid": 2, - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [479819.84375, 4765180.5], - [479859.875, 4765270.0], - [479909.875, 4765370.0], - [479980.21875, 4765409.5], - [480019.71875, 4765319.5], - [480059.90625, 4765239.5], - [480088.8125, 4765139.5], - [480082.96875, 4765049.5], - [480000.28125, 4765043.0], - 
[479934.96875, 4765020.0], - [479895.125, 4765000.0], - [479734.375, 4764865.0], - [479680.28125, 4764852.0], - [479644.78125, 4764827.5], - [479637.875, 4764803.0], - [479617.21875, 4764760.0], - [479587.28125, 4764718.0], - [479548.03125, 4764693.5], - [479504.90625, 4764609.5], - [479239.8125, 4764505.0], - [479117.8125, 4764847.0], - [479196.8125, 4764879.0], - [479208.65625, 4764882.5], - [479353.0, 4764939.5], - [479411.4375, 4764940.5], - [479468.0, 4764942.5], - [479681.78125, 4764940.0], - [479779.84375, 4765109.5], - [479819.84375, 4765180.5], - ] - ], - }, - }, - { - "type": "Feature", - "properties": { - "AREA": 547597.188, - "EAS_ID": 173, - "PRFEDEA": "35043416", - }, - "fid": 3, - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [479014.9375, 4765147.5], - [479029.71875, 4765110.5], - [479117.8125, 4764847.0], - [479239.8125, 4764505.0], - [479305.875, 4764361.0], - [479256.03125, 4764314.5], - [479220.90625, 4764212.5], - [479114.5, 4764174.0], - [479018.28125, 4764418.5], - [478896.9375, 4764371.0], - [478748.8125, 4764308.5], - [478503.03125, 4764218.0], - [478461.75, 4764337.5], - [478443.9375, 4764400.5], - [478447.8125, 4764454.0], - [478448.6875, 4764531.5], - [478502.1875, 4764541.5], - [478683.0, 4764730.5], - [478621.03125, 4764788.5], - [478597.34375, 4764766.5], - [478532.5, 4764695.5], - [478460.125, 4764615.0], - [478408.0625, 4764654.0], - [478315.53125, 4764876.0], - [478889.25, 4765100.0], - [479014.9375, 4765147.5], - ] - ], - }, - }, - { - "type": "Feature", - "properties": { - "AREA": 15775.758, - "EAS_ID": 172, - "PRFEDEA": "35043415", - }, - "fid": 4, - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [479029.71875, 4765110.5], - [479046.53125, 4765117.0], - [479123.28125, 4765015.0], - [479196.8125, 4764879.0], - [479117.8125, 4764847.0], - [479029.71875, 4765110.5], - ] - ], - }, - }, - { - "type": "Feature", - "properties": { - "AREA": 101429.977, - "EAS_ID": 169, - "PRFEDEA": "35043412", - }, - "fid": 5, - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [480082.96875, 4765049.5], - [480080.28125, 4764979.5], - [480133.96875, 4764856.5], - [479968.46875, 4764788.0], - [479750.6875, 4764702.0], - [479735.90625, 4764752.0], - [479640.09375, 4764721.0], - [479658.59375, 4764670.0], - [479504.90625, 4764609.5], - [479548.03125, 4764693.5], - [479587.28125, 4764718.0], - [479617.21875, 4764760.0], - [479637.875, 4764803.0], - [479644.78125, 4764827.5], - [479680.28125, 4764852.0], - [479734.375, 4764865.0], - [479895.125, 4765000.0], - [479934.96875, 4765020.0], - [480000.28125, 4765043.0], - [480082.96875, 4765049.5], - ] - ], - }, - }, - { - "type": "Feature", - "properties": { - "AREA": 268597.625, - "EAS_ID": 166, - "PRFEDEA": "35043409", - }, - "fid": 6, - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [480389.6875, 4764950.0], - [480537.15625, 4765014.0], - [480567.96875, 4764918.0], - [480605.0, 4764835.0], - [480701.0625, 4764738.0], - [480710.25, 4764690.5], - [480588.59375, 4764740.5], - [480540.71875, 4764741.0], - [480515.125, 4764695.0], - [480731.65625, 4764561.5], - [480692.1875, 4764453.5], - [480677.84375, 4764439.0], - [480655.34375, 4764397.5], - [480584.375, 4764353.0], - [480500.40625, 4764326.5], - [480358.53125, 4764277.0], - [480192.3125, 4764183.0], - [480157.125, 4764266.5], - [480234.3125, 4764304.0], - [480289.125, 4764348.5], - [480316.0, 4764395.0], - [480343.5625, 4764477.0], - [480343.71875, 4764532.5], - [480258.03125, 4764767.0], - [480177.15625, 4764742.0], - [480093.75, 
4764703.0], - [480011.0, 4764674.5], - [479985.0625, 4764732.0], - [479968.46875, 4764788.0], - [480133.96875, 4764856.5], - [480389.6875, 4764950.0], - ] - ], - }, - }, - { - "type": "Feature", - "properties": { - "AREA": 1634833.375, - "EAS_ID": 158, - "PRFEDEA": "35043369", - }, - "fid": 7, - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [480701.0625, 4764738.0], - [480761.46875, 4764778.0], - [480824.96875, 4764820.0], - [480922.03125, 4764850.5], - [480930.71875, 4764852.0], - [480984.25, 4764875.0], - [481088.1875, 4764936.0], - [481136.84375, 4764994.5], - [481281.3125, 4764876.5], - [481291.09375, 4764810.0], - [481465.90625, 4764872.5], - [481457.375, 4764937.0], - [481509.65625, 4764967.0], - [481538.90625, 4764982.5], - [481575.0, 4764999.5], - [481602.125, 4764915.5], - [481629.84375, 4764829.5], - [481645.3125, 4764797.5], - [481635.96875, 4764795.5], - [481235.3125, 4764650.0], - [481209.8125, 4764633.5], - [481199.21875, 4764623.5], - [481185.5, 4764607.0], - [481159.9375, 4764580.0], - [481140.46875, 4764510.5], - [481141.625, 4764480.5], - [481199.84375, 4764180.0], - [481143.4375, 4764010.5], - [481130.3125, 4763979.5], - [481039.9375, 4763889.5], - [480882.6875, 4763670.0], - [480826.0625, 4763650.5], - [480745.1875, 4763628.5], - [480654.4375, 4763627.5], - [480599.8125, 4763660.0], - [480281.9375, 4763576.5], - [480221.5, 4763533.5], - [480199.6875, 4763509.0], - [480195.09375, 4763430.0], - [480273.6875, 4763305.5], - [480309.6875, 4763063.5], - [480201.84375, 4762962.5], - [479855.3125, 4762880.5], - [479848.53125, 4762897.0], - [479728.875, 4763217.5], - [479492.6875, 4763850.0], - [479550.0625, 4763919.5], - [480120.21875, 4764188.5], - [480192.3125, 4764183.0], - [480358.53125, 4764277.0], - [480500.40625, 4764326.5], - [480584.375, 4764353.0], - [480655.34375, 4764397.5], - [480677.84375, 4764439.0], - [480692.1875, 4764453.5], - [480731.65625, 4764561.5], - [480515.125, 4764695.0], - [480540.71875, 4764741.0], - [480588.59375, 4764740.5], - [480710.25, 4764690.5], - [480701.0625, 4764738.0], - ] - ], - }, - }, - { - "type": "Feature", - "properties": { - "AREA": -596610.313, - "EAS_ID": 165, - "PRFEDEA": "35043408", - }, - "fid": 8, - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [479750.6875, 4764702.0], - [479968.46875, 4764788.0], - [479985.0625, 4764732.0], - [480011.0, 4764674.5], - [480093.75, 4764703.0], - [480177.15625, 4764742.0], - [480258.03125, 4764767.0], - [480343.71875, 4764532.5], - [480343.5625, 4764477.0], - [480316.0, 4764395.0], - [480289.125, 4764348.5], - [480234.3125, 4764304.0], - [480157.125, 4764266.5], - [480192.3125, 4764183.0], - [480120.21875, 4764188.5], - [479550.0625, 4763919.5], - [479492.6875, 4763850.0], - [479487.75, 4763864.5], - [479442.75, 4763990.0], - [479436.0, 4764023.0], - [479398.9375, 4764100.0], - [479349.625, 4764230.0], - [479305.875, 4764361.0], - [479239.8125, 4764505.0], - [479504.90625, 4764609.5], - [479658.59375, 4764670.0], - [479750.6875, 4764702.0], - ] - ], - }, - }, - { - "type": "Feature", - "properties": { - "AREA": 5268.813, - "EAS_ID": 170, - "PRFEDEA": "35043413", - }, - "fid": 9, - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [479750.6875, 4764702.0], - [479658.59375, 4764670.0], - [479640.09375, 4764721.0], - [479735.90625, 4764752.0], - [479750.6875, 4764702.0], - ] - ], - }, - }, + } ], } ], diff --git a/doc/source/programs/ogrinfo.rst b/doc/source/programs/ogrinfo.rst index 2acc951539c3..e2498095e3d4 100644 --- a/doc/source/programs/ogrinfo.rst +++ 
b/doc/source/programs/ogrinfo.rst @@ -20,7 +20,7 @@ Synopsis [-if <driver_name>] [-json] [-ro] [-q] [-where <restricted_where>|@f<ilename>] [-spat <xmin> <ymin> <xmax> <ymax>] [-geomfield <field>] [-fid <fid>] [-sql <statement>|@<filename>] [-dialect <sql_dialect>] [-al] [-rl] - [-so|-features] [-fields={YES|NO}]] + [-so|-features] [-limit <nb_features>] [-fields={YES|NO}]] [-geom={YES|NO|SUMMARY|WKT|ISO_WKT}] [-oo <NAME>=<VALUE>]... [-nomd] [-listmdd] [-mdd <domain>|all]... [-nocount] [-nogeomtype] [[-noextent] | [-extent3D]] @@ -88,6 +88,12 @@ edit data. .. versionadded:: 3.7 +.. option:: -limit <nb_features> + + .. versionadded:: 3.9 + + Limit the number of features per layer. + .. option:: -q Quiet verbose reporting of various information, including coordinate diff --git a/swig/include/python/gdal_python.i b/swig/include/python/gdal_python.i index e8713e76a783..1e3933460ecb 100644 --- a/swig/include/python/gdal_python.i +++ b/swig/include/python/gdal_python.i @@ -2148,6 +2148,7 @@ def VectorInfoOptions(options=None, deserialize=True, layers=None, dumpFeatures=False, + limit=None, featureCount=True, extent=True, SQLStatement=None, @@ -2179,6 +2180,8 @@ def VectorInfoOptions(options=None, whether to compute and display the layer extent. Can also be set to the string '3D' to request a 3D extent dumpFeatures: set to True to get the dump of all features + limit: + maximum number of features to read per layer """ options = [] if options is None else options @@ -2224,6 +2227,8 @@ def VectorInfoOptions(options=None, else: if not dumpFeatures: new_options += ["-so"] + if limit: + new_options += ["-limit", str(limit)] return (GDALVectorInfoOptions(new_options), format, deserialize) From 700bcd1c4013507e94c7131b8efb8e90475486df Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Fri, 19 Apr 2024 00:47:02 +0200 Subject: [PATCH 084/230] Revert "GPKG: change default value of OGR_GPKG_ALLOW_THREADED_RTREE to NO on OSX Arm64" This reverts commit fc81e0f25f278883356f38caecab3a83db5e153c. --- ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp | 15 ++------------- 1 file changed, 2 insertions(+), 13 deletions(-) diff --git a/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp b/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp index 5bc10e338233..489fca4c40f2 100644 --- a/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp +++ b/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp @@ -2644,19 +2644,8 @@ void OGRGeoPackageTableLayer::SetDeferredSpatialIndexCreation(bool bFlag) m_bAllowedRTreeThread = m_poDS->GetLayerCount() == 0 && sqlite3_threadsafe() != 0 && CPLGetNumCPUs() >= 2 && - CPLTestBool(CPLGetConfigOption("OGR_GPKG_ALLOW_THREADED_RTREE", - // For a not yet understood reason, threaded RTree building - // (randomly?) fails on OSX Arm64. This may not be at all specific - // to that platform, but a more general problem, but it can't be - // reproduced elsewhere. 
- // Cf https://gis.stackexchange.com/questions/479958/how-to-fix-failed-to-prepare-sql-error-when-creating-gpkg-file-from-osm-extrac/479964#479964 - // and random (frequent) failures on GDAL CI (https://github.com/OSGeo/gdal/commit/a83942422fd67471aee23ae11c5d06af27db2857) -#if defined(__arm64__) && defined(__APPLE__) - "NO" -#else - "YES" -#endif - )); + CPLTestBool( + CPLGetConfigOption("OGR_GPKG_ALLOW_THREADED_RTREE", "YES")); // For unit tests if (CPLTestBool(CPLGetConfigOption( From b71b0803ab965dd844fe08c7f66c49b2578cb59f Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Fri, 19 Apr 2024 00:49:57 +0200 Subject: [PATCH 085/230] GPKG: fix subtle threading issue that caused random failure in threaded RTree creation, in particular on MacOSX ARM64 --- ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp b/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp index 489fca4c40f2..57ffc309ccf0 100644 --- a/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp +++ b/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp @@ -2727,13 +2727,12 @@ void OGRGeoPackageTableLayer::StartAsyncRTree() if (eErr == OGRERR_NONE) { + m_hRTree = gdal_sqlite_rtree_bl_new(4096); try { m_oThreadRTree = std::thread([this]() { AsyncRTreeThreadFunction(); }); m_bThreadRTreeStarted = true; - - m_hRTree = gdal_sqlite_rtree_bl_new(4096); } catch (const std::exception &e) { @@ -2745,6 +2744,11 @@ void OGRGeoPackageTableLayer::StartAsyncRTree() if (!m_bThreadRTreeStarted) { + if (m_hRTree) + { + gdal_sqlite_rtree_bl_free(m_hRTree); + m_hRTree = nullptr; + } m_oQueueRTreeEntries.clear(); m_bErrorDuringRTreeThread = true; sqlite3_close(m_hAsyncDBHandle); @@ -2886,6 +2890,8 @@ static size_t GetMaxRAMUsageAllowedForRTree() void OGRGeoPackageTableLayer::AsyncRTreeThreadFunction() { + CPLAssert(m_hRTree); + const size_t nMaxRAMUsageAllowed = GetMaxRAMUsageAllowedForRTree(); sqlite3_stmt *hStmt = nullptr; GIntBig nCount = 0; From 1ecd8e1bcc5fc5a0a17090144bebfede4f4639eb Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Fri, 19 Apr 2024 01:26:26 +0200 Subject: [PATCH 086/230] ogr_parquet.py: mark 2 tests as requiring GEOS --- autotest/ogr/ogr_parquet.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/autotest/ogr/ogr_parquet.py b/autotest/ogr/ogr_parquet.py index 68bcdcea5250..55fd9fb7be5b 100755 --- a/autotest/ogr/ogr_parquet.py +++ b/autotest/ogr/ogr_parquet.py @@ -2981,6 +2981,7 @@ def test_ogr_parquet_nested_types(): # Test float32 bounding box column +@pytest.mark.require_geos def test_ogr_parquet_bbox_float32(tmp_vsimem): outfilename = str(tmp_vsimem / "test_ogr_parquet_bbox_float32.parquet") @@ -3124,6 +3125,7 @@ def test_ogr_parquet_bbox_double(): # as in Overture Maps datasets 2024-04-16-beta.0 +@pytest.mark.require_geos def test_ogr_parquet_bbox_float32_but_no_covering_in_metadata(): ds = ogr.Open("data/parquet/bbox_similar_to_overturemaps_2024-04-16-beta.0.parquet") From 881de5a484598b174684a6210b3a5945e1db4b33 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Fri, 19 Apr 2024 02:18:07 +0200 Subject: [PATCH 087/230] Make GDALSuggestedWarpOutput2() more robust to fix issue in test_gdalwarp_lib_ortho_to_long_lat() on MacOS ARM64 --- alg/gdaltransformer.cpp | 4 +++- autotest/utilities/test_gdalwarp_lib.py | 2 -- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/alg/gdaltransformer.cpp b/alg/gdaltransformer.cpp 
index 4912ab7871de..e8e826eb6e8e 100644 --- a/alg/gdaltransformer.cpp +++ b/alg/gdaltransformer.cpp @@ -1014,7 +1014,9 @@ CPLErr CPL_STDCALL GDALSuggestedWarpOutput2(GDALDatasetH hSrcDS, const auto invGT = pGIPTI->adfSrcInvGeoTransform; const double x = invGT[0] + X * invGT[1] + Y * invGT[2]; const double y = invGT[3] + X * invGT[4] + Y * invGT[5]; - if (x >= 0 && x <= nInXSize && y >= 0 && y <= nInYSize) + constexpr double EPSILON = 1e-5; + if (x >= -EPSILON && x <= nInXSize + EPSILON && + y >= -EPSILON && y <= nInYSize + EPSILON) { if (psRTI->poForwardTransform->Transform(1, &X, &Y) && diff --git a/autotest/utilities/test_gdalwarp_lib.py b/autotest/utilities/test_gdalwarp_lib.py index 5a3e6b14d71e..6697ab651ef5 100755 --- a/autotest/utilities/test_gdalwarp_lib.py +++ b/autotest/utilities/test_gdalwarp_lib.py @@ -3991,8 +3991,6 @@ def test_gdalwarp_lib_ortho_to_long_lat(): data2[j * ds.RasterXSize], data3[j * ds.RasterXSize], ) - if max_val == 0 and gdaltest.is_travis_branch("macos_build_conda"): - pytest.xfail("fails for unknown reason on MacOS ARM64") assert max_val != 0, "line %d" % j From 4da2bfca8db85b9975acb06981c8b31231b72ccd Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Fri, 19 Apr 2024 13:03:57 +0200 Subject: [PATCH 088/230] netCDF: fix writing of metadata items whose value has an equal sign Fixes #9702 --- autotest/gdrivers/netcdf.py | 16 +++ frmts/netcdf/netcdfdataset.cpp | 193 ++++++++++++++++----------------- 2 files changed, 107 insertions(+), 102 deletions(-) diff --git a/autotest/gdrivers/netcdf.py b/autotest/gdrivers/netcdf.py index c26d620cbc04..07e6ca616fb4 100755 --- a/autotest/gdrivers/netcdf.py +++ b/autotest/gdrivers/netcdf.py @@ -6513,3 +6513,19 @@ def test_band_names_creation_option(tmp_path): assert gdal.GetSubdatasetInfo(sds_names[0]).GetSubdatasetComponent() == "t2m" assert gdal.GetSubdatasetInfo(sds_names[1]).GetSubdatasetComponent() == "prate" + + +@gdaltest.enable_exceptions() +def test_netcdf_create_metadata_with_equal_sign(tmp_path): + + fname = tmp_path / "test_netcdf_create_metadata_with_equal_sign.nc" + + src_ds = gdal.GetDriverByName("MEM").Create("", 1, 1) + value = "x" * 1000 + "=y" + src_ds.SetMetadataItem("my_var#long_name", value) + src_ds.GetRasterBand(1).SetMetadataItem("NETCDF_VARNAME", "my_var") + + gdal.Translate(fname, src_ds) + + ds = gdal.Open(fname) + assert ds.GetRasterBand(1).GetMetadataItem("long_name") == value diff --git a/frmts/netcdf/netcdfdataset.cpp b/frmts/netcdf/netcdfdataset.cpp index 067ca209634b..92eb9316e180 100644 --- a/frmts/netcdf/netcdfdataset.cpp +++ b/frmts/netcdf/netcdfdataset.cpp @@ -8948,15 +8948,13 @@ static void CopyMetadata(GDALDataset *poSrcDS, GDALRasterBand *poSrcBand, GDALRasterBand *poDstBand, int nCdfId, int CDFVarID, const char *pszPrefix) { - char **papszFieldData = nullptr; - // Remove the following band meta but set them later from band data. 
const char *const papszIgnoreBand[] = { CF_ADD_OFFSET, CF_SCALE_FACTOR, "valid_range", "_Unsigned", _FillValue, "coordinates", nullptr}; const char *const papszIgnoreGlobal[] = {"NETCDF_DIM_EXTRA", nullptr}; - char **papszMetadata = nullptr; + CSLConstList papszMetadata = nullptr; if (poSrcDS) { papszMetadata = poSrcDS->GetMetadata(); @@ -8966,108 +8964,94 @@ static void CopyMetadata(GDALDataset *poSrcDS, GDALRasterBand *poSrcBand, papszMetadata = poSrcBand->GetMetadata(); } - const int nItems = CSLCount(papszMetadata); - - for (int k = 0; k < nItems; k++) + for (const auto &[pszKey, pszValue] : cpl::IterateNameValue(papszMetadata)) { - const char *pszField = CSLGetField(papszMetadata, k); - if (papszFieldData) - CSLDestroy(papszFieldData); - papszFieldData = CSLTokenizeString2(pszField, "=", CSLT_HONOURSTRINGS); - if (papszFieldData[1] != nullptr) - { #ifdef NCDF_DEBUG - CPLDebug("GDAL_netCDF", "copy metadata [%s]=[%s]", - papszFieldData[0], papszFieldData[1]); + CPLDebug("GDAL_netCDF", "copy metadata [%s]=[%s]", pszKey, pszValue); #endif - CPLString osMetaName(papszFieldData[0]); - CPLString osMetaValue(papszFieldData[1]); + CPLString osMetaName(pszKey); - // Check for items that match pszPrefix if applicable. - if (pszPrefix != nullptr && !EQUAL(pszPrefix, "")) + // Check for items that match pszPrefix if applicable. + if (pszPrefix && !EQUAL(pszPrefix, "")) + { + // Remove prefix. + if (STARTS_WITH(osMetaName.c_str(), pszPrefix)) { - // Remove prefix. - if (EQUALN(osMetaName, pszPrefix, strlen(pszPrefix))) - { - osMetaName = osMetaName.substr(strlen(pszPrefix)); - } - // Only copy items that match prefix. - else - { - continue; - } + osMetaName = osMetaName.substr(strlen(pszPrefix)); } + // Only copy items that match prefix. + else + { + continue; + } + } - // Fix various issues with metadata translation. - if (CDFVarID == NC_GLOBAL) + // Fix various issues with metadata translation. + if (CDFVarID == NC_GLOBAL) + { + // Do not copy items in papszIgnoreGlobal and NETCDF_DIM_*. + if ((CSLFindString(papszIgnoreGlobal, osMetaName) != -1) || + (STARTS_WITH(osMetaName, "NETCDF_DIM_"))) + continue; + // Remove NC_GLOBAL prefix for netcdf global Metadata. + else if (STARTS_WITH(osMetaName, "NC_GLOBAL#")) { - // Do not copy items in papszIgnoreGlobal and NETCDF_DIM_*. - if ((CSLFindString(papszIgnoreGlobal, osMetaName) != -1) || - (STARTS_WITH(osMetaName, "NETCDF_DIM_"))) - continue; - // Remove NC_GLOBAL prefix for netcdf global Metadata. - else if (STARTS_WITH(osMetaName, "NC_GLOBAL#")) - { - osMetaName = osMetaName.substr(strlen("NC_GLOBAL#")); - } - // GDAL Metadata renamed as GDAL-[meta]. - else if (strstr(osMetaName, "#") == nullptr) - { - osMetaName = "GDAL_" + osMetaName; - } - // Keep time, lev and depth information for safe-keeping. - // Time and vertical coordinate handling need improvements. - /* - else if( STARTS_WITH(szMetaName, "time#") ) - { - szMetaName[4] = '-'; - } - else if( STARTS_WITH(szMetaName, "lev#") ) - { - szMetaName[3] = '-'; - } - else if( STARTS_WITH(szMetaName, "depth#") ) - { - szMetaName[5] = '-'; - } - */ - // Only copy data without # (previously all data was copied). - if (strstr(osMetaName, "#") != nullptr) - continue; - // netCDF attributes do not like the '#' character. - // for( unsigned int h=0; h < strlen(szMetaName) -1 ; h++ ) { - // if( szMetaName[h] == '#') szMetaName[h] = '-'; - // } + osMetaName = osMetaName.substr(strlen("NC_GLOBAL#")); } - else + // GDAL Metadata renamed as GDAL-[meta]. 
+ else if (strstr(osMetaName, "#") == nullptr) { - // Do not copy varname, stats, NETCDF_DIM_*, nodata - // and items in papszIgnoreBand. - if (STARTS_WITH(osMetaName, "NETCDF_VARNAME") || - STARTS_WITH(osMetaName, "STATISTICS_") || - STARTS_WITH(osMetaName, "NETCDF_DIM_") || - STARTS_WITH(osMetaName, "missing_value") || - STARTS_WITH(osMetaName, "_FillValue") || - CSLFindString(papszIgnoreBand, osMetaName) != -1) - continue; + osMetaName = "GDAL_" + osMetaName; + } + // Keep time, lev and depth information for safe-keeping. + // Time and vertical coordinate handling need improvements. + /* + else if( STARTS_WITH(szMetaName, "time#") ) + { + szMetaName[4] = '-'; + } + else if( STARTS_WITH(szMetaName, "lev#") ) + { + szMetaName[3] = '-'; + } + else if( STARTS_WITH(szMetaName, "depth#") ) + { + szMetaName[5] = '-'; } + */ + // Only copy data without # (previously all data was copied). + if (strstr(osMetaName, "#") != nullptr) + continue; + // netCDF attributes do not like the '#' character. + // for( unsigned int h=0; h < strlen(szMetaName) -1 ; h++ ) { + // if( szMetaName[h] == '#') szMetaName[h] = '-'; + // } + } + else + { + // Do not copy varname, stats, NETCDF_DIM_*, nodata + // and items in papszIgnoreBand. + if (STARTS_WITH(osMetaName, "NETCDF_VARNAME") || + STARTS_WITH(osMetaName, "STATISTICS_") || + STARTS_WITH(osMetaName, "NETCDF_DIM_") || + STARTS_WITH(osMetaName, "missing_value") || + STARTS_WITH(osMetaName, "_FillValue") || + CSLFindString(papszIgnoreBand, osMetaName) != -1) + continue; + } #ifdef NCDF_DEBUG - CPLDebug("GDAL_netCDF", "copy name=[%s] value=[%s]", - osMetaName.c_str(), osMetaValue.c_str()); + CPLDebug("GDAL_netCDF", "copy name=[%s] value=[%s]", osMetaName.c_str(), + pszValue); #endif - if (NCDFPutAttr(nCdfId, CDFVarID, osMetaName, osMetaValue) != - CE_None) - CPLDebug("GDAL_netCDF", "NCDFPutAttr(%d, %d, %s, %s) failed", - nCdfId, CDFVarID, osMetaName.c_str(), - osMetaValue.c_str()); + if (NCDFPutAttr(nCdfId, CDFVarID, osMetaName, pszValue) != CE_None) + { + CPLDebug("GDAL_netCDF", "NCDFPutAttr(%d, %d, %s, %s) failed", + nCdfId, CDFVarID, osMetaName.c_str(), pszValue); } } - if (papszFieldData) - CSLDestroy(papszFieldData); - // Set add_offset and scale_factor here if present. if (poSrcBand && poDstBand) { @@ -9662,15 +9646,17 @@ netCDFDataset::CreateCopy(const char *pszFilename, GDALDataset *poSrcDS, eDT = poSrcBand->GetRasterDataType(); // Get var name from NETCDF_VARNAME. - const char *tmpMetadata = poSrcBand->GetMetadataItem("NETCDF_VARNAME"); + const char *pszNETCDF_VARNAME = + poSrcBand->GetMetadataItem("NETCDF_VARNAME"); char szBandName[NC_MAX_NAME + 1]; - if (tmpMetadata != nullptr) + if (pszNETCDF_VARNAME) { if (nBands > 1 && papszExtraDimNames == nullptr) - snprintf(szBandName, sizeof(szBandName), "%s%d", tmpMetadata, - iBand); + snprintf(szBandName, sizeof(szBandName), "%s%d", + pszNETCDF_VARNAME, iBand); else - snprintf(szBandName, sizeof(szBandName), "%s", tmpMetadata); + snprintf(szBandName, sizeof(szBandName), "%s", + pszNETCDF_VARNAME); } else { @@ -9678,26 +9664,29 @@ netCDFDataset::CreateCopy(const char *pszFilename, GDALDataset *poSrcDS, } // Get long_name from <var>#long_name. 
- char szLongName[NC_MAX_NAME + 1]; - snprintf(szLongName, sizeof(szLongName), "%s#%s", - poSrcBand->GetMetadataItem("NETCDF_VARNAME"), CF_LNG_NAME); - tmpMetadata = poSrcDS->GetMetadataItem(szLongName); - if (tmpMetadata != nullptr) - snprintf(szLongName, sizeof(szLongName), "%s", tmpMetadata); - else - szLongName[0] = '\0'; + const char *pszLongName = ""; + if (pszNETCDF_VARNAME) + { + pszLongName = + poSrcDS->GetMetadataItem(std::string(pszNETCDF_VARNAME) + .append("#") + .append(CF_LNG_NAME) + .c_str()); + if (!pszLongName) + pszLongName = ""; + } constexpr bool bSignedData = false; if (nDim > 2) poBand = new netCDFRasterBand( netCDFRasterBand::CONSTRUCTOR_CREATE(), poDS, eDT, iBand, - bSignedData, szBandName, szLongName, nBandID, nDim, iBand - 1, + bSignedData, szBandName, pszLongName, nBandID, nDim, iBand - 1, panBandZLev, panBandDimPos, panDimIds); else poBand = new netCDFRasterBand( netCDFRasterBand::CONSTRUCTOR_CREATE(), poDS, eDT, iBand, - bSignedData, szBandName, szLongName); + bSignedData, szBandName, pszLongName); poDS->SetBand(iBand, poBand); From 10fa4d1f2962a8e7de06038cb4d1a633b1e49384 Mon Sep 17 00:00:00 2001 From: AbelPau <92721356+AbelPau@users.noreply.github.com> Date: Fri, 19 Apr 2024 13:13:53 +0200 Subject: [PATCH 089/230] Add MiraMonVector read/creation driver (#9688) --- .../miramon/Arcs/3dArcs/linies_3d_WGS84.arc | Bin 0 -> 944 bytes .../miramon/Arcs/3dArcs/linies_3d_WGS84.nod | Bin 0 -> 196 bytes .../miramon/Arcs/3dArcs/linies_3d_WGS84A.dbf | Bin 0 -> 987 bytes .../miramon/Arcs/3dArcs/linies_3d_WGS84A.rel | 161 + .../miramon/Arcs/3dArcs/linies_3d_WGS84N.dbf | Bin 0 -> 525 bytes .../miramon/Arcs/3dArcs/linies_3d_WGS84N.rel | 83 + .../data/miramon/Arcs/EmptyArcs/Empty_ARC.arc | Bin 0 -> 48 bytes .../data/miramon/Arcs/EmptyArcs/Empty_ARC.nod | Bin 0 -> 48 bytes .../miramon/Arcs/EmptyArcs/Empty_ARCA.dbf | Bin 0 -> 193 bytes .../miramon/Arcs/EmptyArcs/Empty_ARCA.rel | 41 + .../miramon/Arcs/EmptyArcs/Empty_ARCN.dbf | Bin 0 -> 129 bytes .../miramon/Arcs/EmptyArcs/Empty_ARCN.rel | 26 + .../miramon/Arcs/SimpleArcs/SimpleArcFile.arc | Bin 0 -> 592 bytes .../miramon/Arcs/SimpleArcs/SimpleArcFile.nod | Bin 0 -> 172 bytes .../Arcs/SimpleArcs/SimpleArcFileA.dbf | Bin 0 -> 789 bytes .../Arcs/SimpleArcs/SimpleArcFileA.rel | 102 + .../Arcs/SimpleArcs/SimpleArcFileN.dbf | Bin 0 -> 481 bytes .../Arcs/SimpleArcs/SimpleArcFileN.rel | 64 + .../miramon/Points/3dpoints/Some3dPoints.pnt | Bin 0 -> 1568 bytes .../miramon/Points/3dpoints/Some3dPointsT.dbf | Bin 0 -> 2710 bytes .../miramon/Points/3dpoints/Some3dPointsT.rel | 112 + .../miramon/Points/EmptyPoints/Empty_PNT.pnt | Bin 0 -> 128 bytes .../miramon/Points/EmptyPoints/Empty_PNTT.dbf | Bin 0 -> 97 bytes .../miramon/Points/EmptyPoints/Empty_PNTT.rel | 23 + .../Points/SimplePoints/SimplePointsFile.pnt | Bin 0 -> 96 bytes .../Points/SimplePoints/SimplePointsFileT.dbf | Bin 0 -> 254 bytes .../Points/SimplePoints/SimplePointsFileT.rel | 58 + .../miramon/Polygons/3dPolygons/tin_3d.arc | Bin 0 -> 1432 bytes .../miramon/Polygons/3dPolygons/tin_3d.nod | Bin 0 -> 192 bytes .../miramon/Polygons/3dPolygons/tin_3d.pol | Bin 0 -> 623 bytes .../miramon/Polygons/3dPolygons/tin_3dA.dbf | Bin 0 -> 1253 bytes .../miramon/Polygons/3dPolygons/tin_3dA.rel | 141 + .../miramon/Polygons/3dPolygons/tin_3dN.dbf | Bin 0 -> 171 bytes .../miramon/Polygons/3dPolygons/tin_3dN.rel | 108 + .../miramon/Polygons/3dPolygons/tin_3dP.dbf | Bin 0 -> 565 bytes .../miramon/Polygons/3dPolygons/tin_3dP.rel | 180 + .../Polygons/EmptyPolygons/Empty_POL.arc | Bin 0 -> 48 bytes 
.../Polygons/EmptyPolygons/Empty_POL.nod | Bin 0 -> 48 bytes .../Polygons/EmptyPolygons/Empty_POL.pol | Bin 0 -> 112 bytes .../Polygons/EmptyPolygons/Empty_POLA.dbf | Bin 0 -> 193 bytes .../Polygons/EmptyPolygons/Empty_POLA.rel | 44 + .../Polygons/EmptyPolygons/Empty_POLN.dbf | Bin 0 -> 129 bytes .../Polygons/EmptyPolygons/Empty_POLN.rel | 26 + .../Polygons/EmptyPolygons/Empty_POLP.dbf | Bin 0 -> 225 bytes .../Polygons/EmptyPolygons/Empty_POLP.rel | 52 + .../Polygons/SimplePolygons/SimplePolFile.arc | Bin 0 -> 536 bytes .../Polygons/SimplePolygons/SimplePolFile.nod | Bin 0 -> 92 bytes .../Polygons/SimplePolygons/SimplePolFile.pol | Bin 0 -> 349 bytes .../SimplePolygons/SimplePolFileA.dbf | Bin 0 -> 511 bytes .../SimplePolygons/SimplePolFileA.rel | 89 + .../SimplePolygons/SimplePolFileN.dbf | Bin 0 -> 261 bytes .../SimplePolygons/SimplePolFileN.rel | 64 + .../SimplePolygons/SimplePolFileP.dbf | Bin 0 -> 729 bytes .../SimplePolygons/SimplePolFileP.rel | 93 + autotest/ogr/ogr_miramon_vector.py | 677 ++ doc/source/drivers/vector/index.rst | 1 + doc/source/drivers/vector/miramon.rst | 374 + frmts/drivers.ini | 1 + fuzzers/CMakeLists.txt | 1 + fuzzers/build_google_oss_fuzzers.sh | 1 + fuzzers/build_seed_corpus.sh | 20 + fuzzers/ogr_fuzzer.cpp | 26 + ogr/ogrsf_frmts/CMakeLists.txt | 3 + ogr/ogrsf_frmts/generic/ogrregisterall.cpp | 3 + ogr/ogrsf_frmts/miramon/CMakeLists.txt | 14 + ogr/ogrsf_frmts/miramon/data/MM_m_idofic.csv | 233 + ogr/ogrsf_frmts/miramon/mm_constants.h | 173 + ogr/ogrsf_frmts/miramon/mm_gdal_constants.h | 97 + .../miramon/mm_gdal_driver_structs.h | 826 ++ ogr/ogrsf_frmts/miramon/mm_gdal_functions.c | 2906 +++++++ ogr/ogrsf_frmts/miramon/mm_gdal_functions.h | 164 + ogr/ogrsf_frmts/miramon/mm_gdal_structures.h | 116 + ogr/ogrsf_frmts/miramon/mm_rdlayr.c | 707 ++ ogr/ogrsf_frmts/miramon/mm_rdlayr.h | 22 + ogr/ogrsf_frmts/miramon/mm_wrlayr.c | 7456 +++++++++++++++++ ogr/ogrsf_frmts/miramon/mm_wrlayr.h | 222 + ogr/ogrsf_frmts/miramon/ogrmiramon.h | 169 + .../miramon/ogrmiramondatasource.cpp | 291 + ogr/ogrsf_frmts/miramon/ogrmiramondriver.cpp | 212 + ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp | 2761 ++++++ ogr/ogrsf_frmts/ogrsf_frmts.h | 1 + scripts/fix_typos.sh | 1 + scripts/typos_allowlist.txt | 22 + 83 files changed, 18967 insertions(+) create mode 100644 autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84.arc create mode 100644 autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84.nod create mode 100644 autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84A.dbf create mode 100644 autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84A.rel create mode 100644 autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84N.dbf create mode 100644 autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84N.rel create mode 100644 autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARC.arc create mode 100644 autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARC.nod create mode 100644 autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARCA.dbf create mode 100644 autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARCA.rel create mode 100644 autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARCN.dbf create mode 100644 autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARCN.rel create mode 100644 autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFile.arc create mode 100644 autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFile.nod create mode 100644 autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFileA.dbf create mode 100644 autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFileA.rel create mode 100644 
autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFileN.dbf create mode 100644 autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFileN.rel create mode 100644 autotest/ogr/data/miramon/Points/3dpoints/Some3dPoints.pnt create mode 100644 autotest/ogr/data/miramon/Points/3dpoints/Some3dPointsT.dbf create mode 100644 autotest/ogr/data/miramon/Points/3dpoints/Some3dPointsT.rel create mode 100644 autotest/ogr/data/miramon/Points/EmptyPoints/Empty_PNT.pnt create mode 100644 autotest/ogr/data/miramon/Points/EmptyPoints/Empty_PNTT.dbf create mode 100644 autotest/ogr/data/miramon/Points/EmptyPoints/Empty_PNTT.rel create mode 100644 autotest/ogr/data/miramon/Points/SimplePoints/SimplePointsFile.pnt create mode 100644 autotest/ogr/data/miramon/Points/SimplePoints/SimplePointsFileT.dbf create mode 100644 autotest/ogr/data/miramon/Points/SimplePoints/SimplePointsFileT.rel create mode 100644 autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3d.arc create mode 100644 autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3d.nod create mode 100644 autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3d.pol create mode 100644 autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dA.dbf create mode 100644 autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dA.rel create mode 100644 autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dN.dbf create mode 100644 autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dN.rel create mode 100644 autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dP.dbf create mode 100644 autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dP.rel create mode 100644 autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POL.arc create mode 100644 autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POL.nod create mode 100644 autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POL.pol create mode 100644 autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLA.dbf create mode 100644 autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLA.rel create mode 100644 autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLN.dbf create mode 100644 autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLN.rel create mode 100644 autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLP.dbf create mode 100644 autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLP.rel create mode 100644 autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFile.arc create mode 100644 autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFile.nod create mode 100644 autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFile.pol create mode 100644 autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileA.dbf create mode 100644 autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileA.rel create mode 100644 autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileN.dbf create mode 100644 autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileN.rel create mode 100644 autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileP.dbf create mode 100644 autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileP.rel create mode 100644 autotest/ogr/ogr_miramon_vector.py create mode 100644 doc/source/drivers/vector/miramon.rst create mode 100644 ogr/ogrsf_frmts/miramon/CMakeLists.txt create mode 100644 ogr/ogrsf_frmts/miramon/data/MM_m_idofic.csv create mode 100644 ogr/ogrsf_frmts/miramon/mm_constants.h create mode 100644 ogr/ogrsf_frmts/miramon/mm_gdal_constants.h create mode 100644 ogr/ogrsf_frmts/miramon/mm_gdal_driver_structs.h create mode 100644 
ogr/ogrsf_frmts/miramon/mm_gdal_functions.c create mode 100644 ogr/ogrsf_frmts/miramon/mm_gdal_functions.h create mode 100644 ogr/ogrsf_frmts/miramon/mm_gdal_structures.h create mode 100644 ogr/ogrsf_frmts/miramon/mm_rdlayr.c create mode 100644 ogr/ogrsf_frmts/miramon/mm_rdlayr.h create mode 100644 ogr/ogrsf_frmts/miramon/mm_wrlayr.c create mode 100644 ogr/ogrsf_frmts/miramon/mm_wrlayr.h create mode 100644 ogr/ogrsf_frmts/miramon/ogrmiramon.h create mode 100644 ogr/ogrsf_frmts/miramon/ogrmiramondatasource.cpp create mode 100644 ogr/ogrsf_frmts/miramon/ogrmiramondriver.cpp create mode 100644 ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp diff --git a/autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84.arc b/autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84.arc new file mode 100644 index 0000000000000000000000000000000000000000..e6e5b38820de94031de2f5e12a031ff23110f418 GIT binary patch literal 944 zcmZ<^a#k?ZGZfe>cwyV(wlDVa7nbyM@A_|_@H%W^)C?B~Pqo0Feb-zZ*cc!{;U~}1 zzdOF#A7^%nt1tO(|LI5!_qP=;4pJQ1(TTfV99V$L8W<sJLDZi=ZO@yZU9yiUUOw$z z<`4Uz=c*+g3xC@i&0BGL_5l}%AHVk&v>tMCU;?T=0Mr1YL3VMkf2s28+)4ZVhHq0m zuKt0$>zv=6r43759A3^je@Sesivu%I?E|1bkiD!xY_Yc4Ug*{ZxIGomr+?gV<F9?} z%grl0fcDLs!x-Ij0-{d<Xc@>ZkQs(sO{Z}69JYsr1I(SUaDcfN<UWvN*?}}j?{=Z5 zLaQ<#!tG<s^PXBX^*`Kyu<!%vOMu$P0c4BaP%}Q~bJ<?6N}FX?@Hcx{_`$-Lc}@{S z@ER9~ecAg{StoykhchhPZ>z3VY6AKn7Ot>xgt;5$URZp>!V4Cj7OHP=ep>6|u*qBX zNaDLc_ONh8*Dw8icS_*F-|+B*g(oZ=Vc`V}Cv^Sjd|3Fv!UY%<=-`2)a<T(Z(81?Q z2Z&~<&nf}Y4}PVzIe^lX0Vu+N$;HVk*8xaB*lN@W77u`kGu*dK1BojX|0)EFC!nfV z*xv*ZuYicd>;szzGJgW9daye{;tQbaZ_a3P0CHjOI`AUC0>lTq4@8691EgW@JCNTE ib|=_9KyjG+VDe!10EH92_2fGMX%zPW`7m{0cK`r}%2>_- literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84.nod b/autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84.nod new file mode 100644 index 0000000000000000000000000000000000000000..de26c1fb8e0b6ef6acb5d499bd09b64fefcf4c99 GIT binary patch literal 196 zcmebCcTq6ZGZZ||>=IXB^4*>>&wFao)c^M9{O&AmSmNT~sTSC?@0yDPCj$g9GB7h# z0GS}V0ZMm3=?PGJ29#a^q*;J?1(d%7N*@5y=ztL_zzn6C&}fi6NDgEkE0hLOU^W1p C8x|-4 literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84A.dbf b/autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84A.dbf new file mode 100644 index 0000000000000000000000000000000000000000..b1460b1b421650476b1adcddce7fd43c6c6cc856 GIT binary patch literal 987 zcmZ`#!D_=W3^f!6gYCB6^99DTEX6&RH7$YRLORB7`H&s<t^1SN!!A~)=@2XLJxNd6 zecnIZ8DqZ9SN?9|$(Z@Lo|nUEelaE$_IV>o>-)64O&`~P_}5E1uZJa<*ZMN7i^fll z-*se8)8#m=bDB$9m_PFie=4-jKMq<r4GaQw*6#_uP@<#AHUbJ|{SnG0fR>0?MZJ*Z z@ebJUdk{GCXgNruRUykO$bI=OSEX{NJf0Cj?BoMc%(m#(s7q8&s&bH%Y7YrI3*<)- zcLMx9KB_yZpzPVqs!rueoTU&T#8y&X0_VUYi<30L*pmKLJq_wjw&T$Ov9RwWaLC73 Io2p>^0mRRI1^@s6 literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84A.rel b/autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84A.rel new file mode 100644 index 000000000000..c5e02b72eda0 --- /dev/null +++ b/autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84A.rel @@ -0,0 +1,161 @@ +[VERSIO] +Vers=4 +SubVers=3 +VersMetaDades=5 +SubVersMetaDades=0 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +MostrarUnitats=0 +descriptor=Identificador Gràfic intern + +[TAULA_PRINCIPAL:N_VERTEXS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de vèrtexs + +[TAULA_PRINCIPAL:LONG_ARC] +visible=0 +descriptor=Longitud de l'arc (projecció) + +[TAULA_PRINCIPAL:LONG_ARCE] +unitats=m +descriptor=Longitud de l'arc (el·lipsoide) + +[TAULA_PRINCIPAL:NODE_INI] +visible=0 
+MostrarUnitats=0 +descriptor=Node inicial + +[TAULA_PRINCIPAL:NODE_FI] +visible=0 +MostrarUnitats=0 +descriptor=Node final + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20240311 09500830 +characterSet=006 +nOrganismes=2 +FileIdentifier=linies_3d_29042 + +[METADADES:ORGANISME_1] +role=009 +OrganisationName=CREAF +IndividualName=Abel Pau +PositionName=Tècnic en SIG + +[IDENTIFICATION] +code=linies_3d_29042 +codeSpace= +DatasetTitle=Linies + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemIdentifier=lat/long-WGS84 + +[EXTENT] +MinX=1.28287097369422 +MaxX=1.98292118178235 +MinY=41.1902658152019 +MaxY=41.6776900669325 +toler_env=0 + +[OVERVIEW] +CreationDate=20240311 09500477 + +[OVERVIEW:ASPECTES_TECNICS] +comment1=Nombre d'arcs: 6 +comment2=El fitxer era anteriorment en la projecció UTM-31N-ETRS89 + +[METADADES:ORGANISME_2] +role=009 +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230912 16505195+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE:PROCESS2] +nOrganismes=1 +history=Vec3D.exe 1 C:\Mapes\ColleccionsPreferides\Catalunya-ETRS89\Altimetria30m\MDE30m_ICC_Aster_mar0.img D:\dades\GDAL_V\KML\multi\+\linies.arc D:\dades\GDAL_V\KML\multi\+\linies_3d.arc 0 +purpose=Incorpora la 3a dimensió en capes vectorials +date=20231031 13263780+0100 + +[QUALITY:LINEAGE:PROCESS2:ORGANISME_1] +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE:PROCESS3] +nOrganismes=1 +history=CanviPrj_64.exe D:\dades\GDAL_V\KML\multi\+\linies_3d.arc D:\dades\GDAL_V\KML\multi\+\linies_3d_WGS84.arc lat/long-WGS84 +purpose=Permet fer la transformació per a vectors estructurats de punts (PNT), d'arcs (ARC) i polígons (POL). Per a transformar fitxers de nodes (NOD) cal transformar el fitxer d'arcs associat. 
+date=20231113 11502174+0100 +NomFitxer=C:\miramon\CanviPrj_64.exe + +[QUALITY:LINEAGE:PROCESS3:ORGANISME_1] +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE:PROCESS3:SOFTWARE_REFERENCE] +Titol= +Edition= +CollectiveTitle= +ISBN= +ISSN= + +[QUALITY:LINEAGE:PROCESS3:INOUT1] +identifier=Param1 +TypeValues=S +ResultUnits= +source=1 + +[QUALITY:LINEAGE:SOURCE1] +NomFitxer=linies_3d.arc +processes=4,5 + +[QUALITY:LINEAGE:PROCESS4] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230912 16505195+0200 + +[QUALITY:LINEAGE:PROCESS4:ORGANISME_1] +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE:PROCESS5] +nOrganismes=1 +history=Vec3D.exe 1 C:\Mapes\ColleccionsPreferides\Catalunya-ETRS89\Altimetria30m\MDE30m_ICC_Aster_mar0.img D:\dades\GDAL_V\KML\multi\+\linies.arc D:\dades\GDAL_V\KML\multi\+\linies_3d.arc 0 +purpose=Incorpora la 3a dimensió en capes vectorials +date=20231031 13263780+0100 + +[QUALITY:LINEAGE:PROCESS5:ORGANISME_1] +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE:PROCESS3:INOUT2] +identifier=Param2 +sentit=1 +TypeValues=S +ResultUnits= +source=<Parent> + +[QUALITY:LINEAGE:PROCESS3:INOUT3] +identifier=Param3 +TypeValues=C +ResultValue=lat/long-WGS84 +ResultUnits= + +[QUALITY:LINEAGE] +processes=1,2,3 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampLongitudArc=LONG_ARC +NomCampLongitudArcEllipsoidal=LONG_ARCE +NomCampNodeIni=NODE_INI +NomCampNodeFi=NODE_FI diff --git a/autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84N.dbf b/autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84N.dbf new file mode 100644 index 0000000000000000000000000000000000000000..9376c48a6ae80cb46c99d4fdfb79940973d7247c GIT binary patch literal 525 zcmZRs=H%gIU|?uu&;gQYK!kz8(<R<L$kENynSsF%BrA%h#4*S@INmYd&))^Re28a2 zXfR0H6(Y}wrk_^<8!*5oXlRU0RKXCNC=RtoB&fw|8xC_#NHEuw1hr<^)S6%o9R+i2 LYH_&N0=rrO3?@Fa literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84N.rel b/autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84N.rel new file mode 100644 index 000000000000..750942aead92 --- /dev/null +++ b/autotest/ogr/data/miramon/Arcs/3dArcs/linies_3d_WGS84N.rel @@ -0,0 +1,83 @@ +[VERSIO] +Vers=4 +SubVers=3 +VersMetaDades=5 +SubVersMetaDades=0 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +MostrarUnitats=0 +descriptor=Identificador Gràfic intern + +[TAULA_PRINCIPAL:ARCS_A_NOD] +MostrarUnitats=0 +descriptor=Nombre d'arcs al node + +[TAULA_PRINCIPAL:TIPUS_NODE] +MostrarUnitats=0 +descriptor=Tipus de node + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20240311 09500993 +characterSet=006 +nOrganismes=2 +FileIdentifier=linies_3d_29042 + +[METADADES:ORGANISME_1] +role=009 +OrganisationName=CREAF +IndividualName=Abel Pau +PositionName=Tècnic en SIG + +[IDENTIFICATION] +code=linies_3d_29042 +codeSpace= +DatasetTitle=Linies + +[OVERVIEW:ASPECTES_TECNICS] +comment1=Nombre d'arcs: 6 +comment2=El fitxer era anteriorment en la projecció UTM-31N-ETRS89 + +[EXTENT] +MinX=1.28287097369422 +MaxX=1.98292118178235 +MinY=41.1902658152019 +MaxY=41.6776900669325 +toler_env=0 + +[OVERVIEW] +CreationDate=20240311 09500616 + +[METADADES:ORGANISME_2] +role=009 +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230912 16505195+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE:PROCESS2] 
+nOrganismes=1 +history=Vec3D.exe 1 C:\Mapes\ColleccionsPreferides\Catalunya-ETRS89\Altimetria30m\MDE30m_ICC_Aster_mar0.img D:\dades\GDAL_V\KML\multi\+\linies.arc D:\dades\GDAL_V\KML\multi\+\linies_3d.arc 0 +purpose=Incorpora la 3a dimensió en capes vectorials +date=20231031 13263780+0100 + +[QUALITY:LINEAGE:PROCESS2:ORGANISME_1] +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1,2 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampArcsANode=ARCS_A_NOD +NomCampTipusNode=TIPUS_NODE diff --git a/autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARC.arc b/autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARC.arc new file mode 100644 index 0000000000000000000000000000000000000000..e089cf12d0597d83a61e9e2099899f85764754c9 GIT binary patch literal 48 ecmZ<^a#k?ZGh~>W<#7Ajver5%{STeb00jUR(iH0e literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARC.nod b/autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARC.nod new file mode 100644 index 0000000000000000000000000000000000000000..88197ddde3ebef69a634e4be9114d40b2cd0d79b GIT binary patch literal 48 ecmebCcTq6ZGh~>W<#7Ajver5%{STeb00jUT;}rG) literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARCA.dbf b/autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARCA.dbf new file mode 100644 index 0000000000000000000000000000000000000000..683995629988d9964fba1f3f4be70ce48cf2ac7c GIT binary patch literal 193 zcmZRsW*1=qf`bfMAPN-#WjtNt-Gdz6Je?UB{6LbTs45uz;=^2nLR=$)amf4l`?<$E i1_8B0^|PTG;OFn+8t>`n371EeW&q2(fuyn9&kF#pe-JMK literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARCA.rel b/autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARCA.rel new file mode 100644 index 000000000000..429b66fee63c --- /dev/null +++ b/autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARCA.rel @@ -0,0 +1,41 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemIdentifier=UTM-31N-ETRS89 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:N_VERTEXS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de vèrtexs + +[TAULA_PRINCIPAL:LONG_ARC] +descriptor=Longitud de l'arc + +[TAULA_PRINCIPAL:NODE_INI] +visible=0 +MostrarUnitats=0 +descriptor=Node inicial + +[TAULA_PRINCIPAL:NODE_FI] +visible=0 +MostrarUnitats=0 +descriptor=Node final + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampLongitudArc=LONG_ARC +NomCampNodeIni=NODE_INI +NomCampNodeFi=NODE_FI diff --git a/autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARCN.dbf b/autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARCN.dbf new file mode 100644 index 0000000000000000000000000000000000000000..95689ae5c6954ec096059cfdcca4dea9e26d8711 GIT binary patch literal 129 zcmZRsW*1=qf<^`%5QPeWGM+B+?m>=jp3V#mejrIvR22-4LC(SPj`4o}F4*NmJOe_5 NLDH@ec}6t-ya4143pfA( literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARCN.rel b/autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARCN.rel new file mode 100644 index 000000000000..832186cce6c1 --- /dev/null +++ b/autotest/ogr/data/miramon/Arcs/EmptyArcs/Empty_ARCN.rel @@ -0,0 +1,26 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:ARCS_A_NOD] +MostrarUnitats=0 
+descriptor=Nombre d'arcs al node + +[TAULA_PRINCIPAL:TIPUS_NODE] +MostrarUnitats=0 +descriptor=Tipus de node + +[GEOMETRIA_I_TOPOLOGIA] +NomCampArcsANode=ARCS_A_NOD +NomCampTipusNode=TIPUS_NODE diff --git a/autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFile.arc b/autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFile.arc new file mode 100644 index 0000000000000000000000000000000000000000..a5084dcc5b725485e9d8e3cb5f8cc69f76680ef6 GIT binary patch literal 592 zcmZ<^a#k?ZGh|3gDSH?GwbVht)#A2E>Qo1pplciC)G{4HUnXja$@e<2FhBshI=MM7 z<lUz<IyC%W@G-%x%YhZBOn?!h7DSy}7szU+G1-CpW{O+ymL><zNTp8mkck-Pvjf#8 z05vcHX=Wf^5%_cp1NVG~1-bIM2ljP4TurUlZ-3wCU>n*v{o3DlhZSolylTJJ<p5HD z0jLk;K9JonV|6UTr4k+f@4FQh_^s06m%%Jn?<I{6M#TwpqMkH5IBmYoS6SQUzy?(N z0jLi|gY2%G@b~Pc!xauN|4G{Qtym(`?9lFX+{=7<t;00dV=G;cbvpQaJ>osOwH@x? zwUq|e-@PX|z|_OS4;C(`o8wHluXQ+t@lBnwYgx6!%GnZ)t8|(j$`&5{FZ8X#!7ySu z^J9?~hm4C7c_nRG4wl@~hkM_3!NVs=^oZj(_X!S{+y!?t9It_g<FbhIMfEj^@Px$^ zEWBXhgC1^oS@&j$ooRrF!`g$K9KG!I4vB6YRTs}RI|!sNojsYY8XjJ-aQwC<Y|7a^ e%?`{p&V{wd>l_Y*K->)rU#B;<3r}xpb^rhvLhHEz literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFile.nod b/autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFile.nod new file mode 100644 index 0000000000000000000000000000000000000000..2b3fa6ee1e50370394723c3eb9280a167191edf6 GIT binary patch literal 172 zcmebCcTq6ZGh|3gDSH?GwbVht)#A2E>Qsj?zNu4oEvt42eVM2wCg1D8!2kh_49pA# pKqiQ;fYJ?6x&um2fYLLd^a3co0!X6+MyLRcW`gozG&7VBq5*xM6Pf@3 literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFileA.dbf b/autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFileA.dbf new file mode 100644 index 0000000000000000000000000000000000000000..cb3e15a7209bbbe5b85fd7c7b68ae77740320221 GIT binary patch literal 789 zcmb7;y$*sf6h<%6Flk(L?-LN((gGv$7ZOtvB*v{T;_7<~q;Lx<gB|GMJN<5Z_#FHr z0C?Va_-@t}pec0SWmS^{sL|dwN0fe*U0=SRJO0tCT4&u*H+_FSK($4w8`Ug+tzq)j zsNMHxeNRngyaP_=XWj<S3{U<t3$vezYlW^*U`QF0K`2-(tPr)cPBT0}?iTMMb%_{5 znkF)cMJQx~AreDG;yE%YEZ#%s6g#9`1_|RdvR;P6B19zd?;|*Nkt8DFsSFrTQfY^a OcZPAfctR?R2mAq*eOhk- literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFileA.rel b/autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFileA.rel new file mode 100644 index 000000000000..9f92d9a62f2c --- /dev/null +++ b/autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFileA.rel @@ -0,0 +1,102 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat,spa,eng +MDIdiom=cat,spa,eng +dateStamp=20230628 16235471+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=16b4eae3-8f74-4145-95db-babb7f0feb0f_SimpleArcFileA + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=16b4eae3-8f74-4145-95db-babb7f0feb0f_SimpleArcFileA +codeSpace= +DatasetTitle=Simple Arc File + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemDefinition=Local +HorizontalSystemIdentifier=plane +unitats=STB#T_pixels +unitatsY=? 
+ +[EXTENT] +toler_env=0 +MinX=351.333967649907 +MaxX=1369.30161750719 +MinY=201.191246431919 +MaxY=931.88582302564 + +[OVERVIEW] +CreationDate=20230628 16235470+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +descriptor_spa=Identificador Gráfico interno +descriptor_eng=Internal Graphic identifier +MostrarUnitats=0 + +[TAULA_PRINCIPAL:N_VERTEXS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de vèrtexs +descriptor_spa=Número de vertices +descriptor_eng=Number of vertices + +[TAULA_PRINCIPAL:LONG_ARC] +descriptor=Longitud de l'arc +descriptor_spa=Longitud del arco +descriptor_eng=Lenght of arc + +[TAULA_PRINCIPAL:NODE_INI] +visible=0 +MostrarUnitats=0 +descriptor=Node inicial +descriptor_spa=Nodo inicial +descriptor_eng=Initial node + +[TAULA_PRINCIPAL:NODE_FI] +visible=0 +MostrarUnitats=0 +descriptor=Node final +descriptor_spa=Nodo final +descriptor_eng=Final node + +[TAULA_PRINCIPAL:ATT1] +descriptor=Atributte1 + +[TAULA_PRINCIPAL:ATT2] +descriptor=Attribute2 + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16235471+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampLongitudArc=LONG_ARC +NomCampNodeIni=NODE_INI +NomCampNodeFi=NODE_FI diff --git a/autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFileN.dbf b/autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFileN.dbf new file mode 100644 index 0000000000000000000000000000000000000000..93a6c3f4a05e7123170d8498de79943a641d9018 GIT binary patch literal 481 zcmZRsW|QGyU|?uu&;gQYK!kz8(<R<L$kENynSsF%BrA%h#4*S@INmYd&))^Re28a2 zXfR0H6(Y}wrk_^<8!*5oXlRU0RKXCNC=RtoB&fw|8xC_#NHEuw1hr-)s5Qr~7692( BIRgLy literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFileN.rel b/autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFileN.rel new file mode 100644 index 000000000000..81b6ce35bc31 --- /dev/null +++ b/autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFileN.rel @@ -0,0 +1,64 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16235470+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=e4365dc3-82f4-4da8-ae1f-3f73922adc27_SimpleArcFileN + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=e4365dc3-82f4-4da8-ae1f-3f73922adc27_SimpleArcFileN +codeSpace= +DatasetTitle=Simple Arc File [píxels] + +[EXTENT] +toler_env=0 + +[OVERVIEW] +CreationDate=20230628 16235469+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:ARCS_A_NOD] +MostrarUnitats=0 +descriptor=Nombre d'arcs al node + +[TAULA_PRINCIPAL:TIPUS_NODE] +MostrarUnitats=0 +descriptor=Tipus de node + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16235470+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampArcsANode=ARCS_A_NOD +NomCampTipusNode=TIPUS_NODE diff --git 
a/autotest/ogr/data/miramon/Points/3dpoints/Some3dPoints.pnt b/autotest/ogr/data/miramon/Points/3dpoints/Some3dPoints.pnt new file mode 100644 index 0000000000000000000000000000000000000000..ed3737140bc0e3cef4de2eadb0b3d2a9dfa0b116 GIT binary patch literal 1568 zcmZ{jF=$g!6o#LJB5^6&f;gmgdoE&#gG=OfYA0#Lp~F+_5(|RFNv%uq5h^$&9R!m> zggQ8A5C?f~QkN<b1kpG+G`NWEAV|c~?|<>9kD?j!^MCi;bI(2Zyty;yrb@?42PYd_ z)!Sc>OrI|F*B+qQ`6u1#bfwV$`4pWDMnA8Y%jFMRe?1K24)L9K`)1zPYPC;!KRZ}D zsXUG+TPyG1^?KX##Esu0-=livZQfH>UB0<K47GkXEl$W2SCHqt7dlT>%9Agazs3bx zc;5%!lRp+WpKJZrcz0$MPuL;lON)&yt#4P;tMZQLesAR2w|;jNpY=uS^4*z1C%;a) z74O$N^>6At@29R6dG%w3b?UgD_uTJe-cx2t`5gZ%jGK8+AHLh`9jWU}-gEx4yx$o; znqQ|s8$M6_>BANI(W~$L>wUWClW2kqFD)if<}dE(9zw&tL7@8=;8yVyaQa$oz^&q^ z{u3X;>3h3PyjAhD@D_6r!>!_T@PE}a57(UZiMJ|#0nWYC;-w_AiZ8;|=jMeZvWnN? zFPV=j+$w&<dB~sTLB*HhS)Yivir+D(f9|7r3$A$_EF_Ut{61WBGbE2y{Gs#g%KsRy zx$MEMihl;resHUJ3fFhC2Dgg8f*;TW!mZ*P@XLH~HMmv$JzQmmaI5%dxbJTj--Nq9 ztN1s#<}@XbRs09s=UK&n!~ITH@nGNId0<{Wx8nQZ*&M*F;$v{vZ52Q4JiM#t+4F1f z`Sd(`4n1$4FW2Y!@w~V%o(I3b-`nf%6J-Sf{qcMGecTt<zk8qi={VQp{<t6B{|6oF BYw-X8 literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Points/3dpoints/Some3dPointsT.dbf b/autotest/ogr/data/miramon/Points/3dpoints/Some3dPointsT.dbf new file mode 100644 index 0000000000000000000000000000000000000000..e55c35b2df32598a926fa8f6c1cfb903207164f7 GIT binary patch literal 2710 zcmb`H&uiN-6vx$Nu)*kM>^{LRD`P`Xk|p_Q5+^frO-bBcjn{@zNVh>-=wX-r5&Z}D zmrm9XD2g%BR2}3*U-_f&(|dli-?P_m5kh~CzUVm^ZxE^$-D+Jdt2si=puPyUR83np z8?$KiS*S0%d0m(dMt>IQUewF_2!!4RdeN-v(yp2Jp9lK7Y|qzC*Q}ktqd;%EwzGSz z|4`TQZRUkRoAX0`Sy!vit%26V9O`H1^``6=MO&i9puZfZN5krEbz0i<Uj-YEu^j|w zIaX(PKW_2GuD`>V+a13CIn3{W4U3mseBIyN^e1{dr}V>;5)eR%0{Fy~;v!?zp#Z4F zbn~!t|EC@t;A4oAL}czA3M4z&)i_Ex6a#j6fYT@r?5SL3z*u5{(gi5uP<t7aQd_Y@ z0>}tgV<_%W3X2@GuiNk2+y3I}FaFkF-S<ulAG#Q(P&ts83{Iec=o(E7(A5>_p(0Rw z)vl;;83YQPC!%tDr@$~H5!XKE%A-uRAD+P}RF>Eo6qUpUsH;8Z+V_e<<;-VLg8q4E z>l-Oijq({(aZP5-l}F_OF->Lyr8_l)!~m7OcB;P{mD1aD?lVYfVlikGI9Dw3!+C__ zKxtxt-c<(TsZav+wf%pIX1W4=1|`s#CKJ}xfO4IIsv(1Et~@G_%ix47rh9v$D-Qnv DL0U&Q literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Points/3dpoints/Some3dPointsT.rel b/autotest/ogr/data/miramon/Points/3dpoints/Some3dPointsT.rel new file mode 100644 index 000000000000..94ff8a8dfe67 --- /dev/null +++ b/autotest/ogr/data/miramon/Points/3dpoints/Some3dPointsT.rel @@ -0,0 +1,112 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20240318 15131947+0100 +characterSet=006 +nOrganismes=2 +FileIdentifier=LIDAR3d_totT_14533 + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=CREAF + +[METADADES:ORGANISME_2] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[IDENTIFICATION] +code=LIDAR3d_totT_32431 +codeSpace= +DatasetTitle=Selecció de -> Selecció de -> Fitxer extret de D:\[...]\LIDAR3d_tot.shp + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemIdentifier=UTM-31N-ETRS89 + +[EXTENT] +MinX=440544.58 +MaxX=440551.66 +MinY=4635313.38 +MaxY=4635319.81 +toler_env=0 + +[OVERVIEW] +CreationDate=20240318 15131943+0100 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_1_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +descriptor=Identificador Gràfic intern +visible=0 +simbolitzable=0 +MostrarUnitats=0 + +[TAULA_PRINCIPAL:INTENS] +MostrarUnitats=0 + +[TAULA_PRINCIPAL:ID_CLAS] +MostrarUnitats=0 + +[TAULA_PRINCIPAL:ANGLE] +MostrarUnitats=0 + +[TAULA_PRINCIPAL:RETURN_NR] +MostrarUnitats=0 + 
+[TAULA_PRINCIPAL:N_T_RETURN] +MostrarUnitats=0 + +[TAULA_PRINCIPAL:PULSE_TIME] +MostrarUnitats=0 + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=VecSelec_64.exe D:\GitHub-repository\gdal\autotest\ogr\data\miramon\Points\LidarRectangle\TMP0000.SEL D:\GitHub-repository\gdal\autotest\ogr\data\miramon\Points\LidarRectangle\Some3dPoints +purpose=Un fitxer de text indica quines són les entitats gràfiques i registres de la base de dades que cal desar en el fitxer de sortida. En aquesta ajuda ens referirem a aquest fitxer com a fitxer de seleccions. Es recomana l'extensió SEL, tot i que no és obligatòria. +date=20240318 15131959+0100 +NomFitxer=C:\MiraMon\VecSelec_64.exe + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[QUALITY:LINEAGE:PROCESS1:SOFTWARE_REFERENCE] +Titol= +Edition= +CollectiveTitle= +ISBN= +ISSN= + +[QUALITY:LINEAGE:PROCESS1:INOUT1] +identifier=Param1 +TypeValues=S +ResultUnits= +source=1 + +[QUALITY:LINEAGE:SOURCE1] +NomFitxer=TMP0000.SEL + +[QUALITY:LINEAGE:PROCESS1:INOUT2] +identifier=Param2 +sentit=1 +TypeValues=S +ResultUnits= +source=2 + +[QUALITY:LINEAGE:SOURCE2] +NomFitxer=Some3dPoints + +[QUALITY:LINEAGE] +processes=1 diff --git a/autotest/ogr/data/miramon/Points/EmptyPoints/Empty_PNT.pnt b/autotest/ogr/data/miramon/Points/EmptyPoints/Empty_PNT.pnt new file mode 100644 index 0000000000000000000000000000000000000000..d06314c2b7ade160ee0e85c297c51342e3c571b8 GIT binary patch literal 128 ucmWIW3sErCGZdPd<#7Ajver6WGy@JWr<9??=85?~V(I@tPysXxrXB!P$21QB literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Points/EmptyPoints/Empty_PNTT.dbf b/autotest/ogr/data/miramon/Points/EmptyPoints/Empty_PNTT.dbf new file mode 100644 index 0000000000000000000000000000000000000000..82935df8b1a3be5791d5c7b1b20c1691da540a9c GIT binary patch literal 97 zcmZRsW*1=qf<y*!5QPeWGM+B+?m>=jp3V#mejrIMR22-42tH5=0|O6&4`%TK04Pxe Avj6}9 literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Points/EmptyPoints/Empty_PNTT.rel b/autotest/ogr/data/miramon/Points/EmptyPoints/Empty_PNTT.rel new file mode 100644 index 000000000000..329c866f3352 --- /dev/null +++ b/autotest/ogr/data/miramon/Points/EmptyPoints/Empty_PNTT.rel @@ -0,0 +1,23 @@ +[VERSIO] +Vers=4 +SubVers=3 +VersMetaDades=5 +SubVersMetaDades=0 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +descriptor=Identificador Gràfic intern + + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemIdentifier=UTM-31N-ETRS89 + +[EXTENT] +MinX=2.9E+301 +MaxX=2.9E+301 +MinY=2.9E+301 +MaxY=2.9E+301 + diff --git a/autotest/ogr/data/miramon/Points/SimplePoints/SimplePointsFile.pnt b/autotest/ogr/data/miramon/Points/SimplePoints/SimplePointsFile.pnt new file mode 100644 index 0000000000000000000000000000000000000000..f543cba80f3e4208d1212fbe4fdcdc6db387563e GIT binary patch literal 96 zcmWIW3sErCGh|xgZgJZrwbbG8>DU&wDNPQ!6DphwW7{02T$9KvY3p)eW`KbECRt0K Xb2q@nVftX|VfqE4bV9nOwmAR*uOuXR literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Points/SimplePoints/SimplePointsFileT.dbf b/autotest/ogr/data/miramon/Points/SimplePoints/SimplePointsFileT.dbf new file mode 100644 index 0000000000000000000000000000000000000000..57527da74671340fbf8db0f3f2debbbfe2da2835 GIT binary patch literal 254 zcmbPG%_hstz`)SMpavw-fCvMFr%SwhkfWQYGXsMkNR|stiDO8JAyf#c1PXXy3<d_E ze2}M8Xozb(145o3DBlRg+zbr72n9w8aA4pFXDTQ-A+b@IhR#S_7bG?c6UkT<aS#Up Dx6B-o literal 0 HcmV?d00001 diff --git 
a/autotest/ogr/data/miramon/Points/SimplePoints/SimplePointsFileT.rel b/autotest/ogr/data/miramon/Points/SimplePoints/SimplePointsFileT.rel new file mode 100644 index 000000000000..69a72d07ba46 --- /dev/null +++ b/autotest/ogr/data/miramon/Points/SimplePoints/SimplePointsFileT.rel @@ -0,0 +1,58 @@ +[VERSIO] +Vers=4 +SubVers=3 +VersMetaDades=5 +SubVersMetaDades=0 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +simbolitzable=0 +MostrarUnitats=0 +descriptor=Identificador Gràfic intern +descriptor_spa=Identificador Gráfico interno +descriptor_eng=Internal Graphic identifier + +[TAULA_PRINCIPAL:ATT1] +descriptor=Atributte1 + +[TAULA_PRINCIPAL:ATTRIBUTE_2] +descriptor=Atributte2 + +[METADADES] +language=cat,spa,eng +MDIdiom=cat,spa,eng +dateStamp=20230628 16344458+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=68ddf845-79e8-4791-bf7a-5459eb951a04_SimplePointsFile + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=68ddf845-79e8-4791-bf7a-5459eb951a04_SimplePointsFile +codeSpace= +DatasetTitle=Simple Points File + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemDefinition=Local +HorizontalSystemIdentifier=plane +unitats=STB#T_pixels + +[EXTENT] +MinX=342.325404376834 +MaxX=594.503182156354 +MinY=715.680304471881 +MaxY=848.806850618409 +toler_env=0 + +[OVERVIEW] +CreationDate=20230628 16351606+0200 +ContentDate=20230629 12064184+0200 diff --git a/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3d.arc b/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3d.arc new file mode 100644 index 0000000000000000000000000000000000000000..00bd90e368b3d2a2fb145c6bac66fc29061bc725 GIT binary patch literal 1432 zcmZ`%K}b|l6diRG647y^t@<Aq77{uR%)*86K7o;|W)@;d=>!f_ri|jkMTE2vvA{GF zF{D9JH?Gx1(cnM{iW||;MYu931sCHg7v1~*KaW2*eDMGM|GsnXJ@>rl#P8Y7jm@>| zqZ{QP8!abS6<qrjv7D(_f4i5z$DFg+X2;#_m{T5bS1R%0{Y;bP#LIebzYSYXBDWu0 zC0wwha^VZbVuJP1tArwdJ6rLVg{NH#_^UUY-5KJcKKP6srB{$5b7Ohy+x)Dp7SpX0 z6V&h4r?ItT(nWgq4ye!8<(>Qu(#5<Cy-Fz-(Uab9kNln;QqTz=^nuU3tjFW;;n3uA z-_Z-B6Flhi^G-Y->)d;m&-~fxH#*@T@D#OwjOD>tBWt~XoNM^G`&z+w%$ZCkSAK~3 z!kX-IE2fSfoVGq!@DV!UC;IYC4~fUU*we#Es!3H3n10{^kG{-Hd;OT(VAZtQfkDHA zukh2)d*|`E#@FGOpO+pzF@C~F$;TYfk9z{V)Cr$({&HW|m;185tmivFeQuJE^TOPc zFY$6d$(MM!FXtg^|67Tx8bwL_{5;Ar@Oazin4KG}Q+(ep_yTzU6f+}3^*XOF2k}DD zh@!maGd{!zPs^)g^0u4Tb!pq=!*1T5zFAmDd3B(V`*8hEH?MX0>!bvp=VAOvVP5Ov z{P;Eb&NAAh<@GiAteb~^%m*FF!^Z+&xSrn+`Zaj?=bxYP{<-NFb<xl4<J|mu{{h{U By;A@H literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3d.nod b/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3d.nod new file mode 100644 index 0000000000000000000000000000000000000000..6b26237eea7883df81879f6c3a3f17188df715b9 GIT binary patch literal 192 zcmebCcTq6ZGrYU4VLR7sJ$XmD`E6@%8Ob|Ntot3m^mw4-p|2WWI8OvRvN1paGmuIE zVh~*br5m911R%}Cz{s!wNQ2a`fYKa5niGgYfE9?DfEZ>TNDO2aOr8bEW&{aB0f@~G R#2_<3G)OH-Kgb-AS^z^&6(j%v literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3d.pol b/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3d.pol new file mode 100644 index 0000000000000000000000000000000000000000..5e690dfd30d47573654f4ee92c853ca5025c3860 GIT binary patch literal 623 zcmWIW_fas^Gi2V@u$^nRp1k8>ewH=QjpQ9C*8PrOdOXnaP2i3&ofCnMYzzp%1Z6S; zX;vU+24avH3lM|EVDcbw5CEwG0hoG_IuHf{n7tc<`RBYqvNunt`G5hEy&%0HgMUO6 zYr5)AamYHF>>*V8?LZJ)s(O!syrbOwwl%kq^aI^}?kdpzAUA+8NWTJ5{}<o*eO|?N 
z4m~<&eWE`1I%FhHojgS!p+E8PWtQa-e`Dx30P4SCcDiDvXp_UbS1aet3_szpsKB<X z3#cC!ZZLac;RXr|5C+-r0A&1ge(4-6(c>UK^Q4}g=pzT1|6%TD3FDi)7ifQ4TH3Pf zXzmXH>i0_e*F3+m*`fd5jqvFvr!n-y>_7BX;|nJ+{E+>h0MtL>?y__LqB<NzpSM0! ze|O6P91koY*RimJ2vA(HfFhfT6G#Bv4U%F7sbU7nGcbafATb7ZaP%^QJjV=+VW3iw THbxGRAXuIiB+m-wgG>bg+<}_6 literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dA.dbf b/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dA.dbf new file mode 100644 index 0000000000000000000000000000000000000000..36aebaac2b495b3a78615d371955062395aa18f6 GIT binary patch literal 1253 zcma)4OA5j;5FO}Fa3i|U35NXIG|Qq@3KF4;xa;0CcsXxot3~Megk}-w`^=k}CVOP7 z1ra^z-rwS^iJDz^Xsdm*B}xOk5t7th>-JLL&VTshDIL11?d_vKn-)l?UEMXQ`SMem z-u!-`asE=FqY-GQpwW?YZ>Heq%OOzMM+x-*NK3ZSqoNRm@#?4OP)~q12KG*XpWGb9 z!|oN#7e%ep9PmsqN!Z6`;P^snUCA!Mo?sGGsyWg^8!mJ?dr`2aDv)2!)W5yx;Pmk6 r;{O!pBzeubi%1FEhdn8{6FF0`tnv2XOfX)-PDPTjhP{S6fl2lSj|`l% literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dA.rel b/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dA.rel new file mode 100644 index 000000000000..4bdd6908fabc --- /dev/null +++ b/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dA.rel @@ -0,0 +1,141 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20240319 11074377+0100 +characterSet=006 +nOrganismes=2 +FileIdentifier=out_ID_B_tin_3d_27042 + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[METADADES:ORGANISME_2] +role=009 +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=out_ID_B_tin_3d_27042 +codeSpace= +DatasetTitle=Triangulació de Delaunay - Selecció de -> Selecció de -> Retall de->D:\dades\20220909_Thiessen\AltEmporda\centroides_02.pnt + +[OVERVIEW:ASPECTES_TECNICS] +Ciclat1=tin_3d.pol +comment1=S'ha transformat el fitxer 'MARC0000.vec' +comment2=S'ha donat una estructura topològica a l'original + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemIdentifier=UTM-31N-ETRS89 + +[EXTENT] +MinX=510886.760465633 +MaxX=511161.917984244 +MinY=4660885.499725 +MaxY=4661425.355 +toler_env=0 + +[OVERVIEW] +CreationDate=20240319 11074377+0100 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_1 + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +MostrarUnitats=0 +descriptor=Identificador Gràfic intern + +[TAULA_PRINCIPAL:N_VERTEXS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de vèrtexs + +[TAULA_PRINCIPAL:LONG_ARC] +descriptor=Longitud de l'arc + +[TAULA_PRINCIPAL:NODE_INI] +visible=0 +MostrarUnitats=0 +descriptor=Node inicial + +[TAULA_PRINCIPAL:NODE_FI] +visible=0 +MostrarUnitats=0 +descriptor=Node final + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=Thiessen_64.exe D:\dades\20220909_Thiessen\AltEmporda\sel2\punts.pnt D:\dades\20220909_Thiessen\AltEmporda\sel2\out_ID_B.pol ID_B 0 /TIN=D:\dades\20220909_Thiessen\AltEmporda\sel2\out_ID_B_tin.pol +date=20220912 14134491+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE:PROCESS2] +nOrganismes=1 +history=Vec3D.exe 1 C:\Mapes\ColleccionsPreferides\Catalunya-ETRS89\Altimetria30m\MDE30m_ICC_Aster_mar0.img F:\dades\20220909_Thiessen\AltEmporda\+\sel2\out_ID_B_tin.arc F:\dades\20220909_Thiessen\AltEmporda\+\sel2\out_ID_B_tin_3d.arc 0 +purpose=Incorpora la 3a dimensió en capes vectorials +date=20231212 15421367+0100 + +[QUALITY:LINEAGE:PROCESS2:ORGANISME_1] +OrganisationName=Students and educational 
institutions + +[QUALITY:LINEAGE:PROCESS3] +nOrganismes=1 +history=VecSelec_64.exe /EMANCIPA D:\GitHub-repository\gdal\autotest\ogr\data\miramon\Polygons\3dPolygons\TMP0000.SEL D:\GitHub-repository\gdal\autotest\ogr\data\miramon\Polygons\3dPolygons\tin_3d +purpose=Un fitxer de text indica quines són les entitats gràfiques i registres de la base de dades que cal desar en el fitxer de sortida. En aquesta ajuda ens referirem a aquest fitxer com a fitxer de seleccions. Es recomana l'extensió SEL, tot i que no és obligatòria. +date=20240318 15230782+0100 +NomFitxer=C:\MiraMon\VecSelec_64.exe + +[QUALITY:LINEAGE:PROCESS3:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[QUALITY:LINEAGE:PROCESS3:SOFTWARE_REFERENCE] +Titol= +Edition= +CollectiveTitle= +ISBN= +ISSN= + +[QUALITY:LINEAGE:PROCESS3:INOUT1] +identifier=EMANCIPA +ResultUnits= + +[QUALITY:LINEAGE:PROCESS3:INOUT2] +identifier=Param1 +TypeValues=S +ResultUnits= +source=1 + +[QUALITY:LINEAGE:SOURCE1] +NomFitxer=TMP0000.SEL + +[QUALITY:LINEAGE:PROCESS3:INOUT3] +identifier=Param2 +sentit=1 +TypeValues=S +ResultUnits= +source=2 + +[QUALITY:LINEAGE:SOURCE2] +NomFitxer=tin_3d + +[QUALITY:LINEAGE] +processes=1,2,3 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampLongitudArc=LONG_ARC +NomCampNodeIni=NODE_INI +NomCampNodeFi=NODE_FI diff --git a/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dN.dbf b/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dN.dbf new file mode 100644 index 0000000000000000000000000000000000000000..d1f5c153999180028d278610a9a95a0dd5bbe138 GIT binary patch literal 171 zcmZRsVHRRzU|?uu0Fh`wgn_}+CEh*A(aqDDfx!<X%Y>%HF~~VM-Z9?K-vzsTh-W}( rFi6@JBF~7XpI1S_K*88RLBSAA8i7e;FlnM-WT>EEs$d3`H3PB%BC-*x literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dN.rel b/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dN.rel new file mode 100644 index 000000000000..42bdccbad750 --- /dev/null +++ b/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dN.rel @@ -0,0 +1,108 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20240319 11074377+0100 +characterSet=006 +nOrganismes=2 +FileIdentifier=MARC0000_10835 + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[METADADES:ORGANISME_2] +role=009 +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=MARC0000_07743 +codeSpace= +DatasetTitle=Triangulació de Delaunay - Selecció de -> Selecció de -> Retall de->D:\dades\20220909_Thiessen\AltEmporda\centroides_02.pnt [Plantill] + +[OVERVIEW:ASPECTES_TECNICS] +comment1=Nodes del fitxer d'arcs 'D:\dades\20220909_Thiessen\AltEmporda\sel2\out_ID_B_tin.arc' + +[EXTENT] +MinX=510886.760465633 +MaxX=511161.917984244 +MinY=4660885.499725 +MaxY=4661425.355 +toler_env=0 + +[OVERVIEW] +CreationDate=20240319 11074377+0100 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +simbolitzable=0 +MostrarUnitats=0 +descriptor=Identificador Gràfic intern + +[TAULA_PRINCIPAL:ARCS_A_NOD] +MostrarUnitats=0 +descriptor=Nombre d'arcs al node + +[TAULA_PRINCIPAL:TIPUS_NODE] +MostrarUnitats=0 +descriptor=Tipus de node + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=VecSelec_64.exe /EMANCIPA D:\GitHub-repository\gdal\autotest\ogr\data\miramon\Polygons\3dPolygons\TMP0000.SEL D:\GitHub-repository\gdal\autotest\ogr\data\miramon\Polygons\3dPolygons\tin_3d +purpose=Un 
fitxer de text indica quines són les entitats gràfiques i registres de la base de dades que cal desar en el fitxer de sortida. En aquesta ajuda ens referirem a aquest fitxer com a fitxer de seleccions. Es recomana l'extensió SEL, tot i que no és obligatòria. +date=20240318 15230787+0100 +NomFitxer=C:\MiraMon\VecSelec_64.exe + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[QUALITY:LINEAGE:PROCESS1:SOFTWARE_REFERENCE] +Titol= +Edition= +CollectiveTitle= +ISBN= +ISSN= + +[QUALITY:LINEAGE:PROCESS1:INOUT1] +identifier=EMANCIPA +ResultUnits= + +[QUALITY:LINEAGE:PROCESS1:INOUT2] +identifier=Param1 +TypeValues=S +ResultUnits= +source=1 + +[QUALITY:LINEAGE:SOURCE1] +NomFitxer=TMP0000.SEL + +[QUALITY:LINEAGE:PROCESS1:INOUT3] +identifier=Param2 +sentit=1 +TypeValues=S +ResultUnits= +source=2 + +[QUALITY:LINEAGE:SOURCE2] +NomFitxer=tin_3d + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampArcsANode=ARCS_A_NOD +NomCampTipusNode=TIPUS_NODE diff --git a/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dP.dbf b/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dP.dbf new file mode 100644 index 0000000000000000000000000000000000000000..d2a443329d447ffff907483ed8d0629c57b183a1 GIT binary patch literal 565 zcmZ{e-AV*8428Q6h#=mG4^SV#&@}x?E{C-)13E5s5%1<je0bB&sxW9SnltmAoMiYM zZZC~7_dC#cQ+LKpkISKsPt)F*xzS-`nV0ucPvzt7%r7?lSnBjrPPLr*)h6JFmo<OO z$6Cg7WZ!?inQvUPTm8xBWvqKW?wjwr{C(!-`1(8@{^d8;T4x!(AP{o<0_U845EPDp zf%KQPh#<}-Y|;iBID>bo_n>Yqi3|Z{l>*3#n%k>8W`^vzcOl(@J|xlr9aNc{r0ynl zvk16=y_0AlWu+kP5|uibvhyul@36^-Hr;!OUIR&dTffTSz(?-cQQbKUP+oiDsOROF O6C|(MDusc(-u(sM=|{8x literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dP.rel b/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dP.rel new file mode 100644 index 000000000000..8dc3779d32db --- /dev/null +++ b/autotest/ogr/data/miramon/Polygons/3dPolygons/tin_3dP.rel @@ -0,0 +1,180 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20240319 11074377+0100 +characterSet=006 +nOrganismes=2 +FileIdentifier=out_ID_B_tin_3d_14172 + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[METADADES:ORGANISME_2] +role=009 +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=out_ID_B_tin_3d_27042 +codeSpace= +DatasetTitle=Selecció de -> Triangulació de Delaunay - Selecció de -> Selecció de -> Retall de->D:\dades\20220909_Thiessen\AltEmporda\centroides_02.pnt + +[OVERVIEW:ASPECTES_TECNICS] +ArcSource=tin_3d.arc +comment1=S'ha transformat el fitxer 'MARC0000.vec' +comment2=S'ha donat una estructura topològica a l'original +comment3=Ciclat totalment a partir del fitxer F:\dades\20220909_Thiessen\AltEmporda\+\sel2\+\out_ID_B_tin_3d.arc. 
+ +[EXTENT] +MinX=510886.760465633 +MaxX=511161.917984244 +MinY=4660885.499725 +MaxY=4661425.355 +toler_env=0 + +[OVERVIEW] +CreationDate=20240319 11074377+0100 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_1_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +MostrarUnitats=0 +descriptor=Identificador Gràfic intern + +[TAULA_PRINCIPAL:N_VERTEXS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de vèrtexs + +[TAULA_PRINCIPAL:PERIMETRE] +descriptor=Perímetre del polígon (projecció) + +[TAULA_PRINCIPAL:PERIMETREE] +visible=0 +unitats=m +descriptor=Perímetre del polígon (el·lipsoide) + +[TAULA_PRINCIPAL:AREA] +descriptor=Àrea del polígon (projecció) + +[TAULA_PRINCIPAL:AREAE] +visible=0 +unitats=m² +descriptor=Àrea del polígon (el·lipsoide) + +[TAULA_PRINCIPAL:N_ARCS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre d'arcs + +[TAULA_PRINCIPAL:N_POLIG] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de polígons elementals + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=VecSelec_64.exe /EMANCIPA D:\GitHub-repository\gdal\autotest\ogr\data\miramon\Polygons\3dPolygons\TMP0000.SEL D:\GitHub-repository\gdal\autotest\ogr\data\miramon\Polygons\3dPolygons\tin_3d +purpose=Un fitxer de text indica quines són les entitats gràfiques i registres de la base de dades que cal desar en el fitxer de sortida. En aquesta ajuda ens referirem a aquest fitxer com a fitxer de seleccions. Es recomana l'extensió SEL, tot i que no és obligatòria. +date=20240318 15230736+0100 +NomFitxer=C:\MiraMon\VecSelec_64.exe + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[QUALITY:LINEAGE:PROCESS1:SOFTWARE_REFERENCE] +Titol= +Edition= +CollectiveTitle= +ISBN= +ISSN= + +[QUALITY:LINEAGE:PROCESS1:INOUT1] +identifier=EMANCIPA +ResultUnits= + +[QUALITY:LINEAGE:PROCESS1:INOUT2] +identifier=Param1 +TypeValues=S +ResultUnits= +source=1 + +[QUALITY:LINEAGE:SOURCE1] +NomFitxer=TMP0000.SEL + +[QUALITY:LINEAGE:PROCESS1:INOUT3] +identifier=Param2 +sentit=1 +TypeValues=S +ResultUnits= +source=2 + +[QUALITY:LINEAGE:SOURCE2] +NomFitxer=tin_3d + +[QUALITY:LINEAGE:PROCESS2] +nOrganismes=1 +history=VecSelec_64.exe /EMANCIPA D:\GitHub-repository\gdal\autotest\ogr\data\miramon\Polygons\3dPolygons\TMP0000.SEL D:\GitHub-repository\gdal\autotest\ogr\data\miramon\Polygons\3dPolygons\tin_3d +purpose=Un fitxer de text indica quines són les entitats gràfiques i registres de la base de dades que cal desar en el fitxer de sortida. En aquesta ajuda ens referirem a aquest fitxer com a fitxer de seleccions. Es recomana l'extensió SEL, tot i que no és obligatòria. 
+date=20240318 15230777+0100 +NomFitxer=C:\MiraMon\VecSelec_64.exe + +[QUALITY:LINEAGE:PROCESS2:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[QUALITY:LINEAGE:PROCESS2:SOFTWARE_REFERENCE] +Titol= +Edition= +CollectiveTitle= +ISBN= +ISSN= + +[QUALITY:LINEAGE:PROCESS2:INOUT1] +identifier=EMANCIPA +ResultUnits= + +[QUALITY:LINEAGE:PROCESS2:INOUT2] +identifier=Param1 +TypeValues=S +ResultUnits= +source=3 + +[QUALITY:LINEAGE:SOURCE3] +NomFitxer=TMP0000.SEL + +[QUALITY:LINEAGE:PROCESS2:INOUT3] +identifier=Param2 +sentit=1 +TypeValues=S +ResultUnits= +source=4 + +[QUALITY:LINEAGE:SOURCE4] +NomFitxer=tin_3d + +[QUALITY:LINEAGE] +processes=1,2 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampPerimetre=PERIMETRE +NomCampPerimetreEllipsoidal=PERIMETREE +NomCampArea=AREA +NomCampAreaEllipsoidal=AREAE +NomCampNArcs=N_ARCS +NomCampNPoligons=N_POLIG diff --git a/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POL.arc b/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POL.arc new file mode 100644 index 0000000000000000000000000000000000000000..e089cf12d0597d83a61e9e2099899f85764754c9 GIT binary patch literal 48 ecmZ<^a#k?ZGh~>W<#7Ajver5%{STeb00jUR(iH0e literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POL.nod b/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POL.nod new file mode 100644 index 0000000000000000000000000000000000000000..88197ddde3ebef69a634e4be9114d40b2cd0d79b GIT binary patch literal 48 ecmebCcTq6ZGh~>W<#7Ajver5%{STeb00jUT;}rG) literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POL.pol b/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POL.pol new file mode 100644 index 0000000000000000000000000000000000000000..255d114a0e0cb6dd2b7b5e5755c5c57ecaf4fc3a GIT binary patch literal 112 rcmWIW_fas^GgO$G<#7Ajver5%{STeb$N&N8;^=&+GB8zu%z?526fP+B literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLA.dbf b/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLA.dbf new file mode 100644 index 0000000000000000000000000000000000000000..7481dceeefb0d1e45d8b4e5331e4b5bceb390a3d GIT binary patch literal 193 zcmZRc!T<&b8L}V@G}6;0-aW|C&C{8I!4D)QiYD(DALbeq;u;Z*L*B>V&pqBT2&f&Z dpAFRjKYth3cuzl1xIC&f16bY-B#qsEUI3q75E%df literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLA.rel b/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLA.rel new file mode 100644 index 000000000000..b2454a7968ba --- /dev/null +++ b/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLA.rel @@ -0,0 +1,44 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemIdentifier=UTM-31N-ETRS89 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:N_VERTEXS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de vèrtexs + +[TAULA_PRINCIPAL:LONG_ARC] +descriptor=Longitud de l'arc + +[TAULA_PRINCIPAL:NODE_INI] +visible=0 +MostrarUnitats=0 +descriptor=Node inicial + +[TAULA_PRINCIPAL:NODE_FI] +visible=0 +MostrarUnitats=0 +descriptor=Node final + +[OVERVIEW:ASPECTES_TECNICS] +Ciclat1=Empty_POL.pol + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampLongitudArc=LONG_ARC +NomCampNodeIni=NODE_INI +NomCampNodeFi=NODE_FI diff --git 
a/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLN.dbf b/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLN.dbf new file mode 100644 index 0000000000000000000000000000000000000000..95689ae5c6954ec096059cfdcca4dea9e26d8711 GIT binary patch literal 129 zcmZRsW*1=qf<^`%5QPeWGM+B+?m>=jp3V#mejrIvR22-4LC(SPj`4o}F4*NmJOe_5 NLDH@ec}6t-ya4143pfA( literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLN.rel b/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLN.rel new file mode 100644 index 000000000000..832186cce6c1 --- /dev/null +++ b/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLN.rel @@ -0,0 +1,26 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:ARCS_A_NOD] +MostrarUnitats=0 +descriptor=Nombre d'arcs al node + +[TAULA_PRINCIPAL:TIPUS_NODE] +MostrarUnitats=0 +descriptor=Tipus de node + +[GEOMETRIA_I_TOPOLOGIA] +NomCampArcsANode=ARCS_A_NOD +NomCampTipusNode=TIPUS_NODE diff --git a/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLP.dbf b/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLP.dbf new file mode 100644 index 0000000000000000000000000000000000000000..a4ec5a35e3566f53df3371a1361b850670c5e5e7 GIT binary patch literal 225 zcmZRc!T<&j8R{VnG}6;0-aW|C&C{8I!4D)QiYD(DALbeq;u;Z*Lq5PY$kW#~B*+ye l&xUG%W00#OR0Lu^n!I1UV~{h@a3H{8e}KP_r#nu0UI4Xq5uE@4 literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLP.rel b/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLP.rel new file mode 100644 index 000000000000..cd5ab65e97a6 --- /dev/null +++ b/autotest/ogr/data/miramon/Polygons/EmptyPolygons/Empty_POLP.rel @@ -0,0 +1,52 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[OVERVIEW:ASPECTES_TECNICS] +ArcSource=Empty_POL.arc + +[EXTENT] +toler_env=0 +MinX=2.9E+301 +MaxX=2.9E+301 +MinY=2.9E+301 +MaxY=2.9E+301 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +descriptor=Identificador Gràfic intern +visible=0 +TractamentVariable=Ordinal + +[TAULA_PRINCIPAL:N_VERTEXS] +descriptor=Nombre de vèrtexs +visible=0 +MostrarUnitats=0 + +[TAULA_PRINCIPAL:PERIMETRE] +descriptor=Perímetre del polígon + +[TAULA_PRINCIPAL:AREA] +descriptor=Àrea del polígon + +[TAULA_PRINCIPAL:N_ARCS] +descriptor=Nombre d'arcs +visible=0 +MostrarUnitats=0 + +[TAULA_PRINCIPAL:N_POLIG] +descriptor=Nombre de polígons elementals +visible=0 +MostrarUnitats=0 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampPerimetre=PERIMETRE +NomCampArea=AREA +NomCampNArcs=N_ARCS +NomCampNPoligons=N_POLIG diff --git a/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFile.arc b/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFile.arc new file mode 100644 index 0000000000000000000000000000000000000000..431d702a3a3540257dfdffce360f123848d1eeb8 GIT binary patch literal 536 zcmZ<^a#k?ZGi2E98#`I@Yl*|Y+{;szD@=CS(bD|peQJe+oy>ifmSbHG%nT3!Qx`GM zUGJG~heHqB*RaOxb#QfTKxH?eYC%-nGFH!W^(hYSdv8Sre(Qvr|LB0;fx_u^4xaNr zCY1GcIdA~gS}-y&01+6weNBkr>znA{yGPA9OQp%dR8&BU|5*pzT}L-DR<}-Rasb)4 z1E`4!NQ1DHuuoUCOREFSoy(sY)!yRmaHu{}u-Mq85$;Zy`;T~ST9YW-=1{=1XUjX6 z7Px;txb6*opjzRu;BL<4DeCnY=C!#Uo5sIk0^I$V@=Mw{p7%H?Xf64*P_-TIU%uVd zKAU`+9d=kPniM47=K%Bfnxh;csREN6-p-uN_CLD;9xj3#+rAovwmOK1@3PEPnB>6k 
zX|B$DufqY|zE6uM1zmpvG#46Auy6`G`>G`2PoqP}O}kqSbK4wX;RFjmfwL6`*4t|x X(gN6zt#oa7FnhbkkNsGM1G;$t@$uhR literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFile.nod b/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFile.nod new file mode 100644 index 0000000000000000000000000000000000000000..e5d310c3c0cee2ef29d314c8b4e269060b4f902e GIT binary patch literal 92 zcmebCcTq6ZGi2E98#`I@Yl%af+p%f<8zwmHXlZ`)KDENZbN<JKvc4_{W(EjgWME?O V05U;z0F;h^%7G|GD4PjL0|5OV62Jfe literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFile.pol b/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFile.pol new file mode 100644 index 0000000000000000000000000000000000000000..0a2e4736fe6478f5c622ee571b3c8a07b4443c76 GIT binary patch literal 349 zcmWIW_fas^GgR2^8#`I@Yl*|Y+{;szD@=CS(bD|peQJe+oy>ifmSbHGEDQ+12xKxL zxD3odHX;2;${9RB2A~0${)l<*de3Y-9D3NkhBaQVgWC_%0>dEvZOd3a%hjhiG>W+N z7MK2ZaNm0?D)3t;-2IOZ=p87WUgzLB|6@W~UzY<+FNh8R5^i4;V)*(dIxIVRca6>b zuMWO@)Qq!KnjB0;1*G_&b-?|9bQ5EB>y#!G{SiR@Qo=r6(JrkHr@l^&Y`%Ke0Tc!x Nzz(ti2$(?u1OVn<W$pj~ literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileA.dbf b/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileA.dbf new file mode 100644 index 0000000000000000000000000000000000000000..22d64985be2349156fb209f77c88885217bf9fc4 GIT binary patch literal 511 zcmZ{fK?=e!5Jd+H-MQ19i<}@dF^%c6*t7&Bg<8a2_Flo$c{5uPzb)e|!soxd3CSM$ zJ|m(h-KTRkTcXgUZm8REB8oHJt**p$^~30Imp}a6$1c^wq<{Q;xghq9Pa%dcUt)Um zcE<es8^KM%l@_Jau9D7L<0_O$tj?A~!9sF~O6h|vl_|7AiNxw`DKspkpFauf3RxOs Kt0OoPtLqo7Pdilr literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileA.rel b/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileA.rel new file mode 100644 index 000000000000..355451718486 --- /dev/null +++ b/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileA.rel @@ -0,0 +1,89 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16204654+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=0e09e6cf-0d31-499e-bcf0-bee4d3e9d87a_SimplePolFileA + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=0e09e6cf-0d31-499e-bcf0-bee4d3e9d87a_SimplePolFileA +codeSpace= +DatasetTitle=Simple Pol File [píxels] + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemDefinition=Local +HorizontalSystemIdentifier=plane +unitats=STB#T_pixels +unitatsY=? 
+ +[EXTENT] +toler_env=0 +MinX=335.3187440533326 +MaxX=1224.163653663228 +MinY=390.371075166458 +MaxY=856.814462416696 + +[OVERVIEW] +CreationDate=20230628 16204653+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:N_VERTEXS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de vèrtexs + +[TAULA_PRINCIPAL:LONG_ARC] +descriptor=Longitud de l'arc + +[TAULA_PRINCIPAL:NODE_INI] +visible=0 +MostrarUnitats=0 +descriptor=Node inicial + +[TAULA_PRINCIPAL:NODE_FI] +visible=0 +MostrarUnitats=0 +descriptor=Node final + +[OVERVIEW:ASPECTES_TECNICS] +Ciclat1=SimplePolFile.pol + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204654+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampLongitudArc=LONG_ARC +NomCampNodeIni=NODE_INI +NomCampNodeFi=NODE_FI diff --git a/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileN.dbf b/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileN.dbf new file mode 100644 index 0000000000000000000000000000000000000000..19847ee7684ceccc4001218201034fe3e646fd67 GIT binary patch literal 261 zcmZRsW|LuNU|?uu&;gQYK!kz8(<R<L$kENynSsF%BrA%h#4*S@INmYd&))^Re28a2 kXfR0H6(Y}wrk_^<8!*5oXlR5@RKXCNC=Rt)O)xY903eGTUH||9 literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileN.rel b/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileN.rel new file mode 100644 index 000000000000..c0f476e1f0de --- /dev/null +++ b/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileN.rel @@ -0,0 +1,64 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16204653+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=701e2102-a0ba-4cb2-aeb5-bdb329c79868_SimplePolFileN + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=701e2102-a0ba-4cb2-aeb5-bdb329c79868_SimplePolFileN +codeSpace= +DatasetTitle=Simple Pol File [píxels] + +[EXTENT] +toler_env=0 + +[OVERVIEW] +CreationDate=20230628 16204652+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:ARCS_A_NOD] +MostrarUnitats=0 +descriptor=Nombre d'arcs al node + +[TAULA_PRINCIPAL:TIPUS_NODE] +MostrarUnitats=0 +descriptor=Tipus de node + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204653+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampArcsANode=ARCS_A_NOD +NomCampTipusNode=TIPUS_NODE diff --git a/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileP.dbf b/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileP.dbf new file mode 100644 index 0000000000000000000000000000000000000000..5d00a81ae4a44cefb1edfafaf085f49073b42336 GIT binary patch literal 729 zcma)&OHRWu5QYa8b%8)^U~ev9+2cp-rAbW{sf5Zzh+S^L2{=4%<V+rFBxaHM=l|v* 
zXZO3?-vYqP%}@TW;t8NR^v4w5ni`<J(7RPj+keH>#qZAzevYYmk6ns|Kkt_dLW-d{ z!~QjI`;h9qJjcfU^T(+<Zt~D|a+n6lPXM?t#h5b3q6hvsf|4H?g}9<Fmf&It!dK0$ z>BZEnkfbdsCeeBBtkuqsrj)2F$q}WWOg2r+Fm~qrI@1e@R5?;CuU6bhEl`u`oUw{m p9#Kr@>{?Boj2&irAr)1#w=wGkH(Fa?5mv&O{Cf>5J<a^@+8-ctQn>&C literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileP.rel b/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileP.rel new file mode 100644 index 000000000000..659952f5e451 --- /dev/null +++ b/autotest/ogr/data/miramon/Polygons/SimplePolygons/SimplePolFileP.rel @@ -0,0 +1,93 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16204988+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=00691677-6d15-40f8-9d62-e8df34876e80_SimplePolFileP + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code= +codeSpace= +DatasetTitle=Simple Pol File + +[OVERVIEW:ASPECTES_TECNICS] +ArcSource=SimplePolFile.arc + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204988+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[EXTENT] +toler_env=0 +MinX=335.318744053333 +MaxX=1224.16365366323 +MinY=390.371075166458 +MaxY=856.814462416696 + +[OVERVIEW] +CreationDate=20230628 16204986+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +descriptor=Identificador Gràfic intern +visible=0 +TractamentVariable=Ordinal + +[TAULA_PRINCIPAL:N_VERTEXS] +descriptor=Nombre de vèrtexs +visible=0 +MostrarUnitats=0 + +[TAULA_PRINCIPAL:PERIMETRE] +descriptor=Perímetre del polígon + +[TAULA_PRINCIPAL:AREA] +descriptor=Àrea del polígon + +[TAULA_PRINCIPAL:N_ARCS] +descriptor=Nombre d'arcs +visible=0 +MostrarUnitats=0 + +[TAULA_PRINCIPAL:N_POLIG] +descriptor=Nombre de polígons elementals +visible=0 +MostrarUnitats=0 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampPerimetre=PERIMETRE +NomCampArea=AREA +NomCampNArcs=N_ARCS +NomCampNPoligons=N_POLIG + +[TAULA_PRINCIPAL:ATT1] +descriptor=atribute1 + +[TAULA_PRINCIPAL:ATT2] +descriptor=atribute2 diff --git a/autotest/ogr/ogr_miramon_vector.py b/autotest/ogr/ogr_miramon_vector.py new file mode 100644 index 000000000000..98a3cad117e8 --- /dev/null +++ b/autotest/ogr/ogr_miramon_vector.py @@ -0,0 +1,677 @@ +#!/usr/bin/env pytest +# -*- coding: utf-8 -*- +############################################################################### +# $Id$ +# +# Project: GDAL/OGR Test Suite +# Purpose: Test read functionality for OGR MiraMon vector driver. 
+# Author: Abel Pau <a dot pau at creaf.uab.cat> +# +############################################################################### +# Copyright (c) 2024, Even Rouault <even dot rouault at spatialys.com> +# +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the "Software"), +# to deal in the Software without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, +# and/or sell copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +# DEALINGS IN THE SOFTWARE. +############################################################################### + +# import os +# import pdb + +import gdaltest + +# import ogrtest +import pytest + +# from osgeo import gdal, ogr, osr +from osgeo import gdal, ogr + +pytestmark = pytest.mark.require_driver("MiraMonVector") + +############################################################################### +# basic point test + + +def check_simple_point(ds): + + lyr = ds.GetLayer(0) + assert lyr is not None, "Failed to get layer" + + assert lyr.GetFeatureCount() == 3 + assert lyr.GetGeomType() == ogr.wkbPoint + + f = lyr.GetNextFeature() + assert f is not None, "Failed to get feature" + assert f.GetFID() == 0 + assert ( + f.GetGeometryRef().ExportToWkt() == "POINT (513.488106565226 848.806850618409)" + ) + assert f.GetField("ID_GRAFIC") == 0 + assert f.GetFieldAsString("ATT1") == "A" + assert f.GetFieldAsString("ATTRIBUTE_2") == "B" + + f = lyr.GetNextFeature() + assert f is not None, "Failed to get feature" + assert ( + f.GetGeometryRef().ExportToWkt() == "POINT (342.325404376834 715.680304471881)" + ) + assert f.GetField("ID_GRAFIC") == 1 + assert f.GetFieldAsString("ATT1") == "C" + assert f.GetFieldAsString("ATTRIBUTE_2") == "D" + + f = lyr.GetNextFeature() + assert f is not None, "Failed to get feature" + assert ( + f.GetGeometryRef().ExportToWkt() == "POINT (594.503182156354 722.692543360232)" + ) + assert f.GetField("ID_GRAFIC") == 2 + assert f.GetFieldAsString("ATT1") == "" + assert f.GetFieldAsString("ATTRIBUTE_2") == "" + + +def test_ogr_miramon_read_simple_point(): + + ds = gdal.OpenEx("data/miramon/Points/SimplePoints/SimplePointsFile.pnt") + assert ds is not None, "Failed to get dataset" + + check_simple_point(ds) + + +def test_ogr_miramon_write_simple_pointV11(tmp_vsimem): + + out_filename = str(tmp_vsimem / "out.pnt") + gdal.VectorTranslate( + out_filename, + "data/miramon/Points/SimplePoints/SimplePointsFile.pnt", + format="MiraMonVector", + ) + ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) + check_simple_point(ds) + + +def test_ogr_miramon_write_simple_pointV20(tmp_vsimem): + + out_filename = str(tmp_vsimem / "out.pnt") + gdal.VectorTranslate( + out_filename, + "data/miramon/Points/SimplePoints/SimplePointsFile.pnt", + format="MiraMonVector", + 
options="-lco Version=V2.0", + ) + + ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) + check_simple_point(ds) + + +############################################################################### +# basic linestring test + + +def check_simple_arc(ds): + + lyr = ds.GetLayer(0) + assert lyr is not None, "Failed to get layer" + + assert lyr.GetFeatureCount() == 4 + assert lyr.GetGeomType() == ogr.wkbLineString + + f = lyr.GetNextFeature() + assert f is not None, "Failed to get feature" + assert f.GetFID() == 0 + assert ( + f.GetGeometryRef().ExportToWkt() + == "LINESTRING (351.333967649907 610.58039961936,474.450999048575 824.784015223546,758.721217887776 838.797335870549,1042.99143672698 610.58039961936,1369.30161750719 562.534728829636)" + ) + assert f.GetField("ID_GRAFIC") == 0 + assert f.GetField("N_VERTEXS") == 5 + assert f.GetField("LONG_ARC") == pytest.approx(1226.052754666, abs=1e-5) + assert f.GetField("NODE_INI") == 0 + assert f.GetField("NODE_FI") == 1 + assert f.GetFieldAsString("ATT1") == "A" + assert f.GetFieldAsString("ATT2") == "B" + + f = lyr.GetNextFeature() + assert f is not None, "Failed to get feature" + assert f.GetFID() == 1 + assert ( + f.GetGeometryRef().ExportToWkt() + == "LINESTRING (794.755470980069 442.420551855326,613.583254043818 399.379638439531,642.61084681261 212.201712654565,861.819219790726 201.191246431919,1041.99048525219 460.437678401472,598.568981922029 591.562321598428,1109.05423406285 931.88582302564)" + ) + assert f.GetField("ID_GRAFIC") == 1 + assert f.GetField("N_VERTEXS") == 7 + assert f.GetField("LONG_ARC") == pytest.approx(1986.750568, abs=1e-5) + assert f.GetField("NODE_INI") == 2 + assert f.GetField("NODE_FI") == 3 + assert f.GetFieldAsString("ATT1") == "C" + assert f.GetFieldAsString("ATT2") == "D" + + f = lyr.GetNextFeature() + assert f is not None, "Failed to get feature" + assert f.GetFID() == 2 + assert ( + f.GetGeometryRef().ExportToWkt() + == "LINESTRING (887.843958135159 858.816365366268,989.941008563323 767.729781160749)" + ) + assert f.GetField("ID_GRAFIC") == 2 + assert f.GetField("N_VERTEXS") == 2 + assert f.GetField("LONG_ARC") == pytest.approx(136.823147, abs=1e-5) + assert f.GetField("NODE_INI") == 4 + assert f.GetField("NODE_FI") == 5 + assert f.GetFieldAsString("ATT1") == "C" + assert f.GetFieldAsString("ATT2") == "D" + + f = lyr.GetNextFeature() + assert f is not None, "Failed to get feature" + assert f.GetFID() == 3 + assert ( + f.GetGeometryRef().ExportToWkt() + == "LINESTRING (537.510941960088 719.684110371025,496.471931493865 633.602283539436,432.411037107567 572.544243577495,415.394862036206 631.600380589864,492.468125594722 642.610846812509,564.536631779308 630.599429115078)" + ) + assert f.GetField("ID_GRAFIC") == 3 + assert f.GetField("N_VERTEXS") == 6 + assert f.GetField("LONG_ARC") == pytest.approx(396.238966, abs=1e-5) + assert f.GetField("NODE_INI") == 6 + assert f.GetField("NODE_FI") == 7 + assert f.GetFieldAsString("ATT1") == "E" + assert f.GetFieldAsString("ATT2") == "F" + + +def test_ogr_miramon_read_simple_arc(): + + ds = gdal.OpenEx("data/miramon/Arcs/SimpleArcs/SimpleArcFile.arc") + assert ds is not None, "Failed to get dataset" + check_simple_arc(ds) + + +def test_ogr_miramon_write_simple_arcV11(tmp_vsimem): + + out_filename = str(tmp_vsimem / "out.arc") + gdal.VectorTranslate( + out_filename, + "data/miramon/Arcs/SimpleArcs/SimpleArcFile.arc", + format="MiraMonVector", + ) + ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) + check_simple_arc(ds) + del ds + + +def 
test_ogr_miramon_write_simple_arcV20(tmp_vsimem): + + out_filename = str(tmp_vsimem / "out.arc") + gdal.VectorTranslate( + out_filename, + "data/miramon/Arcs/SimpleArcs/SimpleArcFile.arc", + format="MiraMonVector", + options="-lco Version=V2.0", + ) + ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) + check_simple_arc(ds) + del ds + + +############################################################################### +# basic polygon test + + +def check_simple_polygon(ds): + + lyr = ds.GetLayer(0) + + assert lyr is not None, "Failed to get layer" + + assert lyr.GetFeatureCount() == 3 + assert lyr.GetGeomType() == ogr.wkbPolygon + + # going to the first polygon + f = lyr.GetNextFeature() + assert f is not None, "Failed to get feature" + assert f.GetFID() == 0 + assert ( + f.GetGeometryRef().ExportToWkt() + == "POLYGON ((335.318744053333 769.731684110321,552.525214081877 856.814462416696,775.737392959137 707.672692673594,648.616555661325 493.469077069408,386.367269267414 498.473834443337,335.318744053333 769.731684110321))" + ) + assert f.GetField("ID_GRAFIC") == 1 + assert f.GetField("N_VERTEXS") == 6 + assert f.GetField("PERIMETRE") == pytest.approx(1289.866489495, abs=1e-5) + assert f.GetField("AREA") == pytest.approx(112471.221989, abs=1e-5) + assert f.GetField("N_ARCS") == 1 + assert f.GetField("N_POLIG") == 1 + assert f.GetFieldAsString("ATT1") == "A" + assert f.GetFieldAsString("ATT2") == "B" + + f = lyr.GetNextFeature() + assert f is not None, "Failed to get feature" + assert f.GetFID() == 1 + assert ( + f.GetGeometryRef().ExportToWkt() + == "POLYGON ((1068.01522359662 849.807802093194,1160.10275927693 795.756422454755,1224.16365366323 682.648905803946,1156.09895337779 525.499524262557,962.915318744103 489.465271170264,830.789724072362 617.587059942862,924.879162702239 740.704091341529,1068.01522359662 849.807802093194))" + ) + assert f.GetField("ID_GRAFIC") == 2 + assert f.GetField("N_VERTEXS") == 8 + assert f.GetField("PERIMETRE") == pytest.approx(1123.514024, abs=1e-5) + assert f.GetField("AREA") == pytest.approx(88563.792204, abs=1e-5) + assert f.GetField("N_ARCS") == 1 + assert f.GetField("N_POLIG") == 1 + assert f.GetFieldAsString("ATT1") == "C" + assert f.GetFieldAsString("ATT2") == "D" + + f = lyr.GetNextFeature() + assert f is not None, "Failed to get feature" + assert f.GetFID() == 2 + assert ( + f.GetGeometryRef().ExportToWkt() + == "POLYGON ((636.605137963894 390.371075166458,580.551855375883 575.547098001853,723.687916270269 594.565176022785,796.757373929641 475.451950523261,744.707897240773 396.376784015173,636.605137963894 390.371075166458))" + ) + assert f.GetField("ID_GRAFIC") == 3 + assert f.GetField("N_VERTEXS") == 6 + assert f.GetField("PERIMETRE") == pytest.approx(680.544697, abs=1e-5) + assert f.GetField("AREA") == pytest.approx(30550.052343, abs=1e-5) + assert f.GetField("N_ARCS") == 1 + assert f.GetField("N_POLIG") == 1 + assert f.GetFieldAsString("ATT1") == "C" + assert f.GetFieldAsString("ATT2") == "D" + + +def test_ogr_miramon_read_simple_polygon(): + + ds = gdal.OpenEx( + "data/miramon/Polygons/SimplePolygons/SimplePolFile.pol", gdal.OF_VECTOR + ) + assert ds is not None, "Failed to get dataset" + check_simple_polygon(ds) + + +def test_ogr_miramon_write_simple_polygonV11(tmp_vsimem): + + out_filename = str(tmp_vsimem / "out.pol") + gdal.VectorTranslate( + out_filename, + "data/miramon/Polygons/SimplePolygons/SimplePolFile.pol", + format="MiraMonVector", + ) + ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) + check_simple_polygon(ds) + + +def 
test_ogr_miramon_write_simple_polygonV20(tmp_vsimem): + + out_filename = str(tmp_vsimem / "out.pol") + gdal.VectorTranslate( + out_filename, + "data/miramon/Polygons/SimplePolygons/SimplePolFile.pol", + format="MiraMonVector", + options="-lco Version=V2.0", + ) + ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) + check_simple_polygon(ds) + + +############################################################################### +# testing empty layers + + +def test_ogr_miramon_empty_point_layers(): + + ds = gdal.OpenEx("data/miramon/Points/EmptyPoints/Empty_PNT.pnt", gdal.OF_VECTOR) + assert ds is not None, "Failed to get dataset" + + lyr = ds.GetLayer(0) + + assert lyr is not None, "Failed to get layer" + + assert lyr.GetFeatureCount() == 0 + + f = lyr.GetNextFeature() + assert f is None, "Failed to get empty feature" + + ds = None + + +def test_ogr_miramon_empty_arc_layers(): + + ds = gdal.OpenEx("data/miramon/Arcs/EmptyArcs/Empty_ARC.arc", gdal.OF_VECTOR) + assert ds is not None, "Failed to get dataset" + + lyr = ds.GetLayer(0) + + assert lyr is not None, "Failed to get layer" + + assert lyr.GetFeatureCount() == 0 + + f = lyr.GetNextFeature() + assert f is None, "Failed to get empty feature" + + ds = None + + +def test_ogr_miramon_empty_pol_layers(): + + ds = gdal.OpenEx( + "data/miramon/Polygons/EmptyPolygons/Empty_POL.pol", gdal.OF_VECTOR + ) + assert ds is not None, "Failed to get dataset" + + lyr = ds.GetLayer(0) + + assert lyr is not None, "Failed to get layer" + + # The layer has no features + assert lyr.GetFeatureCount() == 0 + + f = lyr.GetNextFeature() + assert f is None, "Failed to get empty feature" + + ds = None + + +############################################################################### +# testing 3d part + + +def check_3d_point(ds): + + lyr = ds.GetLayer(0) + + assert lyr is not None, "Failed to get layer" + + assert lyr.GetFeatureCount() == 31 + assert lyr.GetGeomType() == ogr.wkbPoint25D + + f = lyr.GetNextFeature() + assert f is not None, "Failed to get feature" + assert f.GetFID() == 0 + + assert f.GetGeometryRef().ExportToWkt() == "POINT (440551.66 4635315.3 619.96)" + + g = f.GetGeometryRef() + assert g is not None, "Failed to get geometry" + assert g.GetCoordinateDimension() == 3 + assert g.GetZ() == 619.96 + + f = lyr.GetFeature(30) + assert f is not None, "Failed to get feature" + g = f.GetGeometryRef() + assert g is not None, "Failed to get geometry" + assert g.GetZ() == 619.77 + + +def test_ogr_miramon_read_3d_point(tmp_vsimem): + + ds = gdal.OpenEx("data/miramon/Points/3dpoints/Some3dPoints.pnt", gdal.OF_VECTOR) + assert ds is not None, "Failed to get dataset" + check_3d_point(ds) + + +def test_ogr_miramon_write_3d_point(tmp_vsimem): + + out_filename = str(tmp_vsimem / "out.pnt") + gdal.VectorTranslate( + out_filename, + "data/miramon/Points/3dpoints/Some3dPoints.pnt", + format="MiraMonVector", + ) + ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) + check_3d_point(ds) + + +def check_3d_arc(ds): + + lyr = ds.GetLayer(0) + + assert lyr is not None, "Failed to get layer" + + assert lyr.GetFeatureCount() == 6 + assert lyr.GetGeomType() == ogr.wkbLineString25D + + f = lyr.GetFeature(0) + assert f is not None, "Failed to get feature" + g = f.GetGeometryRef() + assert g is not None, "Failed to get geometry" + assert g.GetCoordinateDimension() == 3 + assert g.GetPointCount() == 4 + p = g.GetPoint(0) + assert p[2] == 595.1063842773438 + p = g.GetPoint(1) + assert p[2] == 326.656005859375 + p = g.GetPoint(2) + assert p[2] == 389.99432373046875 + p = g.GetPoint(3) + assert 
p[2] == 716.6224975585938 + + f = lyr.GetFeature(5) + assert f is not None, "Failed to get feature" + g = f.GetGeometryRef() + assert g is not None, "Failed to get geometry" + assert g.GetCoordinateDimension() == 3 + assert g.GetPointCount() == 2 + p = g.GetPoint(0) + assert p[2] == 233.82064819335938 + p = g.GetPoint(1) + assert p[2] == 794.5372314453125 + + ds = None + + +def test_ogr_miramon_read_3d_arc(tmp_vsimem): + + ds = gdal.OpenEx("data/miramon/Arcs/3dArcs/linies_3d_WGS84.arc", gdal.OF_VECTOR) + assert ds is not None, "Failed to get dataset" + check_3d_arc(ds) + + +def test_ogr_miramon_write_3d_arc(tmp_vsimem): + + out_filename = str(tmp_vsimem / "out.arc") + gdal.VectorTranslate( + out_filename, + "data/miramon/Arcs/3dArcs/linies_3d_WGS84.arc", + format="MiraMonVector", + ) + ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) + check_3d_arc(ds) + del ds + + +def check_3d_pol(ds): + + lyr = ds.GetLayer(0) + + assert lyr is not None, "Failed to get layer" + + assert lyr.GetFeatureCount() == 5 + assert lyr.GetGeomType() == ogr.wkbPolygon25D + + f = lyr.GetFeature(0) + assert f is not None, "Failed to get feature" + g = f.GetGeometryRef() + assert g is not None, "Failed to get geometry" + assert g.GetCoordinateDimension() == 3 + r = g.GetGeometryRef(0) + assert r is not None, "Failed to get geometry" + assert r.GetPointCount() == 4 + p = r.GetPoint(0) + assert p[2] == 11.223576545715332 + p = r.GetPoint(1) + assert p[2] == 9.221868515014648 + p = r.GetPoint(2) + assert p[2] == 21.929399490356445 + p = r.GetPoint(3) + assert p[2] == 11.223576545715332 + + f = lyr.GetFeature(4) + assert f is not None, "Failed to get feature" + g = f.GetGeometryRef() + assert g is not None, "Failed to get geometry" + assert g.GetCoordinateDimension() == 3 + r = g.GetGeometryRef(0) + assert r is not None, "Failed to get geometry" + assert r.GetPointCount() == 4 + p = r.GetPoint(0) + assert p[2] == 18.207277297973633 + p = r.GetPoint(1) + assert p[2] == 21.929399490356445 + p = r.GetPoint(2) + assert p[2] == 5.746463775634766 + p = r.GetPoint(3) + assert p[2] == 18.207277297973633 + + +def test_ogr_miramon_read_3d_pol(): + + ds = gdal.OpenEx("data/miramon/Polygons/3dPolygons/tin_3d.pol", gdal.OF_VECTOR) + assert ds is not None, "Failed to get dataset" + check_3d_pol(ds) + + +def test_ogr_miramon_write_3d_pol(tmp_vsimem): + + out_filename = str(tmp_vsimem / "out.pol") + gdal.VectorTranslate( + out_filename, + "data/miramon/Polygons/3dPolygons/tin_3d.pol", + format="MiraMonVector", + ) + ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) + check_3d_pol(ds) + del ds + + +############################################################################### +# ogrsf test in some files + + +@pytest.mark.parametrize( + "filename", + [ + "Points/3dpoints/Some3dPoints.pnt", + "Points/SimplePoints/SimplePointsFile.pnt", + "Points/EmptyPoints/Empty_PNT.pnt", + "Arcs/SimpleArcs/SimpleArcFile.arc", + "Arcs/EmptyArcs/Empty_ARC.arc", + "Arcs/3dArcs/linies_3d_WGS84.arc", + "Polygons/SimplePolygons/SimplePolFile.pol", + "Polygons/EmptyPolygons/Empty_POL.pol", + "Polygons/3dPolygons/tin_3d.pol", + ], +) +def test_ogr_miramon_test_ogrsf(filename): + + import test_cli_utilities + + if test_cli_utilities.get_test_ogrsf_path() is None: + pytest.skip("test_ogrsf not available") + + ret = gdaltest.runexternal( + test_cli_utilities.get_test_ogrsf_path() + " -ro data/miramon/" + filename + ) + + assert "INFO" in ret + assert "ERROR" not in ret + + +############################################################################### +# -lco tests: 
CreationLanguage + + +@pytest.mark.parametrize( + "Language, expected_description", + [ + ("CAT", "Identificador Gràfic intern"), + ("SPA", "Identificador Gráfico interno"), + ("ENG", "Internal Graphic identifier"), + ], +) +def test_ogr_miramon_CreationLanguage(tmp_vsimem, Language, expected_description): + + out_filename = str(tmp_vsimem / "out.pnt") + gdal.VectorTranslate( + out_filename, + "data/miramon/Points/SimplePoints/SimplePointsFile.pnt", + format="MiraMonVector", + options="-lco CreationLanguage=" + Language, + ) + + ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) + lyr = ds.GetLayer(0) + assert lyr is not None, "Failed to get layer" + + layer_def = lyr.GetLayerDefn() + field_index = layer_def.GetFieldIndex("ID_GRAFIC") + assert field_index >= 0 + + field_def = layer_def.GetFieldDefn(field_index) + field_description = field_def.GetAlternativeNameRef() + assert field_description == expected_description + + +############################################################################### +# -lco tests: CreationLanguage + + +@pytest.mark.parametrize( + "Language,expected_description", + [ + ("CAT", "Identificador Gràfic intern"), + ("SPA", "Identificador Gráfico interno"), + ("ENG", "Internal Graphic identifier"), + ], +) +def test_ogr_miramon_OpenLanguagePoint(Language, expected_description): + + ds = gdal.OpenEx( + "data/miramon/Points/SimplePoints/SimplePointsFile.pnt", + gdal.OF_VECTOR, + open_options=["OpenLanguage=" + Language], + ) + lyr = ds.GetLayer(0) + assert lyr is not None, "Failed to get layer" + + layer_def = lyr.GetLayerDefn() + field_index = layer_def.GetFieldIndex("ID_GRAFIC") + assert field_index >= 0 + + field_def = layer_def.GetFieldDefn(field_index) + field_description = field_def.GetAlternativeNameRef() + assert field_description == expected_description + + +@pytest.mark.parametrize( + "Language,expected_description", + [ + ("CAT", "Node inicial"), + ("SPA", "Nodo inicial"), + ("ENG", "Initial node"), + ], +) +def test_ogr_miramon_OpenLanguageArc(Language, expected_description): + + ds = gdal.OpenEx( + "data/miramon/Arcs/SimpleArcs/SimpleArcFile.arc", + gdal.OF_VECTOR, + open_options=["OpenLanguage=" + Language], + ) + lyr = ds.GetLayer(0) + assert lyr is not None, "Failed to get layer" + + layer_def = lyr.GetLayerDefn() + field_index = layer_def.GetFieldIndex("NODE_INI") + assert field_index >= 0 + + field_def = layer_def.GetFieldDefn(field_index) + field_description = field_def.GetAlternativeNameRef() + assert field_description == expected_description diff --git a/doc/source/drivers/vector/index.rst b/doc/source/drivers/vector/index.rst index 02db55e2d72a..818b44db488f 100644 --- a/doc/source/drivers/vector/index.rst +++ b/doc/source/drivers/vector/index.rst @@ -67,6 +67,7 @@ Vector drivers lvbag mapml memory + miramon mitab mongodbv3 mssqlspatial diff --git a/doc/source/drivers/vector/miramon.rst b/doc/source/drivers/vector/miramon.rst new file mode 100644 index 000000000000..48f1427d184b --- /dev/null +++ b/doc/source/drivers/vector/miramon.rst @@ -0,0 +1,374 @@ +.. _vector.miramon: + +MiraMon Vector +==================== + +.. shortname:: MiraMonVector + +.. built_in_by_default:: + +This driver is capable of translating (reading and writing) structured vectors +of point, arc (*linestrings*), and polygon types from MiraMon vector format. + +In MiraMon the concepts of OGRMultiPoints and OGRMultiLineStrings are not supported, +but the driver translates a multipoint into N points and a multistring into N arcs. 
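+
+As an illustration, a minimal sketch of this behaviour through the GDAL Python
+bindings (assuming a hypothetical ``multipoints.gpkg`` source whose single layer
+holds multipoint geometries) could look like this:
+
+::
+
+    from osgeo import gdal, ogr
+
+    # Translate the source into a MiraMon point layer; each multipoint is
+    # expected to be split into N simple point features.
+    gdal.VectorTranslate("out.pnt", "multipoints.gpkg", format="MiraMonVector")
+
+    ds = gdal.OpenEx("out.pnt", gdal.OF_VECTOR)
+    lyr = ds.GetLayer(0)
+    print(lyr.GetGeomType() == ogr.wkbPoint)  # simple points, not multipoints
+    print(lyr.GetFeatureCount())              # one feature per original point
+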
+When reading a MiraMon file of type *.pol*, the corresponding
+layer will be reported as of type wkbPolygon, but depending on the
+number of parts of each geometry, the actual type of the geometry for
+each feature can be either OGRPolygon or OGRMultiPolygon.
+
+The reading driver verifies whether multipart polygons adhere to the
+specification (that is to say, the vertices of outer rings should be
+oriented clockwise on the X/Y plane, and those of inner rings
+counterclockwise). Otherwise, the driver corrects the orientation
+(the original format does not enforce this rule, as polygon
+files are based on topological arc files, where the order of the vertices
+may be relevant).
+
+Measures (M coordinate) are not supported.
+Symbolization is neither read nor generated by this driver.
+
+A `look-up table of MiraMon <https://www.miramon.cat/help/eng/mm32/AP6.htm>`__ and
+`EPSG <https://epsg.org/home.html>`__ Spatial Reference Systems allows matching
+identifiers between the two systems.
+
+If a layer contains an old *.rel* format file (used some decades ago),
+a warning message will appear explaining how to convert it into a modern *.rel 4* file.
+
+Driver capabilities
+-------------------
+
+.. supports_create::
+
+.. supports_georeferencing::
+
+.. supports_virtualio::
+
+Overview of MiraMon format
+--------------------------
+
+The MiraMon format is a binary format for vector layer data, linked to
+one or more database tables, with or without topology and with rich metadata.
+More information about the structured MiraMon vector format is available in `the public
+specification <https://www.miramon.cat/new_note/eng/notes/MiraMon_structured_vectors_file_format.pdf>`__.
+
+It is important to keep in mind that a MiraMon vector layer is composed of several files, as follows:
+
+To operate with a point layer, you must provide the name with the extension .pnt
+(the T.dbf and T.rel files must accompany the .pnt).
+
+To operate with a linestring layer, you must provide the name with the extension .arc
+(the A.dbf and A.rel, .nod, N.dbf, and N.rel files must accompany the .arc).
+
+To operate with a polygon layer, you must provide the name with the extension .pol
+(the P.dbf, P.rel, A.dbf and A.rel, .nod, N.dbf, and N.rel files must accompany the .pol).
+
+By providing only the main file name, the driver will automatically use the other sidecar files to obtain the
+necessary information. In the creation of MiraMon layers, you only need to provide the name
+of the main file (with or without extension), and the driver will create the rest of the files.
+
+The following outlines the information contained within each sidecar file:
+
+Preliminary note: *FileName* is, in the following explanations, the first part of the name
+of the layer file.
+
+- **Point layers**: These layers contain *point* type features, which are described by a single
+  coordinate (x, y) or (x, y, z). Each layer is composed of 3 files:
+
+  - *FileName.pnt* file: Contains the geographic database with the coordinates that define the
+    point vector features.
+
+  - *FileNameT.dbf* file (note the 'T' before the '.'): Contains the main table of the database
+    in dBASE (DBF) format, or in `extended DBF format <https://www.miramon.cat/new_note/eng/notes/DBF_estesa.pdf>`__,
+    if necessary. It contains the information (usually alphanumeric, but also file or web links, etc.)
+    of every feature. The Feature Identifier (FID) field is a field called *ID_GRAFIC* and relates
+    every geographical feature to one or more records in the main table.
+
+  - *FileNameT.rel* file (note the 'T' before the '.'): Contains the layer metadata,
+    the relational structure of the database (links between the main table and other
+    tables [thesauruses, etc.] if needed, and the cardinality of the link) and the default
+    symbolization description. In the GDAL environment
+    only some aspects are documented: the spatial reference system, the language of the
+    metadata (English), the extent and a description of the fields.
+
+- **Arc layers**: These layers contain *linestring* type features, which are lines
+  described by a series of segments, each one defined by coordinates (x, y) or (x, y, z).
+  The two end vertices of each *linestring* are called nodes. Each layer is composed of 6 files:
+
+  - *FileName.arc* file: Contains the geographic database with the coordinates that define the
+    linestring (arc) vector features.
+
+  - *FileNameA.dbf* file (note the 'A' before the '.'): Contains the main table of the database
+    in dBASE (DBF) format, or in `extended DBF format <https://www.miramon.cat/new_note/eng/notes/DBF_estesa.pdf>`__,
+    if necessary. It contains the information (usually alphanumeric, but also file or web links, etc.)
+    of every feature. The Feature Identifier (FID) field is a field called *ID_GRAFIC* and relates
+    every geographical feature to one or more records in the main table.
+
+  - *FileNameA.rel* file (note the 'A' before the '.'): Contains the layer metadata,
+    the relational structure of the database (links between the main table and other
+    tables [thesauruses, etc.] if needed, and the cardinality of the link) and the default
+    symbolization description. In the GDAL environment
+    only some aspects are documented: the spatial reference system, the language of the
+    metadata (English), the extent and a description of the fields.
+
+  - *FileName.nod* file: Contains the geographic database with information about the
+    linestring needed to define each node. It is necessary in the MiraMon vector format but not read by
+    the GDAL MiraMon vector driver, because nodes contain topological information that is not
+    transferred to other formats.
+
+  - *FileNameN.dbf* file (note the 'N' before the '.'): Contains the main table of the database
+    in dBASE (DBF) format, or in extended DBF if necessary. This table contains information about
+    the relationships between arcs and nodes, and other attributes of the nodes, if needed.
+    It is necessary in the MiraMon vector format but not read by the GDAL MiraMon vector driver, because
+    nodes contain topological information that is not transferred to other formats.
+
+  - *FileNameN.rel* file (note the 'N' before the '.'): Contains the layer metadata,
+    the relational structure of the database (links between the main table and other
+    tables [thesauruses, etc.] if needed, and the cardinality of the link) and the default
+    symbolization description. It is necessary in the MiraMon vector format but not read by
+    the GDAL MiraMon vector driver, because nodes contain topological information that is not
+    transferred to other formats.
+
+- **Polygon layers**: These layers contain *polygon* or *multipolygon* type features.
+  In the MiraMon vector format a polygon is a closed shape described by one or more arcs.
+  A polygon can have holes inside it. A polygon can also be linked to other polygons;
+  in this case, it is termed a group (*multipolygon*).
+  Each layer is composed of 9 files:
+
+  - *FileName.pol* file: Contains the geographic database with information about the linestring
+    vector features needed to define the polygon (or multipolygon) vector features.
+
+  - *FileNameP.dbf* file (note the 'P' before the '.'): Contains the main table of the database
+    in dBASE (DBF) format, or in `extended DBF format <https://www.miramon.cat/new_note/eng/notes/DBF_estesa.pdf>`__,
+    if necessary. It contains the information (usually alphanumeric, but also file or web links, etc.)
+    of every feature. The Feature Identifier (FID) field is a field called *ID_GRAFIC* and relates
+    every geographical feature to one or more records in the main table.
+
+  - *FileNameP.rel* file (note the 'P' before the '.'): Contains the layer metadata,
+    the relational structure of the database (links between the main table and other
+    tables [thesauruses, etc.] if needed, and the cardinality of the link) and the default
+    symbolization description. In the GDAL environment
+    only some aspects are documented: the spatial reference system, the language of the
+    metadata (English), the extent and a description of the fields.
+
+  - *FileName.arc* file: Contains the geographic database with the coordinates that define the
+    arc vector features. The polygons within the polygon file reference the arcs in this file by their index.
+
+  - *FileNameA.dbf* file (note the 'A' before the '.'): Contains the main table of the database
+    in dBASE (DBF) format, or in extended DBF if necessary. This table contains information about
+    the relationship between arcs and polygons, not the main feature information. It is necessary in
+    MiraMon but not read directly by the GDAL MiraMon vector driver because
+    it is redundant with the information in the linestring part.
+
+  - *FileNameA.rel* file (note the 'A' before the '.'): Contains metadata about the data,
+    the relations of the database and the symbolization description. It is necessary in
+    MiraMon but not read directly by the GDAL MiraMon vector driver.
+
+  - *FileName.nod* file: Contains the geographic database with information about the
+    linestring needed to define each node. It is necessary in the MiraMon vector format but not read by
+    the GDAL MiraMon vector driver, because nodes contain topological information that is not
+    transferred to other formats.
+
+  - *FileNameN.dbf* file (note the 'N' before the '.'): Contains the main table of the database
+    in dBASE (DBF) format, or in extended DBF if necessary. This table contains information about
+    the relationships between arcs and nodes, and other attributes of the nodes, if needed.
+    It is necessary in the MiraMon vector format but not read by the GDAL MiraMon vector driver, because
+    nodes contain topological information that is not transferred to other formats.
+
+  - *FileNameN.rel* file (note the 'N' before the '.'): Contains metadata about the data,
+    the relations of the database and the symbolization description. It is necessary in
+    MiraMon but not read directly by the GDAL MiraMon vector driver.
+
+Encoding
+--------
+
+When reading MiraMon files, the code page setting in the header of the .dbf file
+is read and used to translate string fields to UTF-8 (regardless of whether they
+are in ANSI, OEM or UTF-8).
+
+When writing MiraMon files, the code page of the *.dbf* files can be ANSI or UTF-8,
+depending on the layer creation option DBFEncoding.
+
+Creation Issues
+---------------
+
+MiraMon can only store one kind of geometry per layer
+(points, arcs or polygons). 
Mixing different kinds of layers
+(including raster layers and geoservices such as WMS or WMTS) is possible through MiraMon maps (.mmm).
+During creation, the driver generates the necessary files to
+accommodate each of the three possible types of geometries.
+For instance, if a layer or a dataset contains points and arcs,
+a set of point files and a set of arc files will be created.
+
+Consequently, during creation the MiraMon vector driver output can be a
+folder or a set of files with the appropriate extension (*.pnt*, etc.):
+
+- If the output is a **folder**, it will contain all the translated layers, keeping their original names from the origin dataset.
+
+  - In this case, a *.mmm* file will be created referencing all layers in the origin dataset, to make it
+    easy to open the dataset using the MiraMon software.
+  - In this case, please specify the MiraMon output format name using the -f option (**-f MiraMonVector**).
+
+- If the output is a **file** with an extension, all the translated layers in the origin dataset will be created with the specified name.
+  Use this option only when you know that there is only one layer with one feature type in the origin dataset.
+
+The attributes of the MiraMon features are stored in an associated *.dbf* file.
+If a classical dBASE IV DBF table cannot be used (too many fields or records,
+large text fields, etc.), a file type called extended DBF is used.
+This is an improvement of dBASE IV DBF files. The specification of this format can be found in `this file
+<https://www.miramon.cat/new_note/eng/notes/DBF_estesa.pdf>`__.
+
+Note that extended *.dbf* files cannot be opened with Excel or
+other typical programs. If the complete MiraMon Professional software
+is not installed on the computer, the free and standalone
+MiraD application can be downloaded from
+`this page <https://www.miramon.cat/USA/Prod-MiraD.htm>`__ to open them.
+
+Connection string
+-----------------
+
+The MiraMon driver accepts three types of data sources.
+
+When translating from the MiraMon vector format, the MiraMon vector driver input needs a file with one of the
+following extensions:
+
+- *.pnt* for *points*.
+- *.arc* for *linestrings*.
+- *.pol* for *polygons* (or *multipolygons*).
+
+The extension *.nod* is not valid for translation. Take into account all the auxiliary files described above.
+
+Field sizes
+-----------
+
+The driver will automatically extend string and integer fields to
+dynamically accommodate the length of the data to be inserted.
+
+Size Issues
+-----------
+
+Geometry: The MiraMon vector format explicitly uses 32-bit offsets in the 1.1 version
+and 64-bit offsets in the 2.0 version. It is better to produce version 1.1 files when version 2.0
+is not really necessary, rather than always using version 2.0, since version 1.x files are smaller.
+
+Attributes: The DBF format does not have any offsets in it, so it can be
+arbitrarily large.
+
+Open options
+------------
+
+The following open options are available.
+
+- .. oo:: Height
+   :choices: First, Lowest, Highest
+
+   Sets which of the possible heights for each vertex is read:
+   the *first*, the *lowest* or the *highest* one. It only applies to
+   MiraMon multi-height layers, where the same X,Y vertex can have more than one Z.
+
+- .. oo:: MultiRecordIndex
+   :choices: 1, 2, ..., Last, JSON
+
+   In the case of fields of type List, if the output driver cannot support them,
+   the user can select which element to keep: *MultiRecordIndex=1* for the first, *MultiRecordIndex=2* for the second, etc.,
+   and *MultiRecordIndex=last* for the last element of the list.
+   The *MultiRecordIndex=JSON* option converts the list into a single value in JSON format.
+   If not specified, all elements of the list will be translated by default as an OGR list field type.
+
+- .. oo:: OpenLanguage
+   :choices: ENG, CAT, SPA
+   :default: ENG
+
+   If the layer to be opened is multilingual (more precisely, its *.rel* file), this
+   option sets the language to be read.
+
+
+Dataset creation options
+------------------------
+
+None
+
+Layer creation options
+----------------------
+
+- .. lco:: Version
+   :choices: V1.1, V2.0, last_version
+   :default: V1.1
+
+   Version of the file.
+   Version 1.1 is limited to an unsigned 32-bit integer for FID, for internal
+   offsets and for the number of entities the layer can handle.
+   It is the default option.
+   Version 2.0 is the 64-bit version. It is practically unlimited
+   (unsigned 64-bit integer for FID and internal offsets).
+   last_version selects the latest existing version.
+
+- .. lco:: DBFEncoding
+   :choices: UTF8, ANSI
+   :default: ANSI
+
+   Encoding of the *.dbf* files.
+   The MiraMon vector driver can write *.dbf* files in the UTF-8 or ANSI charsets.
+
+   As of this release, UTF-8 tables are not editable in the
+   `MiraD application <https://www.miramon.cat/USA/Prod-MiraD.htm>`__, so it
+   is recommended to use ANSI instead, if there are no encoding problems.
+
+- .. lco:: CreationLanguage
+   :choices: ENG, CAT, SPA
+   :default: ENG
+
+   Sets the language used in the metadata file (*.rel*) for the descriptors of
+   the *.dbf* fields.
+
+Examples
+--------
+
+- A translation of *Example_1.dxf*, a file with one layer containing several geometry types,
+  into a new set of MiraMon layers in 'output_folder' is performed like this:
+
+  ::
+
+    ogr2ogr output_folder Example_1.dxf -f MiraMonVector -lco Version=V1.1
+
+
+- A translation of *Example_2.dxf*, a file with one polygon type layer, into a new MiraMon layer
+  'territories.pol' (with UTF-8 encoding in the *.dbf* files) is performed like this
+  (there is no need to include **-f MiraMonVector** because the output is not a directory):
+
+  ::
+
+    ogr2ogr territories.pol Example_2.dxf -lco DBFEncoding=UTF8
+
+
+- A translation of a MiraMon layer of arcs, 'rivers.arc', into a new *.gml* file (taking only the first element of
+  the multirecords in the attributes table) is performed like this:
+
+  ::
+
+    ogr2ogr rivers.gml rivers.arc -oo MultiRecordIndex=1
+
+- A translation of a MiraMon layer 'tracks.arc' into a new *.gml* file taking the first height of
+  every point is performed like this:
+
+  ::
+
+    ogr2ogr tracks.gml tracks.arc -oo Height=First
+
+- A translation of a MiraMon layer 'tracks.arc' into a new *.gml* file taking the first height of
+  every point and documenting the attribute descriptors in Catalan (if the layer is multilingual
+  and it has this language available) is performed like this:
+
+  ::
+
+    ogr2ogr tracks.gml tracks.arc -oo Height=First -oo OpenLanguage=CAT
+
+
+See Also
+--------
+
+- `MiraMon's vector format specifications <https://www.miramon.cat/new_note/eng/notes/MiraMon_structured_vectors_file_format.pdf>`__
+- `MiraMon Extended DBF format <https://www.miramon.cat/new_note/eng/notes/DBF_estesa.pdf>`__
+- `MiraMon vector layer concepts <https://www.miramon.cat/help/eng/mm32/ap2.htm#structured_vector>`__
+- `MiraMon page <https://www.miramon.cat/Index_usa.htm>`__ +- `MiraMon help guide <https://www.miramon.cat/help/eng>`__ +- `Grumets research group, the people behind MiraMon <https://www.grumets.cat/index_eng.htm>`__ diff --git a/frmts/drivers.ini b/frmts/drivers.ini index 01cce7154ea0..116fdab02b9d 100644 --- a/frmts/drivers.ini +++ b/frmts/drivers.ini @@ -262,6 +262,7 @@ Arrow GTFS PMTiles JSONFG +MiraMonVector # Put TIGER and AVCBIN at end since they need poOpenInfo->GetSiblingFiles() Tiger diff --git a/fuzzers/CMakeLists.txt b/fuzzers/CMakeLists.txt index 47a0c5bf144c..5cf1b6dc73bf 100644 --- a/fuzzers/CMakeLists.txt +++ b/fuzzers/CMakeLists.txt @@ -90,6 +90,7 @@ build_ogr_specialized_fuzzer(avcbin RegisterOGRAVCBin "/vsimem/test.tar" "/vsita build_ogr_specialized_fuzzer(gml RegisterOGRGML "/vsimem/test.tar" "/vsitar//vsimem/test.tar/test.gml") build_fuzzer(NAME cad_fuzzer SOURCES ogr_fuzzer.cpp DEFINITIONS -DREGISTER_FUNC=RegisterOGRCAD) +build_fuzzer(NAME ogr_miramon_fuzzer SOURCES ogr_fuzzer.cpp DEFINITIONS -DREGISTER_FUNC=RegisterOGRMiraMon -DMEM_FILENAME="/vsimem/test.tar" -DFOR_OGR_MIRAMON) function (build_gdal_specialized_fuzzer _format _registerFunc _memfile _gdalfile) build_fuzzer( diff --git a/fuzzers/build_google_oss_fuzzers.sh b/fuzzers/build_google_oss_fuzzers.sh index 6095517a0120..ad0b2c2a863c 100755 --- a/fuzzers/build_google_oss_fuzzers.sh +++ b/fuzzers/build_google_oss_fuzzers.sh @@ -93,6 +93,7 @@ build_ogr_specialized_fuzzer avcbin RegisterOGRAVCBin "/vsimem/test.tar" "/vsita build_ogr_specialized_fuzzer gml RegisterOGRGML "/vsimem/test.tar" "/vsitar//vsimem/test.tar/test.gml" build_ogr_specialized_fuzzer gmlas RegisterOGRGMLAS "/vsimem/test.tar" "GMLAS:/vsitar//vsimem/test.tar/test.gml" build_ogr_specialized_fuzzer fgb RegisterOGRFlatGeobuf "/vsimem/test.fgb" "/vsimem/test.fgb" +build_fuzzer ogr_miramon_fuzzer $(dirname $0)/ogr_fuzzer.cpp -DREGISTER_FUNC=RegisterOGRMiraMon -DMEM_FILENAME="\"/vsimem/test.tar\"" -DFOR_OGR_MIRAMON build_fuzzer cad_fuzzer $(dirname $0)/ogr_fuzzer.cpp -DREGISTER_FUNC=RegisterOGRCAD formats="GTiff HFA" diff --git a/fuzzers/build_seed_corpus.sh b/fuzzers/build_seed_corpus.sh index f158e5bf8fe5..291fc011250d 100755 --- a/fuzzers/build_seed_corpus.sh +++ b/fuzzers/build_seed_corpus.sh @@ -592,6 +592,26 @@ rm -f $OUT/lvbag_fuzzer_seed_corpus.zip zip -r $OUT/lvbag_fuzzer_seed_corpus.zip ./*.xml >/dev/null cd $OLDPWD +echo "Building ogr_miramon_fuzzer_seed_corpus.zip" +rm -f $OUT/ogr_miramon_fuzzer_seed_corpus.zip +CUR_DIR=$PWD +cd $(dirname $0)/../autotest/ogr/data/miramon +for subdir in *; do + (cd $subdir + for subdir2 in *; do + (cd $subdir2 + printf "FUZZER_FRIENDLY_ARCHIVE\\n" > $CUR_DIR/ogr_miramon_${subdir}_${subdir2}.tar + for file in *; do + printf "***NEWFILE***:%s\\n" "$file" >> $CUR_DIR/ogr_miramon_${subdir}_${subdir2}.tar + cat $file >> $CUR_DIR/ogr_miramon_${subdir}_${subdir2}.tar + done + ) + done + ) +done +cd $CUR_DIR +zip -r $OUT/ogr_miramon_fuzzer_seed_corpus.zip ogr_miramon_*.tar >/dev/null +rm ogr_miramon_*.tar echo "Copying data to $OUT" cp $(dirname $0)/../data/* $OUT diff --git a/fuzzers/ogr_fuzzer.cpp b/fuzzers/ogr_fuzzer.cpp index 75286078a614..805473ce236e 100644 --- a/fuzzers/ogr_fuzzer.cpp +++ b/fuzzers/ogr_fuzzer.cpp @@ -107,6 +107,32 @@ int LLVMFuzzerTestOneInput(const uint8_t *buf, size_t len) CPLPushErrorHandler(CPLQuietErrorHandler); #ifdef USE_FILESYSTEM OGRDataSourceH hDS = OGROpen(szTempFilename, FALSE, nullptr); +#elif defined(FOR_OGR_MIRAMON) + std::string osVsitarPrefix("/vsitar/"); + char **papszFiles 
= + VSIReadDir(std::string(osVsitarPrefix).append(MEM_FILENAME).c_str()); + std::string osFileInTar; + for (int i = 0; papszFiles && papszFiles[i]; ++i) + { + if (EQUAL(CPLGetExtension(papszFiles[i]), "pol") || + EQUAL(CPLGetExtension(papszFiles[i]), "arc") || + EQUAL(CPLGetExtension(papszFiles[i]), "pnt")) + { + osFileInTar = papszFiles[i]; + break; + } + } + CSLDestroy(papszFiles); + OGRDataSourceH hDS = nullptr; + if (!osFileInTar.empty()) + { + hDS = OGROpen(std::string(osVsitarPrefix) + .append(MEM_FILENAME) + .append("/") + .append(osFileInTar) + .c_str(), + FALSE, nullptr); + } #else OGRDataSourceH hDS = OGROpen(GDAL_FILENAME, FALSE, nullptr); #endif diff --git a/ogr/ogrsf_frmts/CMakeLists.txt b/ogr/ogrsf_frmts/CMakeLists.txt index ec61b85a1529..f11285935dcb 100644 --- a/ogr/ogrsf_frmts/CMakeLists.txt +++ b/ogr/ogrsf_frmts/CMakeLists.txt @@ -47,6 +47,9 @@ ogr_optional_driver(vdv "VDV-451/VDV-452/INTREST Data Format") ogr_optional_driver(flatgeobuf FlatGeobuf) ogr_optional_driver(mapml MapML) ogr_optional_driver(jsonfg JSONFG) +if( NOT WORDS_BIGENDIAN ) + ogr_optional_driver(miramon "MiraMonVector") +endif() # ###################################################################################################################### # diff --git a/ogr/ogrsf_frmts/generic/ogrregisterall.cpp b/ogr/ogrsf_frmts/generic/ogrregisterall.cpp index be4de0ae847f..8b364b141964 100644 --- a/ogr/ogrsf_frmts/generic/ogrregisterall.cpp +++ b/ogr/ogrsf_frmts/generic/ogrregisterall.cpp @@ -263,6 +263,9 @@ void OGRRegisterAllInternal() #ifdef JSONFG_ENABLED RegisterOGRJSONFG(); #endif +#ifdef MIRAMON_ENABLED + RegisterOGRMiraMon(); +#endif // NOTE: you need to generally insert your own driver before that line. diff --git a/ogr/ogrsf_frmts/miramon/CMakeLists.txt b/ogr/ogrsf_frmts/miramon/CMakeLists.txt new file mode 100644 index 000000000000..8eccd4e46b07 --- /dev/null +++ b/ogr/ogrsf_frmts/miramon/CMakeLists.txt @@ -0,0 +1,14 @@ +add_gdal_driver( + TARGET ogr_MiraMon + SOURCES ogrmiramondatasource.cpp ogrmiramondriver.cpp ogrmiramonlayer.cpp mm_wrlayr.c mm_gdal_functions.c mm_rdlayr.c + PLUGIN_CAPABLE) +gdal_standard_includes(ogr_MiraMon) +target_include_directories(ogr_MiraMon PRIVATE $<TARGET_PROPERTY:ogrsf_generic,SOURCE_DIR>) + +set(GDAL_DATA_FILES + ${CMAKE_CURRENT_SOURCE_DIR}/data/MM_m_idofic.csv +) +set_property( + TARGET ${GDAL_LIB_TARGET_NAME} + APPEND + PROPERTY RESOURCE "${GDAL_DATA_FILES}") diff --git a/ogr/ogrsf_frmts/miramon/data/MM_m_idofic.csv b/ogr/ogrsf_frmts/miramon/data/MM_m_idofic.csv new file mode 100644 index 000000000000..c19fd2581642 --- /dev/null +++ b/ogr/ogrsf_frmts/miramon/data/MM_m_idofic.csv @@ -0,0 +1,233 @@ +PSIDGEODES;ID_GEODES;NOTA_CAT;NOTA_SPA;NOTA_ENG +ESRI:102022;Albers_Equal_Area-Africa-WGS84;;; +ESRI:102025;Albers_Equal_Area-Asia_North-WGS84;;; +EPSG:5070;Albers_Equal_Area-N_America-NAD83;https://epsg.io/5070;https://epsg.io/5070;https://epsg.io/5070 +Azimuthal_Equidistant;Azimuthal_Equidistant-0-90-WGS84;;; +EPSG:4088;Cilindrical_Equidistant-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +Cylindrical_Equal_Area;Cylindrical_Equal_Area-15-0-WGS84;;; +EPSG:22171;Gauss-Kruger_Faja1-PosGAR98;;; +EPSG:22172;Gauss-Kruger_Faja2-PosGAR98;;; +EPSG:22173;Gauss-Kruger_Faja3-PosGAR98;;; +EPSG:22174;Gauss-Kruger_Faja4-PosGAR98;;; +EPSG:22175;Gauss-Kruger_Faja5-PosGAR98;;; +EPSG:22176;Gauss-Kruger_Faja6-PosGAR98;;; +EPSG:22177;Gauss-Kruger_Faja7-PosGAR98;;; 
+EPSG:3763;Gauss-Kruger_Portugal-ETRS89;;; +PT-TM06/ETRS89;Gauss-Kruger_Portugal-ETRS89;;; +EPSG:20791;Gauss-Kruger_Portugal-Lisboa1937;;; +EPSG:2932;Gauss-Kruger_Qatar-QND;;; +EPSG:3116;Gauss-Kruger_Zona2-MAGNA;;; +SR-ORG:9111;Geostationary-WGS84;;; +Goode_Homolosine;Goode_Homolosine-WGS84;;; +ESRI:102017;LambertAzimEqualA-0-90-WGS84-Ellipsoide;https://epsg.io/102017;https://epsg.io/102017;https://epsg.io/102017 +EPSG:9821;LambertAzimEqualA-0-90-WGS84-Esfera;https://epsg.io/9821-method;https://epsg.io/9821-method;https://epsg.io/9821-method +Lambert_Azimuthal_Equal_Area;LambertAzimEqualA-0-90-WGS84-Esfera;;; +Lambert_Azimuthal_Equal_Area-0-90-WGS84;LambertAzimEqualA-0-90-WGS84-Esfera;;; +EPSG:3035;Lambert_Azimuthal_Equal_Area-1052-ETRS89;;; +urn:ogc:def:crs:EPSG::3035;Lambert_Azimuthal_Equal_Area-1052-ETRS89;;; +ETRS-LAEA;Lambert_Azimuthal_Equal_Area-1052-ETRS89;;; +SR-ORG:7297;Lambert_Azimuthal_Equal_Area-9-48-ETRS89;;; +ETRS-LCC;Lambert_Conformal_Conic-Europa-ETRS89;;; +EPSG:3034;Lambert_Conformal_Conic-Europa-ETRS89;;; +EPSG:2154;Lambert_Conformal_Conic-França-ETRS89;;; +EPSG:2062;Lambert_Conformal_Conic-Madrid1870;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26191;Lambert_Conformal_Conic-Maroc_N-Merchich;;; +EPSG:27561;Lambert_Conformal_Conic-ZoneI-NTF;;; +EPSG:27562;Lambert_Conformal_Conic-ZoneII-NTF;;; +EPSG:27563;Lambert_Conformal_Conic-ZoneIII-NTF;;; +Lambert_Conformal_Conic;Lambert_Conformal_Conic-ZoneIII-NTF;;; +EPSG:27573;Lambert_Conformal_Conic-ZoneIII_ext-NTF;;; +EPSG:27572;Lambert_Conformal_Conic-ZoneII_ext-NTF;;; +AUTO2:LCC,1,14.5,38,35,41;Lambert_Conformal_Conic_ICC_Mediterrani;;; +AUTO2:MERCATOR,1,0,0.0;Mercator-Equator-ED50-UB/ICC;;; +Mercator;Mercator-Equator-ED50-UB/ICC;;; +Mercator-ED50;Mercator-Equator-ED50-UB/ICC;;; +Mercator-ED50-UB/ICC;Mercator-Equator-ED50-UB/ICC;;; +Mercator-Ecuador-ED50-UB/ICC;Mercator-Equator-ED50-UB/ICC;;; +EPSG:3395;Mercator-Equator-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +AUTO2:MERCATOR,1,0,40.60;Mercator-IHM-485-60k-ED50-UB/ICC;;; +AUTO2:MERCATOR,1,0,41.42;Mercator-IHM-489-50k-ED50-UB/ICC;;; +AUTO2:MERCATOR_WGS84,1,0,41.42;Mercator-IHM-489-50k-WGS84;;; +EPSG:3785;Mercator-Popular-Visualisation-Sphere;;; +EPSG:3857;Mercator-Popular-Visualisation-Sphere;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +urn:ogc:def:crs:EPSG::3857;Mercator-Popular-Visualisation-Sphere;;; +EPSG:900913;Mercator-Popular-Visualisation-Sphere;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +ESRI:102100;Mercator-Popular-Visualisation-Sphere;;; +EPSG:21782;ObliqueMercator-Rosenmund1903;;; +SR-ORG:6842;Sinusoidal-V5-MODIS;https://spatialreference.org/ref/sr-org/modis-sinusoidal/;https://spatialreference.org/ref/sr-org/modis-sinusoidal/;https://spatialreference.org/ref/sr-org/modis-sinusoidal/ +SR-ORG:6974;Sinusoidal-V5-MODIS;https://spatialreference.org/ref/sr-org/modis-sinusoidal-3/;https://spatialreference.org/ref/sr-org/modis-sinusoidal-3/;https://spatialreference.org/ref/sr-org/modis-sinusoidal-3/ +SR-ORG:6965;Sinusoidal-V5-MODIS;https://spatialreference.org/ref/sr-org/modis-sinusoidal-2/;https://spatialreference.org/ref/sr-org/modis-sinusoidal-2/;https://spatialreference.org/ref/sr-org/modis-sinusoidal-2/ 
+Sinusoidal;Sinusoidal-WGS84;;; +EPSG:3909;TransverseMercator-BalkansMGI1901;;; +EPSG:2393;TransverseMercator-Finland-KKJ;;; +EPSG:29903;TransverseMercator-Ireland1965;;; +EPSG:2039;TransverseMercator-Israel1989;;; +EPSG:3003;TransverseMercator-Monte_Mario-Italy_Z1;;; +EPSG:3021;TransverseMercator-Sweden-RT90;;; +EPSG:26710;UTM-10N-NAD27-CW;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32610;UTM-10N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26901;UTM-1N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26902;UTM-2N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26903;UTM-3N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26904;UTM-4N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26905;UTM-5N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26906;UTM-6N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26906;UTM-7N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26908;UTM-8N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26909;UTM-9N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26910;UTM-10N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26911;UTM-11N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26912;UTM-12N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26913;UTM-13N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26914;UTM-14N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26915;UTM-15N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26916;UTM-16N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26917;UTM-17N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26918;UTM-18N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26919;UTM-19N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at 
https://epsg.io/ +EPSG:26920;UTM-20N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26921;UTM-21N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26922;UTM-22N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26923;UTM-23N-NAD83;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26711;UTM-11N-NAD27-CW;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:4486;UTM-13N-ITRF92;;; +EPSG:26713;UTM-13N-NAD27-MX;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:4487;UTM-14N-ITRF92;;; +EPSG:26714;UTM-14N-NAD27-MX;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:4488;UTM-15N-ITRF92;;; +EPSG:26715;UTM-15N-NAD27-MX;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:26915;UTM-15N-NAD83;;; +EPSG:26716;UTM-16N-NAD27-BC;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32616;UTM-16N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:24877;UTM-17S-PSA56-P;;; +EPSG:29187;UTM-17S-SAD69-PE;;; +EPSG:32717;UTM-17S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32618;UTM-18N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:29188;UTM-18S-SAD69-CH;;; +EPSG:32718;UTM-18S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:29169;UTM-19N-SAD69-BR;;; +EPSG:32619;UTM-19N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:24879;UTM-19S-PSAD56-BC;;; +EPSG:24879-1201;UTM-19S-PSAD56-BC;Transformació per defecte segons https://epsg.io/24879;Transformación por defecto según https://epsg.io/24879;Default transformation according to https://epsg.io/24879 +EPSG:24879-1203;UTM-19S-PSAD56-CN;;; +EPSG:24879-1209;UTM-19S-PSAD56-V;;; +EPSG:29189;UTM-19S-SAD69-CH;;; +EPSG:32719;UTM-19S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:29170;UTM-20N-SAD69-BR;;; +EPSG:32620;UTM-20N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:29190;UTM-20S-SAD69-BR;;; +EPSG:32720;UTM-20S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; +EPSG:29171;UTM-21N-SAD69-BR;;; +EPSG:32621;UTM-21N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; +EPSG:29191;UTM-21S-SAD69-BR;;; +EPSG:32721;UTM-21S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; +EPSG:29172;UTM-22N-SAD69-BR;;; +EPSG:32622;UTM-22N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; 
+EPSG:29192;UTM-22S-SAD69-BR;;; +EPSG:32722;UTM-22S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;; +EPSG:29193;UTM-23S-SAD69-BR;;; +EPSG:29194;UTM-24S-SAD69-BR;;; +EPSG:29195;UTM-25S-SAD69-BR;;; +ETRS-TM26;UTM-26N-ETRS89;;; +EPSG:3038;UTM-26N-ETRS89;;; +EPSG:3039;UTM-27N-ETRS89;;; +ETRS-TM27;UTM-27N-ETRS89;;; +EPSG:32627;UTM-27N-WGS84;Ordre d'eixos preferit: est-nord (XY). Sense paràmetres TOWGS84 a https://epsg.io/;Orden de ejes preferido: est-norte (XY). Sin parámetros TOWGS84 en https://epsg.io/;Preferred axis order: east-north (XY). No TOWGS84 parameters at https://epsg.io/ +EPSG:3040;UTM-28N-ETRS89;Ordre d'eixos preferit: nord-est (YX);Orden de ejes preferido: norte-est (YX). Sin parámetros TOWGS84 en https://epsg.io/;Preferred axis order: north-east (YX). No TOWGS84 parameters at https://epsg.io/ +ETRS-TM28;UTM-28N-ETRS89;;; +EPSG:4083;UTM-28N-REGCAN95;;; +EPSG:32628;UTM-28N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:23029;UTM-29N-S/IGN;;; +EPSG:23029-0000;UTM-29N-ED50-ABDF;;; +EPSG:23029-1145;UTM-29N-ED50-PS;;; +EPSG:25829;UTM-29N-ETRS89;Ordre d'eixos preferit: est-nord (XY);Orden de ejes preferido: est-norte (XY);Preferred axis order: east-north (XY) +EPSG:3041;UTM-29N-ETRS89;Ordre d'eixos preferit: nord-est (YX);Orden de ejes preferido: norte-est (YX);Preferred axis order: north-east (YX) +urn:ogc:def:crs:EPSG::25829;UTM-29N-ETRS89;;; +ETRS-TM29;UTM-29N-ETRS89;;; +EPSG:23029-1633;UTM-29N-S/IGN;;; +EPSG:32629;UTM-29N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +urn:ogc:def:crs:EPSG::23029;UTM-29N-S/IGN;;; +EPSG:25830;UTM-30N-ETRS89;Ordre d'eixos preferit: est-nord (XY);Orden de ejes preferido: est-norte (XY);Preferred axis order: east-north (XY) +EPSG:3042;UTM-30N-ETRS89;Ordre d'eixos preferit: nord-est (YX);Orden de ejes preferido: norte-est (YX);Preferred axis order: north-east (YX) +urn:ogc:def:crs:EPSG::25830;UTM-30N-ETRS89;;; +ETRS-TM30;UTM-30N-ETRS89;;; +EPSG:23030;UTM-30N-S/IGN;;; +EPSG:23030-0000;UTM-30N-ABDF;Transformació per defecte segons https://epsg.io/23030-to-4326;Transformación por defecto según https://epsg.io/23030-to-4326;Default transformation according to https://epsg.io/23030-to-4326 +EPSG:23030-15933;UTM-30N-IP;;; +EPSG:23030-1631;UTM-30N-Balearic;;; +EPSG:23030-1635;UTM-30N-NW_IP;;; +EPSG:23030-1145;UTM-30N-PS;;; +EPSG:23030-1633;UTM-30N-S/IGN;;; +urn:ogc:def:crs:EPSG::23030;UTM-30N-S/IGN;;; +UTM-30N;UTM-30N-S/IGN;;; +EPSG:32630;UTM-30N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:25831;UTM-31N-ETRS89;Ordre d'eixos preferit: est-nord (XY);Orden de ejes preferido: est-norte (XY);Preferred axis order: east-north (XY) +EPSG:3043;UTM-31N-ETRS89;Ordre d'eixos preferit: nord-est (YX);Orden de ejes preferido: norte-est (YX);Preferred axis order: north-east (YX) +urn:ogc:def:crs:EPSG::25831;UTM-31N-ETRS89;;; +ETRS-TM31;UTM-31N-ETRS89;;; +UTM-31N;UTM-31N-UB/ICC;;; +UTM-31N-ED50;UTM-31N-UB/ICC;;; +EPSG:23031;UTM-31N-UB/ICC;Excepcionalment no es fa correspondre a UTM-31N-ABDF (=https://epsg.io/23031) sinó a UTM-31N-UB/ICC per compatibilitat descendent;Excepcionalmente no se hace corresponder a UTM-31N-ABDF (=https://epsg.io/23031) sino a UTM-31N-UB/ICC por compatibilidad descendente;Exceptionally it does not correspond to UTM-31N-ABDF (=https://epsg.io/23031) but to UTM-31N-UB/ICC for 
backwards compatibility +EPSG:23031-0000;UTM-31N-ABDF;Transformació per defecte segons https://epsg.io/23031-to-4326;Transformación por defecto según https://epsg.io/23031-to-4326;Default transformation according to https://epsg.io/23031-to-4326 +urn:ogc:def:crs:EPSG::23031;UTM-31N-UB/ICC;;; +EPSG:32631;UTM-31N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:25832;UTM-32N-ETRS89;;; +ETRS-TM32;UTM-32N-ETRS89;;; +EPSG:32632;UTM-32N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:25833;UTM-33N-ETRS89;;; +ETRS-TM33;UTM-33N-ETRS89;;; +EPSG:32633;UTM-33N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:22033;UTM-33S-Camacupa1980;;; +EPSG:32733;UTM-33S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:25834;UTM-34N-ETRS89;;; +ETRS-TM34;UTM-34N-ETRS89;;; +EPSG:2100;UTM-34N-GGRS87;;; +EPSG:32634;UTM-34N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:25835;UTM-35N-ETRS89;;; +ETRS-TM35;UTM-35N-ETRS89;;; +EPSG:32635;UTM-35N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +ETRS-TM36;UTM-36N-ETRS89;;; +EPSG:25836;UTM-36N-ETRS89;;; +EPSG:32636;UTM-36N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:2736;UTM-36S-Tete-MZ;;; +EPSG:32736;UTM-36S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +ETRS-TM37;UTM-37N-ETRS89;;; +EPSG:25837;UTM-37N-ETRS89;;; +EPSG:32637;UTM-37N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +ETRS-TM38;UTM-38N-ETRS89;;; +EPSG:25838;UTM-38N-ETRS89;;; +ETRS-TM39;UTM-39N-ETRS89;;; +EPSG:32642;UTM-42N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32643;UTM-43N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32644;UTM-44N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32647;UTM-47N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32659;UTM-59N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32759;UTM-59S-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:32606;UTM-6N-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:4218;lat/long-Bogota;;; +EPSG:4149;lat/long-CH1903;;; +EPSG:4230-1145;lat/long-ED50-PS;;; +EPSG:4230-1633;lat/long-ED50-S/IGN;;; +EPSG:4230-0000;lat/long-ED50-ABDF;Transformació per defecte segons 
https://epsg.io/4230-to-4326;Transformación por defecto según https://epsg.io/4230-to-4326;Default transformation according to https://epsg.io/4230-to-4326 +EPSG:4230;lat/long-ED50-UB/ICC;;; +lat/long-ED50;lat/long-ED50-UB/ICC;;; +EPSG:4258;lat/long-ETRS89;;; +EPSG:4686;lat/long-MAGNA;;; +EPSG:4903;lat/long-Madrid1870;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:4261;lat/long-Merchich;;; +EPSG:4267;lat/long-NAD27-BC;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +EPSG:4269;lat/long-NAD83-AA;;; +EPSG:4275;lat/long-NTF;;; +EPSG:4190;lat/long-PosGAR98;;; +EPSG:4081;lat/long-REGCAN95;;; +EPSG:5527;lat/long-SAD69-CH;;; +EPSG:4124;lat/long-Sweden-RT90;;; +EPSG:4127;lat/long-Tete-MZ;;; +EPSG:4326;lat/long-WGS84;Sense paràmetres TOWGS84 a https://epsg.io/;Sin parámetros TOWGS84 en https://epsg.io/;No TOWGS84 parameters at https://epsg.io/ +urn:ogc:def:crs:EPSG::4326;lat/long-WGS84;;; +CRS:84;lat/long-WGS84;;; +Equirectangular;lat/long-WGS84;;; +lat/long;lat/long-WGS84;;; +urn:ogc:def:crs:OGC:1.3:CRS84;lat/long-WGS84;;; +EPSG:9377;Transverse-Mercator_Colombia_ONacional;;; +MAGNA-SIRGAS / Origen-Nacional;Transverse-Mercator_Colombia_ONacional;https://origen.igac.gov.co/herramientas.html;https://origen.igac.gov.co/herramientas.html;https://origen.igac.gov.co/herramientas.html diff --git a/ogr/ogrsf_frmts/miramon/mm_constants.h b/ogr/ogrsf_frmts/miramon/mm_constants.h new file mode 100644 index 000000000000..dbef6343ccd4 --- /dev/null +++ b/ogr/ogrsf_frmts/miramon/mm_constants.h @@ -0,0 +1,173 @@ +#ifndef __MM_CONSTANTS_H +#define __MM_CONSTANTS_H +/* -------------------------------------------------------------------- */ +/* Constants used in GDAL and in MiraMon */ +/* -------------------------------------------------------------------- */ +#ifdef GDAL_COMPILATION +CPL_C_START // Necessary for compiling C in GDAL project +#else +#ifndef UINT32_MAX +#define UINT32_MAX _UI32_MAX +#endif +#endif // GDAL_COMPILATION + +#define MM_OFFSET_BYTESxCAMP_CAMP_CLASSIC 16 +#define MM_OFFSET_BYTESxCAMP_CAMP_ESPECIAL 21 +#define MM_MAX_LON_RESERVAT_1_CAMP_BD_XP 4 +#define MM_OFFSET_RESERVAT2_BYTESxCAMP_CAMP_ESPECIAL 3 +#define MM_OFFSET_RESERVAT2_OFFSET_NOM_ESTES 7 +#define MM_OFFSET_RESERVED2_EXTENDED_NAME_SIZE 11 +#define MM_MAX_LON_RESERVAT_2_CAMP_BD_XP 13 + +#define MM_ES_DBF_ESTESA(dbf_version) \ + (((dbf_version) == MM_MARCA_VERSIO_1_DBF_ESTESA) ? TRUE : FALSE) + +#define MM_UNDEFINED_STATISTICAL_VALUE (2.9E+301) +#define MM_CPL_PATH_BUF_SIZE 2048 + +// BIT 1 +#define MM_BIT_1_ON 0x02 // Generated using MiraMon +// BIT 3 +#define MM_BIT_3_ON 0x08 // Multipolygon +// BIT 4 +#define MM_BIT_4_ON 0x10 // 3D +// BIT 5 +#define MM_BIT_5_ON \ + 0x20 // Explicital polygons (every polygon has only one arc) + +#define MM_CREATED_USING_MIRAMON MM_BIT_1_ON +#define MM_LAYER_MULTIPOLYGON MM_BIT_3_ON +#define MM_LAYER_3D_INFO MM_BIT_4_ON + +#define MM_BOOLEAN char +#define MM_HANDLE void * + +#define MM_MESSAGE_LENGTH 512 +#define MM_MAX_BYTES_FIELD_DESC 360 +#define MM_MAX_BYTES_IN_A_FIELD_EXT (UINT32_MAX - 1) +#define MM_MAX_LON_FIELD_NAME_DBF 129 +#define MM_MAX_LON_CLASSICAL_FIELD_NAME_DBF 11 +#define MM_MAX_LON_UNITATS 66 +#define MM_MAX_LON_UNITATS_CAMP MM_MAX_LON_UNITATS + +// Determines if an arc is external, the last one in a ring or +// if it has to be inverted to be consistent with other arcs +// in the ring. 
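+// For example, a value of 0x03 (MM_POL_EXTERIOR_SIDE | MM_POL_END_RING)
+// would mark the last arc of an outer ring whose vertices are used as stored
+// (MM_POL_REVERSE_ARC not set).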
+#define MM_POL_EXTERIOR_SIDE 0x01 +#define MM_POL_END_RING 0x02 +#define MM_POL_REVERSE_ARC 0x04 + +// Z Part +#define MM_SELECT_FIRST_COORDZ 0 +#define MM_SELECT_HIGHEST_COORDZ 1 +#define MM_SELECT_LOWEST_COORDZ 2 + +#define MM_STRING_HIGHEST_ALTITUDE 0x0001 +#define MM_STRING_LOWEST_ALTITUDE 0x0002 + +#define /*double*/ MM_NODATA_COORD_Z (-1.0E+300) + +// General static variables +#define MM_MAX_LEN_LAYER_NAME 255 +#define MM_MAX_LEN_LAYER_IDENTIFIER 255 + +#define MM_TYPICAL_NODE 0 +#define MM_LINE_NODE 1 +#define MM_RING_NODE 2 +#define MM_FINAL_NODE 3 + +#define MM_MAX_ID_SNY 41 + +#ifndef GDAL_COMPILATION + typedef unsigned int uint32_t; +typedef int int32_t; +#endif + +// Extended DBF +// Type of the number of records of an extended DBF +#define MM_MAX_N_CAMPS_DBF_CLASSICA 255 +#define MM_MAX_AMPLADA_CAMP_C_DBF_CLASSICA 254 + +#define MM_MARCA_VERSIO_1_DBF_ESTESA 0x90 +#define MM_MARCA_DBASE4 0x03 +#define MM_MAX_LON_RESERVAT_1_BASE_DADES_XP 2 +#define MM_MAX_LON_DBF_ON_A_LAN_BASE_DADES_XP 12 +#define MM_MAX_LON_RESERVAT_2_BASE_DADES_XP 2 + +#define MM_MAX_LON_DESCRIPCIO_CAMP_DBF MM_CPL_PATH_BUF_SIZE + 100 +#define MM_NUM_IDIOMES_MD_MULTIDIOMA 4 + +#define MM_CAMP_NO_MOSTRABLE 0 +#define MM_CAMP_MOSTRABLE 1 +#define MM_CAMP_MOSTRABLE_QUAN_TE_CONTINGUT 4 +#define MM_CAMP_QUE_MOSTRA_TESAURE 2 +#define MM_CAMP_QUE_MOSTRA_TAULA_BDXP_O_BDODBC 3 + +#define MM_CAMP_INDETERMINAT 0 +#define MM_CATEGORICAL_FIELD 1 +#define MM_CAMP_ORDINAL 2 +#define MM_QUANTITATIVE_CONTINUOUS_FIELD 3 + +#define MM_CAMP_NO_SIMBOLITZABLE 0 +#define MM_CAMP_SIMBOLITZABLE 1 + +#define MM_NO_ES_CAMP_GEOTOPO 0 +#define MM_CAMP_ES_ID_GRAFIC 1 +#define MM_CAMP_ES_MAPA_X 2 +#define MM_CAMP_ES_MAPA_Y 3 +#define MM_CAMP_ES_MAPA_Z 17 +#define MM_CAMP_ES_N_VERTEXS 4 +#define MM_CAMP_ES_LONG_ARC 5 +#define MM_CAMP_ES_LONG_ARCE 6 +#define MM_CAMP_ES_NODE_INI 7 +#define MM_CAMP_ES_NODE_FI 8 +#define MM_CAMP_ES_ARCS_A_NOD 9 +#define MM_CAMP_ES_TIPUS_NODE 10 +#define MM_CAMP_ES_PERIMETRE 11 +#define MM_CAMP_ES_PERIMETREE 12 +#define MM_CAMP_ES_PERIMETRE_3D 18 +#define MM_CAMP_ES_AREA 13 +#define MM_CAMP_ES_AREAE 14 +#define MM_CAMP_ES_AREA_3D 19 +#define MM_CAMP_ES_N_ARCS 15 +#define MM_CAMP_ES_N_POLIG 16 +#define MM_CAMP_ES_PENDENT 20 +#define MM_CAMP_ES_ORIENTACIO 21 + +#define MM_JOC_CARAC_ANSI_MM 1252 +#define MM_JOC_CARAC_ANSI_DBASE 0x58 +#define MM_JOC_CARAC_OEM850_MM 850 +#define MM_JOC_CARAC_OEM850_DBASE 0x14 +#define MM_JOC_CARAC_UTF8_DBF 0xFF +#define MM_JOC_CARAC_UTF8_MM 8 + +typedef unsigned char MM_BYTE; + +#define MM_PRIMER_OFFSET_a_OFFSET_1a_FITXA 8 +#define MM_SEGON_OFFSET_a_OFFSET_1a_FITXA 30 + +#define MM_FIRST_OFFSET_to_N_RECORDS 4 +#define MM_SECOND_OFFSET_to_N_RECORDS 16 + +#define MM_FIELD_NAME_TOO_LONG 0x01 +#define MM_FIELD_NAME_CHARACTER_INVALID 0x02 +#define MM_FIELD_NAME_FIRST_CHARACTER_ 0x04 + +#define MM_MAX_AMPLADA_CAMP_N_DBF 21 +#define MM_MAX_AMPLADA_CAMP_C_DBF 254 +#define MM_MAX_AMPLADA_CAMP_D_DBF 10 + +#define MM_PRIVATE_POINT_DB_FIELDS 1 +#define MM_PRIVATE_ARC_DB_FIELDS 5 +#define MM_PRIVATE_POLYGON_DB_FIELDS 6 + +#define MM_NOU_N_DECIMALS_NO_APLICA 0 +#define MM_APLICAR_NOU_N_DECIMALS 1 +#define MM_NOMES_DOCUMENTAR_NOU_N_DECIMALS 2 +#define MM_PREGUNTA_SI_APLICAR_NOU_N_DECIM 3 +#define MM_CHARACTERS_DOUBLE 40 + +#ifdef GDAL_COMPILATION +CPL_C_END // Necessary for compiling in GDAL project +#endif +#endif //__MM_CONSTANTS_H diff --git a/ogr/ogrsf_frmts/miramon/mm_gdal_constants.h b/ogr/ogrsf_frmts/miramon/mm_gdal_constants.h new file mode 100644 index 000000000000..f42878f7d088 --- 
/dev/null +++ b/ogr/ogrsf_frmts/miramon/mm_gdal_constants.h @@ -0,0 +1,97 @@ +#ifndef __MM_GDAL_CONSTANTS_H +#define __MM_GDAL_CONSTANTS_H +/* -------------------------------------------------------------------- */ +/* Constants used in GDAL and in MiraMon */ +/* -------------------------------------------------------------------- */ +#ifndef GDAL_COMPILATION +#ifdef _WIN64 +#include "gdal\release-1911-x64\cpl_port.h" // For GUInt64 +#else +#include "gdal\release-1911-32\cpl_port.h" // For GUInt64 +#endif +#else +#include "cpl_port.h" // For GUInt64 +CPL_C_START // Necessary for compiling C in GDAL project +#endif // GDAL_COMPILATION + +#if defined(_WIN32) && !defined(strcasecmp) +#define strcasecmp stricmp +#endif + +#define MAX_LOCAL_MESSAGE 5000 + +#define sprintf_UINT64 "%llu" + +// Type of the Feature ID: determines the maximum number of features in a layer. +typedef GUInt64 MM_INTERNAL_FID; +// Offset to the coordinates of the Features. +typedef GUInt64 MM_FILE_OFFSET; + +// Type of the coordinates of a Point, Arc or Polygons points. +typedef double MM_COORD_TYPE; + +// Points + +// StringLines (or Arcs) +typedef GUInt64 MM_N_VERTICES_TYPE; // size_t in MiraMon + +// Polygons (or polypolygons) +typedef GUInt64 MM_POLYGON_ARCS_COUNT; +typedef GUInt64 MM_POLYGON_RINGS_COUNT; + +// Z Part +typedef int MM_SELEC_COORDZ_TYPE; + +// Extended DBF +// Type of the number of fields of an extended DBF +typedef GUInt32 + MM_EXT_DBF_N_FIELDS; //(TIPUS_NUMERADOR_CAMP in MiraMon internal code) +#define MM_MAX_EXT_DBF_N_FIELDS_TYPE UINT32_MAX + +// In MiraMon code: MM_TIPUS_BYTES_PER_CAMP_DBF +typedef GUInt32 MM_BYTES_PER_FIELD_TYPE_DBF; + +// In MiraMon code: MM_TIPUS_BYTES_ACUMULATS_DBF +typedef GUInt32 MM_ACCUMULATED_BYTES_TYPE_DBF; + +// Type of the number of records of an extended DBF +typedef GUInt32 MM_EXT_DBF_N_MULTIPLE_RECORDS; +typedef GUInt64 MM_EXT_DBF_N_RECORDS; +typedef GInt64 MM_EXT_DBF_SIGNED_N_RECORDS; +#define scanf_MM_EXT_DBF_SIGNED_N_RECORDS "%lld" +typedef GInt32 MM_FIRST_RECORD_OFFSET_TYPE; + +typedef GInt32 MM_N_HEIGHT_TYPE; + +#define MM_ARC_HEIGHT_FOR_EACH_VERTEX \ + 1 // In MiraMon code: MM_ARC_ALCADA_PER_CADA_VERTEX +#define MM_ARC_CONSTANT_HEIGHT -1 // In MiraMon code: MM_ARC_ALCADA_CONSTANT +// In MiraMon code: MM_ARC_TIPUS_ALCADA +#define MM_ARC_HEIGHT_TYPE(n) \ + (((n) < 0) ? MM_ARC_CONSTANT_HEIGHT : MM_ARC_HEIGHT_FOR_EACH_VERTEX) +// In MiraMon code: MM_ARC_N_ALCADES +#define MM_ARC_N_HEIGHTS(n) (((n) < 0) ? -(n) : (n)) +// In MiraMon code: MM_ARC_N_TOTAL_ALCADES_DISC +#define MM_ARC_TOTAL_N_HEIGHTS_DISK(n, n_vrt) \ + (((n) < 0) ? 
-(n) : (n) * (MM_N_HEIGHT_TYPE)(n_vrt)) + +#define MM_EscriuOffsetNomEstesBD_XP(bd_xp, i_camp, offset_nom_camp) \ + memcpy((bd_xp)->pField[(i_camp)].reserved_2 + \ + MM_OFFSET_RESERVAT2_OFFSET_NOM_ESTES, \ + &(offset_nom_camp), 4) + +enum MM_TipusNomCamp +{ + NM_CLASSICAL_DBF_AND_VALID_NAME = 0, + MM_DBF_NAME_LOWERCASE_AND_VALID, + MM_VALID_EXTENDED_DBF_NAME, + MM_DBF_NAME_NO_VALID +}; + +#define MM_DonaBytesNomEstesCamp(camp) \ + ((MM_BYTE)((camp)->reserved_2[MM_OFFSET_RESERVED2_EXTENDED_NAME_SIZE])) + +#ifdef GDAL_COMPILATION +CPL_C_END // Necessary for compiling in GDAL project +#endif +#endif //__MM_GDAL_CONSTANTS_H diff --git a/ogr/ogrsf_frmts/miramon/mm_gdal_driver_structs.h b/ogr/ogrsf_frmts/miramon/mm_gdal_driver_structs.h new file mode 100644 index 000000000000..5ae905329dd4 --- /dev/null +++ b/ogr/ogrsf_frmts/miramon/mm_gdal_driver_structs.h @@ -0,0 +1,826 @@ +#ifndef __MM_GDAL_DRIVER_STRUCTS_H +#define __MM_GDAL_DRIVER_STRUCTS_H +/* -------------------------------------------------------------------- */ +/* Necessary functions to read/write a MiraMon Vector File */ +/* -------------------------------------------------------------------- */ + +#ifdef GDAL_COMPILATION +#include "mm_gdal_constants.h" +#include "mm_gdal_structures.h" + +CPL_C_START // Necessary for compiling in GDAL project +#else +#include <stdio.h> // For FILE +#include "mm_constants.h" +#include "mm_gdal\mm_gdal_structures.h" +#endif + +// For MetaData +#define SECTION_VERSIO "VERSIO" +#define KEY_Vers "Vers" +#define KEY_SubVers "SubVers" +#define MM_VERS 4 +#define MM_SUBVERS 3 +#define KEY_VersMetaDades "VersMetaDades" +#define KEY_SubVersMetaDades "SubVersMetaDades" +#define MM_VERS_METADADES 5 +#define MM_SUBVERS_METADADES 0 +#define SECTION_METADADES "METADADES" +#define KEY_FileIdentifier "FileIdentifier" +#define SECTION_IDENTIFICATION "IDENTIFICATION" +#define KEY_code "code" +#define KEY_codeSpace "codeSpace" +#define KEY_DatasetTitle "DatasetTitle" +#define SECTION_OVERVIEW "OVERVIEW" +#define SECTION_OVVW_ASPECTES_TECNICS "OVERVIEW:ASPECTES_TECNICS" +#define KEY_ArcSource "ArcSource" +#define SECTION_EXTENT "EXTENT" +#define KEY_toler_env "toler_env" +#define KEY_MinX "MinX" +#define KEY_MaxX "MaxX" +#define KEY_MinY "MinY" +#define KEY_MaxY "MaxY" +#define KEY_CreationDate "CreationDate" +#define SECTION_SPATIAL_REFERENCE_SYSTEM "SPATIAL_REFERENCE_SYSTEM" +#define SECTION_HORIZONTAL "HORIZONTAL" +#define KEY_HorizontalSystemIdentifier "HorizontalSystemIdentifier" +#define SECTION_TAULA_PRINCIPAL "TAULA_PRINCIPAL" +#define KEY_IdGrafic "IdGrafic" +#define KEY_TipusRelacio "TipusRelacio" +#define KEY_descriptor "descriptor" +#define KEY_HorizontalSystemDefinition "HorizontalSystemDefinition" +#define KEY_unitats "unitats" +#define KEY_unitatsY "unitatsY" +#define KEY_language "language" +#define KEY_Value_eng "eng" +#define KEY_MDIdiom "MDIdiom" +#define KEY_characterSet "characterSet" +#define KEY_Value_characterSet "006" + +// MiraMon feature field names +#define szMMNomCampIdGraficDefecte "ID_GRAFIC" +#define szMMNomCampPerimetreDefecte "PERIMETRE" +#define szMMNomCampAreaDefecte "AREA" +#define szMMNomCampLongitudArcDefecte "LONG_ARC" +#define szMMNomCampNodeIniDefecte "NODE_INI" +#define szMMNomCampNodeFiDefecte "NODE_FI" +#define szMMNomCampArcsANodeDefecte "ARCS_A_NOD" +#define szMMNomCampTipusNodeDefecte "TIPUS_NODE" +#define szMMNomCampNVertexsDefecte "N_VERTEXS" +#define szMMNomCampNArcsDefecte "N_ARCS" +#define szMMNomCampNPoligonsDefecte "N_POLIG" + +#define MAX_RELIABLE_SF_DOUBLE \ + 15 // 
Maximum nr. of reliable significant figures in any double. + +// Initial width of MiraMon fields +#define MM_MIN_WIDTH_ID_GRAFIC 3 +#define MM_MIN_WIDTH_N_VERTEXS 5 +#define MM_MIN_WIDTH_INITIAL_NODE MM_MIN_WIDTH_ID_GRAFIC + 1 +#define MM_MIN_WIDTH_FINAL_NODE MM_MIN_WIDTH_ID_GRAFIC + 1 +#define MM_MIN_WIDTH_ARCS_TO_NODE 1 +#define MM_MIN_WIDTH_LONG 14 // For LONG_ARC and PERIMETRE +#define MM_MIN_WIDTH_AREA 19 // For LONG_ARC and PERIMETRE + +#define MM_MIN_WIDTH_N_ARCS 2 +#define MM_MIN_WIDTH_N_POLIG 2 + +// Types of layers in MiraMon +#define MM_LayerType_Unknown 0 // Unknown type, or DBF alone +#define MM_LayerType_Point 1 // Layer of Points +#define MM_LayerType_Point3d 2 // Layer of 3D Points +#define MM_LayerType_Arc 3 // Layer of Arcs +#define MM_LayerType_Arc3d 4 // Layer of 3D Arcs +#define MM_LayerType_Pol 5 // Layer of Polygons +#define MM_LayerType_Pol3d 6 // Layer of 3D Polygons +#define MM_LayerType_Node 7 // Layer of Nodes (internal) +#define MM_LayerType_Raster 8 // Layer of Raster Type + +#define MM_FIRST_NUMBER_OF_POINTS 10000 +#define MM_INCR_NUMBER_OF_POINTS 1000 +#define MM_FIRST_NUMBER_OF_ARCS 10000 +#define MM_INCR_NUMBER_OF_ARCS 1000 +#define MM_FIRST_NUMBER_OF_NODES 20000 // 2*MM_FIRST_NUMBER_OF_ARCS +#define MM_INCR_NUMBER_OF_NODES 2000 +#define MM_FIRST_NUMBER_OF_POLYGONS 10000 +#define MM_INCR_NUMBER_OF_POLYGONS 1000 +#define MM_FIRST_NUMBER_OF_VERTICES 10000 +#define MM_INCR_NUMBER_OF_VERTICES 1000 + +#define MM_1MB 1048576 // 1 MB of buffer + +// Version asked for user +#define MM_UNKNOWN_VERSION 0 +#define MM_LAST_VERSION 1 +#define MM_32BITS_VERSION 2 +#define MM_64BITS_VERSION 3 + +// AddFeature returns +#define MM_CONTINUE_WRITING_FEATURES 0 +#define MM_FATAL_ERROR_WRITING_FEATURES 1 +#define MM_STOP_WRITING_FEATURES 2 + +// Size of the FID (and OFFSETS) in the current version +#define MM_SIZE_OF_FID_4BYTES_VERSION 4 +#define MM_SIZE_OF_FID_8BYTES_VERSION 8 + +/* Different values that first member of every PAL section element can take*/ +#define MM_EXTERIOR_ARC_SIDE 0x01 +#define MM_END_ARC_IN_RING 0x02 +#define MM_ROTATE_ARC 0x04 + +#define ARC_VRT_INICI 0 +#define ARC_VRT_FI 1 + +#define STATISTICAL_UNDEF_VALUE (2.9E+301) + +#define MAXIMUM_OBJECT_INDEX_IN_2GB_VECTORS UINT32_MAX //_UI32_MAX +#define MAXIMUM_OFFSET_IN_2GB_VECTORS UINT32_MAX //_UI32_MAX + +// Number of rings a polygon could have (it is just an initial approximation) +#define MM_MEAN_NUMBER_OF_RINGS 10 + +// Number of coordinates a feature could have (it is just an initial approximation) +#define MM_MEAN_NUMBER_OF_NCOORDS 100 +#define MM_MEAN_NUMBER_OF_COORDS 1000 + +// Initial and increment number of records and fields. +#define MM_INIT_NUMBER_OF_RECORDS 1 +#define MM_INC_NUMBER_OF_RECORDS 5 +#define MM_INIT_NUMBER_OF_FIELDS 20 +#define MM_INC_NUMBER_OF_FIELDS 10 + + enum FieldType { + /*! Numeric Field */ + MM_Numeric = 0, + /*! Character Fi eld */ + MM_Character = 1, + /*! Data Field */ MM_Data = + 2, + /*! 
Logic Field */ MM_Logic = + 3 + }; + +// Size of disk parts of the MiraMon vector format +// Common header +#define MM_HEADER_SIZE_32_BITS 48 +#define MM_HEADER_SIZE_64_BITS 64 + +// Points +#define MM_SIZE_OF_TL 16 + +// Nodes +#define MM_SIZE_OF_NH_32BITS 8 +#define MM_SIZE_OF_NH_64BITS 12 +#define MM_SIZE_OF_NL_32BITS 4 +#define MM_SIZE_OF_NL_64BITS 8 + +// Arcs +#define MM_SIZE_OF_AH_32BITS 56 +#define MM_SIZE_OF_AH_64BITS 72 +#define MM_SIZE_OF_AL 16 + +// Polygons +#define MM_SIZE_OF_PS_32BITS 8 +#define MM_SIZE_OF_PS_64BITS 16 +#define MM_SIZE_OF_PH_32BITS 64 +#define MM_SIZE_OF_PH_64BITS 80 +#define MM_SIZE_OF_PAL_32BITS 5 +#define MM_SIZE_OF_PAL_64BITS 9 + +// 3D part +#define MM_SIZE_OF_ZH 32 +#define MM_SIZE_OF_ZD_32_BITS 24 +#define MM_SIZE_OF_ZD_64_BITS 32 + +// Coordinates +#define MM_SIZE_OF_COORDINATE 16 + +// Recode in DBF's +#define MM_RECODE_UTF8 0 +#define MM_RECODE_ANSI 1 + +// Language in REL files: +// It is the language of the MiraMon generated descriptors. +// Metadata will not be translated but these descriptors are +// generated from scratch and it is good to use a custom language. +#define MM_DEF_LANGUAGE 0 +#define MM_ENG_LANGUAGE 1 // English language +#define MM_CAT_LANGUAGE 2 // Catalan language +#define MM_SPA_LANGUAGE 3 // Spanish language + +/* -------------------------------------------------------------------- */ +/* Structures */ +/* -------------------------------------------------------------------- */ +// Auxiliary structures +struct MMBoundingBox +{ + double dfMinX; + double dfMaxX; + double dfMinY; + double dfMaxY; +}; + +struct MM_POINT_2D +{ + double dfX; + double dfY; +}; + +struct ARC_VRT_STRUCTURE +{ + struct MM_POINT_2D vertice; + MM_BOOLEAN bIniFi; // boolean: 0=initial, 1=final + MM_INTERNAL_FID nIArc; // Internal arc index + MM_INTERNAL_FID nINod; // Internal node index, empty at the beginning */ +}; + +struct MM_VARIABLES_LLEGEIX_POLS +{ + size_t Nomb_Max_Coord; + size_t Bloc_Max_Coord; + size_t Nomb_Max_Coord_Z; + size_t Nomb_Max_avnp; + size_t Nomb_Max_Elem; + size_t Nomb_Max_vora_de_qui; +}; + +struct MM_FLUSH_INFO +{ + size_t nMyDiskSize; + GUInt64 NTimesFlushed; + + // Pointer to an OPEN file where to flush. + FILE_TYPE *pF; + // Offset in the disk where to flush + MM_FILE_OFFSET OffsetWhereToFlush; + + GUInt64 TotalSavedBytes; // Internal use + + // Block where to be saved + size_t SizeOfBlockToBeSaved; + void *pBlockToBeSaved; + + // Block where to save the pBlockToBeSaved or read from + void *pBlockWhereToSaveOrRead; + // Number of full bytes: flushed every time it is needed + GUInt64 nNumBytes; + // Number of bytes allocated: flushed every time it is needed + GUInt64 nBlockSize; + + // Internal Use + MM_FILE_OFFSET CurrentOffset; +}; + +// MIRAMON METADATA +struct MiraMonVectorMetaData +{ + char *szLayerTitle; + char *aLayerName; + char *aArcFile; // Polygon's arc name + int ePlainLT; // Plain layer type (no 3D specified): MM_LayerType_Point, + // MM_LayerType_Arc, MM_LayerType_Node, MM_LayerType_Pol + char *pSRS; // EPSG code of the spatial reference system. + char *pXUnit; // X units if pszSRS is empty. + char *pYUnit; // Y units if pszSRS is empty. If Y units is empty, + // X unit will be assigned as Y unit by default. + + struct MMBoundingBox hBB; // Bounding box of the entire layer + + // Pointer to a Layer DataBase, used to create MiraMon DBF (extended) file. + struct MiraMonDataBase *pLayerDB; + + // Language in REL files: + // It is the language of the MiraMon generated descriptors. 
+ // Metadata will not be translated but these descriptors are + // generated from scratch and it is good to use a custom language. + char nMMLanguage; +}; + +// MIRAMON DATA BASE +#define MM_GRAPHICAL_ID_INIT_SIZE 5 +#define MM_N_VERTEXS_INIT_SIZE 12 +#define MM_LONG_ARC_INIT_SIZE 12 +#define MM_LONG_ARC_DECIMALS_SIZE 6 +#define MM_NODE_INI_INIT_SIZE 5 +#define MM_NODE_FI_INIT_SIZE 5 + +#define MM_PERIMETRE_INIT_SIZE 13 +#define MM_PERIMETRE_DECIMALS_SIZE 6 +#define MM_AREA_INIT_SIZE 14 +#define MM_AREA_DECIMALS_SIZE 6 + +#define MM_N_ARCS_INIT_SIZE 3 +#define MM_N_ARCS_DECIMALS_SIZE 3 + +#define MM_ARCS_A_NOD_INIT_SIZE 1 + +struct MiraMonFieldValue +{ + MM_BOOLEAN bIsValid; // If 1 the value is filled. If 0, there is no value. +#define MM_INIT_STRING_FIELD_VALUE 50000 // Never less than 10 + MM_EXT_DBF_N_FIELDS nNumDinValue; // Size of the reserved string value + char *pDinValue; // Used if MM_MAX_STRING_FIELD_VALUE is not enough + double dValue; // For double and 32 bit integer numeric values + GInt64 iValue; // For 64 bit integer values. + //MM_BOOLEAN kbValue; // For binary values. +}; + +struct MiraMonRecord +{ + MM_EXT_DBF_N_FIELDS nMaxField; // Number of reserved fields + MM_EXT_DBF_N_FIELDS nNumField; // Number of fields + struct MiraMonFieldValue *pField; // Value of the fields. +}; + +struct MiraMonDataBaseField +{ + char pszFieldName[MM_MAX_LON_FIELD_NAME_DBF + 1]; + char pszFieldDescription[MM_MAX_BYTES_FIELD_DESC + 1]; + enum FieldType eFieldType; // See enum FieldType + GUInt32 nFieldSize; // MM_MAX_BYTES_IN_A_FIELD_EXT as maximum + GUInt32 nNumberOfDecimals; // MM_MAX_BYTES_IN_A_FIELD_EXT as maximum + MM_BOOLEAN bIs64BitInteger; // For 64 bits integer fields +}; + +struct MiraMonDataBase +{ + MM_EXT_DBF_N_FIELDS nNFields; + struct MiraMonDataBaseField *pFields; +}; + +struct MMAdmDatabase +{ + // MiraMon table (extended DBF) + // Name of the extended DBF file + char pszExtDBFLayerName[MM_CPL_PATH_BUF_SIZE]; + // Pointer to the extended DBF file + FILE_TYPE *pFExtDBF; + // Pointer to a MiraMon table (auxiliary) + struct MM_DATA_BASE_XP *pMMBDXP; + // How to write all it to disk + struct MM_FLUSH_INFO FlushRecList; + char *pRecList; // Records list // (II mode) + + // Temporary space where to mount the DBF record. + // Reused every time a feature is created + GUInt64 nNumRecordOnCourse; + char *szRecordOnCourse; +}; + +struct MM_ID_GRAFIC_MULTIPLE_RECORD +{ + MM_FILE_OFFSET offset; + MM_EXT_DBF_N_MULTIPLE_RECORDS + nMR; // Determines the number of the list (multiple record) +}; + +// MIRAMON GEOMETRY + +// Top Header section +struct MM_TH +{ + char aLayerVersion[2]; + char aLayerSubVersion; + + char aFileType[3]; // (PNT, ARC, NOD, POL) + + unsigned short int bIs3d; + unsigned short int bIsMultipolygon; // Only apply to polygons + + unsigned char Flag; // 1 byte: defined at DefTopMM.H + struct MMBoundingBox hBB; + MM_INTERNAL_FID nElemCount; // 4/8 bytes depending on the version + // 8/4 reserved bytes depending on the version +}; + +// Z Header (32 bytes) +struct MM_ZH +{ + size_t nMyDiskSize; + // 16 bytes reserved + double dfBBminz; // 8 bytes Minimum Z + double dfBBmaxz; // 8 bytes Maximum Z +}; + +// Z Description +struct MM_ZD +{ + double dfBBminz; // 8 bytes Minimum Z + double dfBBmaxz; // 8 bytes Maximum Z + GInt32 nZCount; // 4 bytes (signed) + // 4 bytes reserved (only in version 2.0) + MM_FILE_OFFSET nOffsetZ; // 4 or 8 bytes depending on the version +}; + +struct MM_ZSection +{ + // Offset where the section begins in disk. 
It is a precalculated value + // using nElemCount from LayerInfo. TH+n*CL + MM_FILE_OFFSET ZSectionOffset; + struct MM_ZH ZHeader; // (I mode) + + // Number of pZDescription allocated + // nMaxZDescription = nElemCount from LayerInfo + MM_FILE_OFFSET ZDOffset; + size_t nZDDiskSize; + GUInt64 nMaxZDescription; + struct MM_ZD *pZDescription; //(I mode) + + struct MM_FLUSH_INFO FlushZL; + char *pZL; // (II mode) +}; + +// Header of Arcs +struct MM_AH +{ + struct MMBoundingBox dfBB; + MM_N_VERTICES_TYPE nElemCount; // 4/8 bytes depending on the version + MM_FILE_OFFSET nOffset; // 4/8 bytes depending on the version + MM_INTERNAL_FID nFirstIdNode; // 4/8 bytes depending on the version + MM_INTERNAL_FID nLastIdNode; // 4/8 bytes depending on the version + double dfLength; +}; + +// Header of Nodes +struct MM_NH +{ + short int nArcsCount; + char cNodeType; + // 1 reserved byte + MM_FILE_OFFSET nOffset; // 4/8 bytes depending on the version +}; + +// Header of Polygons +struct MM_PH +{ + // Common Arc & Polyons section + struct MMBoundingBox dfBB; + MM_POLYGON_ARCS_COUNT nArcsCount; // 4/8 bytes depending on the version + MM_POLYGON_RINGS_COUNT + nExternalRingsCount; // 4/8 bytes depending on the version + MM_POLYGON_RINGS_COUNT nRingsCount; // 4/8 bytes depending on the version + MM_FILE_OFFSET nOffset; // 4/8 bytes depending on the version + double dfPerimeter; + double dfArea; + //struct GEOMETRIC_I_TOPOLOGIC_POL GeoTopoPol; +}; + +struct MM_PAL_MEM +{ + unsigned char VFG; + MM_INTERNAL_FID nIArc; // 4/8 bytes depending on the version +}; + +/* Every MiraMon file is composed as is specified in documentation. + Here are the structures to every file where we can find two ways + of keeping the information in memory (to be, finally, flushed to the disk) + * (I mode) Pointers to structs that keep information that changes every + time a feature is added. They will be written at the end to the disk. + * (II mode) Memory blocks that are used as buffer blocks to store + information that is going to be flushed (as are) to the disk + periodically instead of writing them to the disk every time a Feature + is added (not efficient). The place where they are going to be flushed + depends on one variable: the number of elements of the layer. 
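+   For instance, in a point layer the TL coordinate block is buffered in
+   FlushTL and periodically flushed to a temporary file (II mode), while the
+   Z descriptors in pZDescription are accumulated in memory and only written
+   to disk at the end (I mode).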
+*/ + +// MiraMon Point Layer: TH, List of CL (coordinates), ZH, ZD, ZL +struct MiraMonPointLayer +{ + // Name of the layer with extension + char pszLayerName[MM_CPL_PATH_BUF_SIZE]; + FILE_TYPE *pF; + + // Coordinates x,y of the points + struct MM_FLUSH_INFO FlushTL; + char *pTL; // (II mode) + char pszTLName[MM_CPL_PATH_BUF_SIZE]; // Temporary file where to flush + FILE_TYPE *pFTL; // Pointer to temporary file where to flush + + // Z section + // Temporary file where the Z coordinates are stored + // if necessary + char psz3DLayerName[MM_CPL_PATH_BUF_SIZE]; + FILE_TYPE *pF3d; + struct MM_ZSection pZSection; + + // MiraMon table (extended DBF) + struct MMAdmDatabase MMAdmDB; + + // Metadata name + char pszREL_LayerName[MM_CPL_PATH_BUF_SIZE]; +}; + +struct MiraMonNodeLayer +{ + char + pszLayerName[MM_CPL_PATH_BUF_SIZE]; // Name of the layer with extension + FILE_TYPE *pF; + + // Header of every node + GUInt32 nSizeNodeHeader; + MM_INTERNAL_FID nMaxNodeHeader; // Number of pNodeHeader allocated + struct MM_NH *pNodeHeader; // (I mode) + + // NL: arcs confuent to node + struct MM_FLUSH_INFO FlushNL; // (II mode) + char *pNL; // + char pszNLName[MM_CPL_PATH_BUF_SIZE]; // Temporary file where to flush + FILE_TYPE *pFNL; // Pointer to temporary file where to flush + + struct MMAdmDatabase MMAdmDB; + + // Metadata name + char pszREL_LayerName[MM_CPL_PATH_BUF_SIZE]; +}; + +struct MiraMonArcLayer +{ + char + pszLayerName[MM_CPL_PATH_BUF_SIZE]; // Name of the layer with extension + FILE_TYPE *pF; + + // Temporal file where the Z coordinates are stored + // if necessary + char psz3DLayerName[MM_CPL_PATH_BUF_SIZE]; + FILE_TYPE *pF3d; + + // Header of every arc + GUInt32 nSizeArcHeader; + MM_INTERNAL_FID nMaxArcHeader; // Number of allocated pArcHeader + struct MM_AH *pArcHeader; // (I mode) + + // AL Section + struct MM_FLUSH_INFO FlushAL; + unsigned short int nALElementSize; // 16 bytes: 2 doubles (coordinates) + char *pAL; // Arc List // (II mode) + char pszALName[MM_CPL_PATH_BUF_SIZE]; // Temporary file where to flush + FILE_TYPE *pFAL; // Pointer to temporary file where to flush + + // Z section + struct MM_ZSection pZSection; + + // Node layer associated to the arc layer + struct MM_TH TopNodeHeader; + struct MiraMonNodeLayer MMNode; + + // Private data + GUInt64 nMaxArcVrt; // Number of allocated + struct ARC_VRT_STRUCTURE *pArcVrt; + MM_FILE_OFFSET nOffsetArc; // It is an auxiliary offset + + struct MMAdmDatabase MMAdmDB; + + // Metadata name + char pszREL_LayerName[MM_CPL_PATH_BUF_SIZE]; +}; + +struct MiraMonPolygonLayer +{ + char + pszLayerName[MM_CPL_PATH_BUF_SIZE]; // Name of the layer with extension + FILE_TYPE *pF; + + // PS part + struct MM_FLUSH_INFO FlushPS; + unsigned short int nPSElementSize; + char *pPS; // Polygon side (II mode) + char pszPSName[MM_CPL_PATH_BUF_SIZE]; // Temporary file where to flush + FILE_TYPE *pFPS; // Pointer to temporary file where to flush + + // Header of every polygon + MM_INTERNAL_FID nMaxPolHeader; // Number of pPolHeader allocated + unsigned short int nPHElementSize; + struct MM_PH *pPolHeader; // (I mode) + + // PAL + struct MM_FLUSH_INFO FlushPAL; + unsigned short int nPALElementSize; + char *pPAL; // Polygon Arc List // (II mode) + char pszPALName[MM_CPL_PATH_BUF_SIZE]; // Temporary file where to flush + FILE_TYPE *pFPAL; // Pointer to temporary file where to flush + + // Arc layer associated to the arc layer + struct MM_TH TopArcHeader; + struct MiraMonArcLayer MMArc; + + struct MMAdmDatabase MMAdmDB; + + // Metadata name + char 
pszREL_LayerName[MM_CPL_PATH_BUF_SIZE]; +}; + +/* +#define MM_VECTOR_LAYER_LAST_VERSION 1 +#define CheckMMVectorLayerVersion(a, r) \ + { \ + if ((a)->Version != MM_VECTOR_LAYER_LAST_VERSION) \ + return (r); \ + } +*/ + +// Information that allows to reuse memory stuff when +// features are being read +struct MiraMonFeature +{ + // Number of parts + MM_POLYGON_RINGS_COUNT nNRings; // =1 for lines and points + MM_POLYGON_RINGS_COUNT nIRing; // The ring is being processed + + // Number of reserved elements in *pNCoord (a vector with number of vertices in each ring) + MM_N_VERTICES_TYPE nMaxpNCoordRing; + MM_N_VERTICES_TYPE *pNCoordRing; // [0]=1 for lines and points + + // Number of reserved elements in *pCoord + MM_N_VERTICES_TYPE nMaxpCoord; + // Number of used elements in *pCoord (only for reading features) + MM_N_VERTICES_TYPE nNumpCoord; + // Coordinate index that is being processed + MM_N_VERTICES_TYPE nICoord; + // List of the coordinates of the feature + struct MM_POINT_2D *pCoord; + + // Number of reserved elements in *flag_VFG + MM_INTERNAL_FID nMaxVFG; + char *flag_VFG; // In case of multipolygons, for each ring: + // if flag_VFG[i]|MM_EXTERIOR_ARC_SIDE: outer ring if set + // if flag_VFG[i]|MM_END_ARC_IN_RING: always set (every ring has only + // one arc) + // if flag_VFG[i]|MM_ROTATE_ARC: coordinates are in the inverse order + // of the read ones + + // List of the Z-coordinates (as many as pCoord) + // Number of reserved elements in *pZCoord + MM_N_VERTICES_TYPE nMaxpZCoord; + // Number of used elements in *pZCoord + MM_N_VERTICES_TYPE nNumpZCoord; + MM_COORD_TYPE *pZCoord; + + // Records of the feature + MM_EXT_DBF_N_MULTIPLE_RECORDS nNumMRecords; + // Number of reserved elements in *pRecords + MM_EXT_DBF_N_MULTIPLE_RECORDS nMaxMRecords; + struct MiraMonRecord *pRecords; + + // Number of features just processed (for writing) + MM_INTERNAL_FID nReadFeatures; +}; + +// There is the possibility of creating a map with all layers +// to visualize it with only one click +struct MiraMonVectMapInfo +{ + char pszMapName[MM_CPL_PATH_BUF_SIZE]; + FILE_TYPE *fMMMap; + int nNumberOfLayers; +}; + +// MIRAMON OBJECT: Contains everything +struct MiraMonVectLayerInfo +{ + // Version of the structure + //GUInt32 Version; + + // Version of the layer + // MM_32BITS_LAYER_VERSION: less than 2 Gbyte files + // MM_64BITS_LAYER_VERSION: more than 2 Gbyte files + char LayerVersion; + + // Layer name + char *pszSrcLayerName; + + // Layer title in metadata + char *szLayerTitle; + + // Pointer to the main REL name (do not free it) + char *pszMainREL_LayerName; + +// To know if we are writing or reading +#define MM_READING_MODE 0 // Reading MiraMon layer +#define MM_WRITING_MODE 1 // Writing MiraMon layer + MM_BOOLEAN ReadOrWrite; + + char pszFlags[10]; // To Open the file + unsigned short int bIsPolygon; + unsigned short int bIsArc; // Also 1 in a polygon layer + unsigned short int bIsNode; // Not used in GDAL + unsigned short int bIsPoint; + unsigned short int bIsDBF; // When there is no geometry + + // In writing mode when one of the features is 3D, the MM layer will be 3D, + // but if none of the features are 3D, then the layer will not be 3D. + unsigned short int bIsReal3d; + + // Final number of elements of the layer. 
+ MM_INTERNAL_FID nFinalElemCount; // Real element count after conversion + + // Header of the layer + size_t nHeaderDiskSize; + struct MM_TH TopHeader; + + int eLT; // Type of layer: Point, line or polygon (3D or not) + int bIsBeenInit; // 1 if layer has already been initialized + + // Point layer + struct MiraMonPointLayer MMPoint; + + // Arc layer + struct MiraMonArcLayer MMArc; + + // Polygon layer + struct MiraMonPolygonLayer MMPolygon; + + // Offset used to write features. + MM_FILE_OFFSET OffsetCheck; + + // EPSG code of the spatial reference system. + char *pSRS; + int nSRS_EPSG; // Ref. system if has EPSG code. + + // In GDAL->MiraMon sense: + // Transformed table from input layer to a MiraMon table. + // This table has to be merged with private MiraMon fields to obtain + // a MiraMon extended DBF + struct MiraMonDataBase *pLayerDB; + + // In MiraMon->GDAL sense: + // MiraMon extended DBF header + // In GDAL->MiraMon, used when there is no geometry + struct MM_DATA_BASE_XP *pMMBDXP; + + // In GDAL->MiraMon, used when there is no geometry + struct MMAdmDatabase MMAdmDBWriting; + + // Offset of every FID in the table + MM_BOOLEAN + isListField; // It determines if fields are list or simple (multirecord). + MM_EXT_DBF_N_RECORDS + nMaxN; // Max number of elements in a field features list + struct MM_ID_GRAFIC_MULTIPLE_RECORD *pMultRecordIndex; +// In case of multirecord, if user wants only one Record 'iMultiRecord' +// specifies which one: 0, 1, 2,... or "Last". There is also the "JSON" option +// that writes a serialized JSON array like (``[1,2]``). +#define MM_MULTIRECORD_LAST -1 +#define MM_MULTIRECORD_NO_MULTIRECORD -2 +#define MM_MULTIRECORD_JSON -3 + int iMultiRecord; + + // Charset of DBF files (same for all) when writing it. + // MM_JOC_CARAC_UTF8_DBF + // MM_JOC_CARAC_ANSI_DBASE; + MM_BYTE nCharSet; + + // Language in REL files: + // It is the language of the MiraMon generated descriptors. + // Metadata will not be translated but these descriptors are + // generated from scratch and it is good to use a custom language. + char nMMLanguage; + + // This is used only to write temporary stuff + char szNFieldAux[MM_MAX_AMPLADA_CAMP_N_DBF]; + // Dynamic string that is used as temporary buffer + // with variable size as needed. Its value is + // highly temporary. Copy in a safe place to save its value. 
+ GUInt64 nNumStringToOperate; + char *szStringToOperate; + + // Temporary elements when reading features from MiraMon files + struct MiraMonFeature ReadFeature; + + MM_SELEC_COORDZ_TYPE nSelectCoordz; // MM_SELECT_FIRST_COORDZ + // MM_SELECT_HIGHEST_COORDZ + // MM_SELECT_LOWEST_COORDZ + + // For polygon layers this is an efficient space to read + // the PAL section + MM_POLYGON_ARCS_COUNT nMaxArcs; + MM_POLYGON_ARCS_COUNT nNumArcs; + struct MM_PAL_MEM *pArcs; + + struct MM_FLUSH_INFO FlushPAL; + + struct MiraMonVectMapInfo *MMMap; // Do not free +}; + +enum DataType +{ + MMDTByte, + MMDTInteger, + MMDTuInteger, + MMDTLong, + MMDTReal, + MMDTDouble, + MMDT4bits +}; + +enum TreatmentVariable +{ + MMTVQuantitativeContinuous, + MMTVOrdinal, + MMTVCategorical +}; + +#ifdef GDAL_COMPILATION +CPL_C_END // Necessary for compiling in GDAL project +#endif +#endif //__MM_GDAL_DRIVER_STRUCTS_H diff --git a/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c b/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c new file mode 100644 index 000000000000..4ce989685b9c --- /dev/null +++ b/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c @@ -0,0 +1,2906 @@ +/****************************************************************************** + * + * Project: OpenGIS Simple Features Reference Implementation + * Purpose: C MiraMon code adapted to be used in GDAL + * Author: Abel Pau, a.pau@creaf.uab.cat, based on the MiraMon codes, + * mainly written by Xavier Pons, Joan Maso (correctly written + * "Mas0xF3"), Abel Pau, Nuria Julia (N0xFAria Juli0xE0), + * Xavier Calaf, Lluis (Llu0xEDs) Pesquer and Alaitz Zabala, from + * CREAF and Universitat Autonoma (Aut0xF2noma) de Barcelona. + * For a complete list of contributors: + * https://www.miramon.cat/eng/QuiSom.htm + ****************************************************************************** + * Copyright (c) 2024, Xavier Pons + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + ****************************************************************************/ + +#ifdef GDAL_COMPILATION +#include "ogr_api.h" // For CPL_C_START +#include "mm_gdal_functions.h" // For CPLStrlcpy() +#include "mm_wrlayr.h" // For calloc_function()... +#else +#include "CmptCmp.h" +#include "mm_gdal\mm_gdal_functions.h" // For CPLStrlcpy() +#include "mm_gdal\mm_wrlayr.h" // For calloc_function()... 
+#endif // GDAL_COMPILATION + +#ifdef GDAL_COMPILATION +CPL_C_START // Necessary for compiling in GDAL project +#include "cpl_string.h" // For CPL_ENC_UTF8 +#else +#ifdef _WIN64 +#include "gdal\release-1911-x64\cpl_string.h" // For CPL_ENC_UTF8 +#else +#include "gdal\release-1911-32\cpl_string.h" // For CPL_ENC_UTF8szNumberOfVerticesEsp +#endif +#endif + + char szInternalGraphicIdentifierEng[MM_MAX_IDENTIFIER_SIZE]; +char szInternalGraphicIdentifierCat[MM_MAX_IDENTIFIER_SIZE]; +char szInternalGraphicIdentifierSpa[MM_MAX_IDENTIFIER_SIZE]; + +char szNumberOfVerticesEng[MM_MAX_IDENTIFIER_SIZE]; +char szNumberOfVerticesCat[MM_MAX_IDENTIFIER_SIZE]; +char szNumberOfVerticesSpa[MM_MAX_IDENTIFIER_SIZE]; + +char szLengthOfAarcEng[MM_MAX_IDENTIFIER_SIZE]; +char szLengthOfAarcCat[MM_MAX_IDENTIFIER_SIZE]; +char szLengthOfAarcSpa[MM_MAX_IDENTIFIER_SIZE]; + +char szInitialNodeEng[MM_MAX_IDENTIFIER_SIZE]; +char szInitialNodeCat[MM_MAX_IDENTIFIER_SIZE]; +char szInitialNodeSpa[MM_MAX_IDENTIFIER_SIZE]; + +char szFinalNodeEng[MM_MAX_IDENTIFIER_SIZE]; +char szFinalNodeCat[MM_MAX_IDENTIFIER_SIZE]; +char szFinalNodeSpa[MM_MAX_IDENTIFIER_SIZE]; + +char szNumberOfArcsToNodeEng[MM_MAX_IDENTIFIER_SIZE]; +char szNumberOfArcsToNodeCat[MM_MAX_IDENTIFIER_SIZE]; +char szNumberOfArcsToNodeSpa[MM_MAX_IDENTIFIER_SIZE]; + +char szNodeTypeEng[MM_MAX_IDENTIFIER_SIZE]; +char szNodeTypeCat[MM_MAX_IDENTIFIER_SIZE]; +char szNodeTypeSpa[MM_MAX_IDENTIFIER_SIZE]; + +char szPerimeterOfThePolygonEng[MM_MAX_IDENTIFIER_SIZE]; +char szPerimeterOfThePolygonCat[MM_MAX_IDENTIFIER_SIZE]; +char szPerimeterOfThePolygonSpa[MM_MAX_IDENTIFIER_SIZE]; + +char szAreaOfThePolygonEng[MM_MAX_IDENTIFIER_SIZE]; +char szAreaOfThePolygonCat[MM_MAX_IDENTIFIER_SIZE]; +char szAreaOfThePolygonSpa[MM_MAX_IDENTIFIER_SIZE]; + +char szNumberOfArcsEng[MM_MAX_IDENTIFIER_SIZE]; +char szNumberOfArcsCat[MM_MAX_IDENTIFIER_SIZE]; +char szNumberOfArcsSpa[MM_MAX_IDENTIFIER_SIZE]; + +char szNumberOfElementaryPolygonsEng[MM_MAX_IDENTIFIER_SIZE]; +char szNumberOfElementaryPolygonsCat[MM_MAX_IDENTIFIER_SIZE]; +char szNumberOfElementaryPolygonsSpa[MM_MAX_IDENTIFIER_SIZE]; + +void MM_FillFieldDescriptorByLanguage(void) +{ + CPLStrlcpy(szInternalGraphicIdentifierEng, "Internal Graphic identifier", + MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szInternalGraphicIdentifierCat, "Identificador Grafic intern", + MM_MAX_IDENTIFIER_SIZE); + *(unsigned char *)&szInternalGraphicIdentifierCat[16] = MM_a_WITH_GRAVE; + CPLStrlcpy(szInternalGraphicIdentifierSpa, "Identificador Grafico interno", + MM_MAX_IDENTIFIER_SIZE); + *(unsigned char *)&szInternalGraphicIdentifierSpa[16] = MM_a_WITH_ACUTE; + + CPLStrlcpy(szNumberOfVerticesEng, "Number of vertices", + MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szNumberOfVerticesCat, "Nombre de vertexs", + MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szNumberOfVerticesSpa, "Numero de vertices", + MM_MAX_IDENTIFIER_SIZE); + *(unsigned char *)&szNumberOfVerticesSpa[1] = MM_u_WITH_ACUTE; + *(unsigned char *)&szNumberOfVerticesSpa[11] = MM_e_WITH_ACUTE; + + CPLStrlcpy(szLengthOfAarcEng, "Length of arc", MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szLengthOfAarcCat, "Longitud de l'arc", MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szLengthOfAarcSpa, "Longitud del arco", MM_MAX_IDENTIFIER_SIZE); + + CPLStrlcpy(szInitialNodeEng, "Initial node", MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szInitialNodeCat, "Node inicial", MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szInitialNodeSpa, "Nodo inicial", MM_MAX_IDENTIFIER_SIZE); + + CPLStrlcpy(szFinalNodeEng, "Final node", MM_MAX_IDENTIFIER_SIZE); + 
CPLStrlcpy(szFinalNodeCat, "Node final", MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szFinalNodeSpa, "Nodo final", MM_MAX_IDENTIFIER_SIZE); + + CPLStrlcpy(szNumberOfArcsToNodeEng, "Number of arcs to node", + MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szNumberOfArcsToNodeCat, "Nombre d'arcs al node", + MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szNumberOfArcsToNodeSpa, "Numero de arcos al nodo", + MM_MAX_IDENTIFIER_SIZE); + *(unsigned char *)&szNumberOfArcsToNodeSpa[1] = MM_u_WITH_ACUTE; + + CPLStrlcpy(szNodeTypeEng, "Node type", MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szNodeTypeCat, "Tipus de node", MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szNodeTypeSpa, "Tipo de nodo", MM_MAX_IDENTIFIER_SIZE); + + CPLStrlcpy(szPerimeterOfThePolygonEng, "Perimeter of the polygon", + MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szPerimeterOfThePolygonCat, "Perimetre del poligon", + MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szPerimeterOfThePolygonSpa, "Perimetro del poligono", + MM_MAX_IDENTIFIER_SIZE); + + *(unsigned char *)&szPerimeterOfThePolygonCat[3] = MM_i_WITH_ACUTE; + *(unsigned char *)&szPerimeterOfThePolygonSpa[3] = MM_i_WITH_ACUTE; + *(unsigned char *)&szPerimeterOfThePolygonCat[17] = MM_i_WITH_ACUTE; + *(unsigned char *)&szPerimeterOfThePolygonSpa[17] = MM_i_WITH_ACUTE; + + CPLStrlcpy(szAreaOfThePolygonEng, "Area of the polygon", + MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szAreaOfThePolygonCat, "Area del poligon", + MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szAreaOfThePolygonSpa, "Area del poligono", + MM_MAX_IDENTIFIER_SIZE); + + *(unsigned char *)&szAreaOfThePolygonCat[0] = MM_A_WITH_GRAVE; + *(unsigned char *)&szAreaOfThePolygonSpa[0] = MM_A_WITH_ACUTE; + *(unsigned char *)&szAreaOfThePolygonCat[12] = MM_i_WITH_ACUTE; + *(unsigned char *)&szAreaOfThePolygonSpa[12] = MM_i_WITH_ACUTE; + + CPLStrlcpy(szNumberOfArcsEng, "Number of arcs", MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szNumberOfArcsCat, "Nombre d'arcs", MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szNumberOfArcsSpa, "Numero de arcos", MM_MAX_IDENTIFIER_SIZE); + + *(unsigned char *)&szNumberOfArcsSpa[1] = MM_u_WITH_ACUTE; + + CPLStrlcpy(szNumberOfElementaryPolygonsEng, "Number of elementary polygons", + MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szNumberOfElementaryPolygonsCat, "Nombre de poligons elementals", + MM_MAX_IDENTIFIER_SIZE); + CPLStrlcpy(szNumberOfElementaryPolygonsSpa, + "Numero de poligonos elementales", MM_MAX_IDENTIFIER_SIZE); + + *(unsigned char *)&szNumberOfElementaryPolygonsSpa[1] = MM_u_WITH_ACUTE; + *(unsigned char *)&szNumberOfElementaryPolygonsCat[13] = MM_i_WITH_ACUTE; + *(unsigned char *)&szNumberOfElementaryPolygonsSpa[13] = MM_i_WITH_ACUTE; +} + +const char *MM_pszLogFilename = nullptr; + +// Logging +const char *MMLog(const char *pszMsg, int nLineNumber) +{ + FILE *f; + + if (MM_pszLogFilename == nullptr) + return pszMsg; + f = fopen(MM_pszLogFilename, "at"); + if (f == nullptr) + return pszMsg; + fprintf(f, "%d: %s\n", nLineNumber, pszMsg); /*ok*/ + fclose(f); + return pszMsg; +} + +static const char MM_EmptyString[] = {""}; +#define MM_SetEndOfString (*MM_EmptyString) +static const char MM_BlankString[] = {" "}; + +void fclose_and_nullify(FILE_TYPE **pFunc) +{ + if (!pFunc || !(*pFunc)) + return; + fclose_function(*pFunc); + *pFunc = nullptr; +} + +// CREATING AN EXTENDED MIRAMON DBF +void MM_InitializeField(struct MM_FIELD *pField) +{ + memset(pField, '\0', sizeof(*pField)); + pField->FieldType = 'C'; + pField->GeoTopoTypeField = MM_NO_ES_CAMP_GEOTOPO; +} + +#define MM_ACCEPTABLE_NUMBER_OF_FIELDS 20000 + +struct MM_FIELD *MM_CreateAllFields(MM_EXT_DBF_N_FIELDS 
nFields) +{ + struct MM_FIELD *camp; + MM_EXT_DBF_N_FIELDS i; + + // MiraMon could accept a number of fields 13.4 million + // but GDAL prefers to limit that to 20.000 to avoid + // too large memory allocation attempts with corrupted datasets + if (nFields > MM_ACCEPTABLE_NUMBER_OF_FIELDS) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "More than 20000 fields not accepted"); + return nullptr; + } + +#ifndef FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION + if (nFields >= UINT32_MAX / sizeof(*camp)) + return nullptr; +#else + if (nFields >= (1000U * 1000 * 1000) / sizeof(*camp)) + return nullptr; +#endif + + if ((camp = calloc_function(nFields * sizeof(*camp))) == nullptr) + return nullptr; + + for (i = 0; i < nFields; i++) + MM_InitializeField(camp + i); + return camp; +} + +static struct MM_DATA_BASE_XP *MM_CreateEmptyHeader(MM_EXT_DBF_N_FIELDS nFields) +{ + struct MM_DATA_BASE_XP *data_base_XP; + + if ((data_base_XP = (struct MM_DATA_BASE_XP *)calloc_function( + sizeof(struct MM_DATA_BASE_XP))) == nullptr) + return nullptr; + + if (nFields == 0) + { + ; + } + else + { + data_base_XP->pField = (struct MM_FIELD *)MM_CreateAllFields(nFields); + if (!data_base_XP->pField) + { + free_function(data_base_XP); + return nullptr; + } + } + data_base_XP->nFields = nFields; + return data_base_XP; +} + +struct MM_DATA_BASE_XP *MM_CreateDBFHeader(MM_EXT_DBF_N_FIELDS n_camps, + MM_BYTE charset) +{ + struct MM_DATA_BASE_XP *bd_xp; + struct MM_FIELD *camp; + MM_EXT_DBF_N_FIELDS i; + + if (nullptr == (bd_xp = MM_CreateEmptyHeader(n_camps))) + return nullptr; + + bd_xp->CharSet = charset; + + strcpy(bd_xp->ReadingMode, "a+b"); + + bd_xp->IdGraficField = n_camps; + bd_xp->IdEntityField = MM_MAX_EXT_DBF_N_FIELDS_TYPE; + bd_xp->dbf_version = (MM_BYTE)((n_camps > MM_MAX_N_CAMPS_DBF_CLASSICA) + ? MM_MARCA_VERSIO_1_DBF_ESTESA + : MM_MARCA_DBASE4); + + for (i = 0, camp = bd_xp->pField; i < n_camps; i++, camp++) + { + MM_InitializeField(camp); + if (i < 99999) + snprintf(camp->FieldName, sizeof(camp->FieldName), "CAMP%05u", + (unsigned)(i + 1)); + else + snprintf(camp->FieldName, sizeof(camp->FieldName), "CM%u", + (unsigned)(i + 1)); + camp->FieldType = 'C'; + camp->DecimalsIfFloat = 0; + camp->BytesPerField = 50; + } + return bd_xp; +} + +MM_BYTE MM_DBFFieldTypeToVariableProcessing(MM_BYTE tipus_camp_DBF) +{ + switch (tipus_camp_DBF) + { + case 'N': + return MM_QUANTITATIVE_CONTINUOUS_FIELD; + case 'D': + case 'C': + case 'L': + return MM_CATEGORICAL_FIELD; + } + return MM_CATEGORICAL_FIELD; +} + +static MM_BYTE MM_GetDefaultDesiredDBFFieldWidth(const struct MM_FIELD *camp) +{ + size_t a, b, c, d, e; + + b = strlen(camp->FieldName); + c = strlen(camp->FieldDescription[0]); + + if (camp->FieldType == 'D') + { + d = (b > c ? b : c); + a = (size_t)camp->BytesPerField + 2; + return (MM_BYTE)(a > d ? a : d); + } + a = camp->BytesPerField; + d = (unsigned int)(b > c ? b : c); + e = (a > d ? a : d); + return (MM_BYTE)(e < 80 ? 
e : 80); +} + +static MM_BOOLEAN MM_is_field_name_lowercase(const char *cadena) +{ + const char *p; + + for (p = cadena; *p; p++) + { + if ((*p >= 'a' && *p <= 'z')) + return TRUE; + } + return FALSE; +} + +static MM_BOOLEAN +MM_Is_classical_DBF_field_name_or_lowercase(const char *cadena) +{ + const char *p; + + for (p = cadena; *p; p++) + { + if ((*p >= 'a' && *p <= 'z') || (*p >= 'A' && *p <= 'Z') || + (*p >= '0' && *p <= '9') || *p == '_') + ; + else + return FALSE; + } + if (cadena[0] == '_') + return FALSE; + return TRUE; +} + +static MM_BOOLEAN +MM_Is_character_valid_for_extended_DBF_field_name(int valor, + int *valor_substitut) +{ + if (valor_substitut) + { + switch (valor) + { + case 32: + *valor_substitut = '_'; + return FALSE; + case 91: + *valor_substitut = '('; + return FALSE; + case 93: + *valor_substitut = ')'; + return FALSE; + case 96: + *valor_substitut = '\''; + return FALSE; + case 127: + *valor_substitut = '_'; + return FALSE; + case 168: + *valor_substitut = '-'; + return FALSE; + } + } + else + { + if (valor < 32 || valor == 91 || valor == 93 || valor == 96 || + valor == 127 || valor == 168) + return FALSE; + } + return TRUE; +} + +static int MM_ISExtendedNameBD_XP(const char *nom_camp) +{ + size_t mida, j; + + mida = strlen(nom_camp); + if (mida >= MM_MAX_LON_FIELD_NAME_DBF) + return MM_DBF_NAME_NO_VALID; + + for (j = 0; j < mida; j++) + { + if (!MM_Is_character_valid_for_extended_DBF_field_name( + (unsigned char)nom_camp[j], nullptr)) + return MM_DBF_NAME_NO_VALID; + } + + if (mida >= MM_MAX_LON_CLASSICAL_FIELD_NAME_DBF) + return MM_VALID_EXTENDED_DBF_NAME; + + if (!MM_Is_classical_DBF_field_name_or_lowercase(nom_camp)) + return MM_VALID_EXTENDED_DBF_NAME; + + if (MM_is_field_name_lowercase(nom_camp)) + return MM_DBF_NAME_LOWERCASE_AND_VALID; + + return NM_CLASSICAL_DBF_AND_VALID_NAME; +} + +static MM_BYTE MM_CalculateBytesExtendedFieldName(struct MM_FIELD *camp) +{ + camp->reserved_2[MM_OFFSET_RESERVED2_EXTENDED_NAME_SIZE] = + (MM_BYTE)strlen(camp->FieldName); + return MM_DonaBytesNomEstesCamp(camp); +} + +static MM_ACCUMULATED_BYTES_TYPE_DBF +MM_CalculateBytesExtendedFieldNames(const struct MM_DATA_BASE_XP *bd_xp) +{ + MM_ACCUMULATED_BYTES_TYPE_DBF bytes_acumulats = 0; + MM_EXT_DBF_N_FIELDS i_camp; + + for (i_camp = 0; i_camp < bd_xp->nFields; i_camp++) + { + if (MM_VALID_EXTENDED_DBF_NAME == + MM_ISExtendedNameBD_XP(bd_xp->pField[i_camp].FieldName)) + bytes_acumulats += + MM_CalculateBytesExtendedFieldName(bd_xp->pField + i_camp); + } + + return bytes_acumulats; +} + +static MM_FIRST_RECORD_OFFSET_TYPE +MM_CalculateBytesFirstRecordOffset(struct MM_DATA_BASE_XP *bd_xp) +{ + if (bd_xp) + return (32 + 32 * bd_xp->nFields + 1 + + MM_CalculateBytesExtendedFieldNames(bd_xp)); + return 0; +} + +static void MM_CheckDBFHeader(struct MM_DATA_BASE_XP *bd_xp) +{ + struct MM_FIELD *camp; + MM_EXT_DBF_N_FIELDS i; + MM_BOOLEAN cal_DBF_estesa = FALSE; + + bd_xp->BytesPerRecord = 1; + for (i = 0, camp = bd_xp->pField; i < bd_xp->nFields; i++, camp++) + { + camp->AccumulatedBytes = bd_xp->BytesPerRecord; + bd_xp->BytesPerRecord += camp->BytesPerField; + if (camp->DesiredWidth == 0) + camp->DesiredWidth = camp->OriginalDesiredWidth = + MM_GetDefaultDesiredDBFFieldWidth(camp); //camp->BytesPerField; + if (camp->FieldType == 'C' && + camp->BytesPerField > MM_MAX_AMPLADA_CAMP_C_DBF_CLASSICA) + cal_DBF_estesa = TRUE; + if (MM_VALID_EXTENDED_DBF_NAME == + MM_ISExtendedNameBD_XP(camp->FieldName)) + cal_DBF_estesa = TRUE; + } + + bd_xp->FirstRecordOffset = 
MM_CalculateBytesFirstRecordOffset(bd_xp); + + if (cal_DBF_estesa || bd_xp->nFields > MM_MAX_N_CAMPS_DBF_CLASSICA || + bd_xp->nRecords > UINT32_MAX) + bd_xp->dbf_version = (MM_BYTE)MM_MARCA_VERSIO_1_DBF_ESTESA; + else + bd_xp->dbf_version = MM_MARCA_DBASE4; +} + +static void +MM_InitializeOffsetExtendedFieldNameFields(struct MM_DATA_BASE_XP *bd_xp, + MM_EXT_DBF_N_FIELDS i_camp) +{ + memset((char *)(&bd_xp->pField[i_camp].reserved_2) + + MM_OFFSET_RESERVAT2_OFFSET_NOM_ESTES, + 0, 4); +} + +static void +MM_InitializeBytesExtendedFieldNameFields(struct MM_DATA_BASE_XP *bd_xp, + MM_EXT_DBF_N_FIELDS i_camp) +{ + memset((char *)(&bd_xp->pField[i_camp].reserved_2) + + MM_OFFSET_RESERVED2_EXTENDED_NAME_SIZE, + 0, 1); +} + +static short int MM_return_common_valid_DBF_field_name_string(char *cadena) +{ + char *p; + short int error_retornat = 0; + + if (!cadena) + return 0; + //strupr(cadena); + for (p = cadena; *p; p++) + { + (*p) = (char)toupper((unsigned char)*p); + if ((*p >= 'A' && *p <= 'Z') || (*p >= '0' && *p <= '9') || *p == '_') + ; + else + { + *p = '_'; + error_retornat |= MM_FIELD_NAME_CHARACTER_INVALID; + } + } + if (cadena[0] == '_') + { + // To avoid having field names starting by '_' this case is + // substituted by a 0 (not a '\0'). + cadena[0] = '0'; + error_retornat |= MM_FIELD_NAME_FIRST_CHARACTER_; + } + return error_retornat; +} + +static short int MM_ReturnValidClassicDBFFieldName(char *cadena) +{ + size_t long_nom_camp; + short int error_retornat = 0; + + long_nom_camp = strlen(cadena); + if ((long_nom_camp < 1) || + (long_nom_camp >= MM_MAX_LON_CLASSICAL_FIELD_NAME_DBF)) + { + cadena[MM_MAX_LON_FIELD_NAME_DBF - 1] = '\0'; + error_retornat |= MM_FIELD_NAME_TOO_LONG; + } + error_retornat |= MM_return_common_valid_DBF_field_name_string(cadena); + return error_retornat; +} + +static MM_BOOLEAN +MM_CheckClassicFieldNameEqual(const struct MM_DATA_BASE_XP *data_base_XP, + const char *classical_name) +{ + MM_EXT_DBF_N_FIELDS i; + + for (i = 0; i < data_base_XP->nFields; i++) + { + if ((strcasecmp(data_base_XP->pField[i].ClassicalDBFFieldName, + classical_name)) == 0 || + (strcasecmp(data_base_XP->pField[i].FieldName, classical_name)) == + 0) + return TRUE; + } + return FALSE; +} + +static char *MM_GiveNewStringWithCharacterInFront(const char *text, + char character) +{ + char *ptr; + size_t i; + + if (!text) + return nullptr; + + i = strlen(text); + if ((ptr = calloc_function(i + 2)) == nullptr) + return nullptr; + + *ptr = character; + memcpy(ptr + 1, text, i + 1); + return ptr; +} + +static char *MM_SetSubIndexFieldNam(const char *nom_camp, + MM_EXT_DBF_N_FIELDS index, + size_t ampladamax) +{ + char *NomCamp_SubIndex; + char *_subindex; + char subindex[19 + 1]; + size_t sizet_subindex; + size_t sizet_nomcamp; + + NomCamp_SubIndex = calloc_function(ampladamax); + if (!NomCamp_SubIndex) + return nullptr; + + CPLStrlcpy(NomCamp_SubIndex, nom_camp, ampladamax); + NomCamp_SubIndex[ampladamax - 1] = '\0'; + + snprintf(subindex, sizeof(subindex), sprintf_UINT64, (GUInt64)index); + + _subindex = MM_GiveNewStringWithCharacterInFront(subindex, '_'); + if (!_subindex) + { + free_function(NomCamp_SubIndex); + return nullptr; + } + + sizet_subindex = strlen(_subindex); + sizet_nomcamp = strlen(NomCamp_SubIndex); + + if (sizet_nomcamp + sizet_subindex > ampladamax - 1) + memcpy(NomCamp_SubIndex + ((ampladamax - 1) - sizet_subindex), + _subindex, strlen(_subindex)); + else + NomCamp_SubIndex = strcat(NomCamp_SubIndex, _subindex); + + free_function(_subindex); + + return NomCamp_SubIndex; +} + 
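+/* Editor's note -- illustrative sketch only, not part of the driver.
+ * MM_SetSubIndexFieldNam() above truncates the proposed classical field name
+ * just enough for a "_<index>" suffix to fit in the requested width, which is
+ * how extended DBF names are later mapped to unique classical aliases in
+ * MM_UpdateEntireHeader(). A self-contained approximation of that behaviour,
+ * with an assumed 11-byte width (10 characters plus the terminating NUL) and
+ * an invented sample name, could look like this:
+ *
+ *   #include <stdio.h>
+ *   #include <string.h>
+ *
+ *   static void make_subindexed_name(char *dst, size_t width,
+ *                                    const char *name, unsigned index)
+ *   {
+ *       char suffix[16];
+ *       size_t name_len, suffix_len;
+ *
+ *       snprintf(dst, width, "%s", name);        // truncate to the width
+ *       snprintf(suffix, sizeof(suffix), "_%u", index);
+ *       name_len = strlen(dst);
+ *       suffix_len = strlen(suffix);
+ *       if (name_len + suffix_len > width - 1)   // overwrite the tail ...
+ *           memcpy(dst + (width - 1) - suffix_len, suffix, suffix_len + 1);
+ *       else                                     // ... or simply append
+ *           strcat(dst, suffix);
+ *   }
+ *
+ *   int main(void)
+ *   {
+ *       char alias[11];
+ *       make_subindexed_name(alias, sizeof(alias), "PRECIPITATION", 3);
+ *       printf("%s\n", alias);   // prints "PRECIPIT_3"
+ *       return 0;
+ *   }
+ */
+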
+MM_FIRST_RECORD_OFFSET_TYPE +MM_GiveOffsetExtendedFieldName(const struct MM_FIELD *camp) +{ + MM_FIRST_RECORD_OFFSET_TYPE offset_nom_camp; + + memcpy(&offset_nom_camp, + (char *)(&camp->reserved_2) + MM_OFFSET_RESERVAT2_OFFSET_NOM_ESTES, + 4); + return offset_nom_camp; +} + +int MM_WriteNRecordsMMBD_XPFile(struct MMAdmDatabase *MMAdmDB) +{ + if (!MMAdmDB->pMMBDXP || !MMAdmDB->pFExtDBF) + return 0; + + // Updating number of features in features table + fseek_function(MMAdmDB->pFExtDBF, MM_FIRST_OFFSET_to_N_RECORDS, SEEK_SET); + + if (MMAdmDB->pMMBDXP->nRecords > UINT32_MAX) + { + MMAdmDB->pMMBDXP->dbf_version = MM_MARCA_VERSIO_1_DBF_ESTESA; + } + else + { + MMAdmDB->pMMBDXP->dbf_version = MM_MARCA_DBASE4; + } + + { + GUInt32 nRecords32LowBits = + (GUInt32)(MMAdmDB->pMMBDXP->nRecords & UINT32_MAX); + if (fwrite_function(&nRecords32LowBits, 4, 1, MMAdmDB->pFExtDBF) != 1) + return 1; + } + + fseek_function(MMAdmDB->pFExtDBF, MM_SECOND_OFFSET_to_N_RECORDS, SEEK_SET); + if (MMAdmDB->pMMBDXP->dbf_version == MM_MARCA_VERSIO_1_DBF_ESTESA) + { + /* from 16 to 19, position MM_SECOND_OFFSET_to_N_RECORDS */ + GUInt32 nRecords32HighBits = + (GUInt32)(MMAdmDB->pMMBDXP->nRecords >> 32); + if (fwrite_function(&nRecords32HighBits, 4, 1, MMAdmDB->pFExtDBF) != 1) + return 1; + + /* from 20 to 27 */ + if (fwrite_function(&(MMAdmDB->pMMBDXP->dbf_on_a_LAN), 8, 1, + MMAdmDB->pFExtDBF) != 1) + return 1; + } + else + { + if (fwrite_function(&(MMAdmDB->pMMBDXP->dbf_on_a_LAN), 12, 1, + MMAdmDB->pFExtDBF) != 1) + return 1; + } + + return 0; +} + +static MM_BOOLEAN MM_UpdateEntireHeader(struct MM_DATA_BASE_XP *data_base_XP) +{ + MM_BYTE variable_byte; + MM_EXT_DBF_N_FIELDS i, j = 0; + char zero[11] = {0}; + const MM_BYTE byte_zero = 0; + char ModeLectura_previ[4] = ""; + MM_FIRST_RECORD_OFFSET_TYPE bytes_acumulats; + MM_BYTE name_size; + int estat; + char nom_camp[MM_MAX_LON_FIELD_NAME_DBF]; + size_t retorn_fwrite; + MM_BOOLEAN table_should_be_closed = FALSE; + + if (data_base_XP->pfDataBase == nullptr) + { + strcpy(ModeLectura_previ, data_base_XP->ReadingMode); + strcpy(data_base_XP->ReadingMode, "wb"); + + if ((data_base_XP->pfDataBase = + fopen_function(data_base_XP->szFileName, + data_base_XP->ReadingMode)) == nullptr) + { + return FALSE; + } + + table_should_be_closed = TRUE; + } + + if ((data_base_XP->nFields) > MM_MAX_N_CAMPS_DBF_CLASSICA) + data_base_XP->dbf_version = MM_MARCA_VERSIO_1_DBF_ESTESA; + else if ((data_base_XP->nRecords) > UINT32_MAX) + data_base_XP->dbf_version = MM_MARCA_VERSIO_1_DBF_ESTESA; + else + { + if (data_base_XP->dbf_version == MM_MARCA_VERSIO_1_DBF_ESTESA) + data_base_XP->dbf_version = MM_MARCA_DBASE4; + for (i = 0; i < data_base_XP->nFields; i++) + { + if (data_base_XP->pField[i].FieldType == 'C' && + data_base_XP->pField[i].BytesPerField > + MM_MAX_AMPLADA_CAMP_C_DBF_CLASSICA) + { + data_base_XP->dbf_version = MM_MARCA_VERSIO_1_DBF_ESTESA; + break; + } + if (MM_VALID_EXTENDED_DBF_NAME == + MM_ISExtendedNameBD_XP(data_base_XP->pField[i].FieldName)) + { + data_base_XP->dbf_version = MM_MARCA_VERSIO_1_DBF_ESTESA; + break; + } + } + } + + // Writing header + fseek_function(data_base_XP->pfDataBase, 0, SEEK_SET); + + /* Byte 0 */ + if (fwrite_function(&(data_base_XP->dbf_version), 1, 1, + data_base_XP->pfDataBase) != 1) + { + return FALSE; + } + + /* MM_BYTE from 1 to 3 */ + variable_byte = (MM_BYTE)(data_base_XP->year - 1900); + if (fwrite_function(&variable_byte, 1, 1, data_base_XP->pfDataBase) != 1) + return FALSE; + if (fwrite_function(&(data_base_XP->month), 1, 1, + 
data_base_XP->pfDataBase) != 1) + return FALSE; + if (fwrite_function(&(data_base_XP->day), 1, 1, data_base_XP->pfDataBase) != + 1) + return FALSE; + + /* from 4 a 7, position MM_FIRST_OFFSET_to_N_RECORDS */ + { + GUInt32 nRecords32LowBits = + (GUInt32)(data_base_XP->nRecords & UINT32_MAX); + if (fwrite_function(&nRecords32LowBits, 4, 1, + data_base_XP->pfDataBase) != 1) + return FALSE; + } + + /* from 8 a 9, position MM_PRIMER_OFFSET_a_OFFSET_1a_FITXA */ + if (fwrite_function(&(data_base_XP->FirstRecordOffset), 2, 1, + data_base_XP->pfDataBase) != 1) + return FALSE; + /* from 10 to 11, & from 12 to 13 */ + if (MM_ES_DBF_ESTESA(data_base_XP->dbf_version)) + { + if (fwrite_function(&(data_base_XP->BytesPerRecord), + sizeof(MM_ACCUMULATED_BYTES_TYPE_DBF), 1, + data_base_XP->pfDataBase) != 1) + return FALSE; + } + else + { + /* from 10 to 11 */ + if (fwrite_function(&(data_base_XP->BytesPerRecord), 2, 1, + data_base_XP->pfDataBase) != 1) + return FALSE; + /* from 12 to 13 */ + if (fwrite_function(&(data_base_XP->reserved_1), 2, 1, + data_base_XP->pfDataBase) != 1) + return FALSE; + } + /* byte 14 */ + if (fwrite_function(&(data_base_XP->transaction_flag), 1, 1, + data_base_XP->pfDataBase) != 1) + return FALSE; + /* byte 15 */ + if (fwrite_function(&(data_base_XP->encryption_flag), 1, 1, + data_base_XP->pfDataBase) != 1) + return FALSE; + + /* from 16 to 27 */ + if (data_base_XP->nRecords > UINT32_MAX) + { + /* from 16 to 19, position MM_SECOND_OFFSET_to_N_RECORDS */ + GUInt32 nRecords32HighBits = (GUInt32)(data_base_XP->nRecords >> 32); + if (fwrite_function(&nRecords32HighBits, 4, 1, + data_base_XP->pfDataBase) != 1) + return FALSE; + + /* from 20 to 27 */ + if (fwrite_function(&(data_base_XP->dbf_on_a_LAN), 8, 1, + data_base_XP->pfDataBase) != 1) + return FALSE; + } + else + { + /* from 16 to 27 */ + if (fwrite_function(&(data_base_XP->dbf_on_a_LAN), 12, 1, + data_base_XP->pfDataBase) != 1) + return FALSE; + } + /* byte 28 */ + if (fwrite_function(&(data_base_XP->MDX_flag), 1, 1, + data_base_XP->pfDataBase) != 1) + return FALSE; + + /* Byte 29 */ + if (fwrite_function(&(data_base_XP->CharSet), 1, 1, + data_base_XP->pfDataBase) != 1) + return FALSE; + + /* Bytes from 30 to 31, in position MM_SEGON_OFFSET_a_OFFSET_1a_FITXA */ + if (MM_ES_DBF_ESTESA(data_base_XP->dbf_version)) + { + if (fwrite_function(((char *)&(data_base_XP->FirstRecordOffset)) + 2, 2, + 1, data_base_XP->pfDataBase) != 1) + return FALSE; + } + else + { + if (fwrite_function(&(data_base_XP->reserved_2), 2, 1, + data_base_XP->pfDataBase) != 1) + return FALSE; + } + + /* At 32th byte fields description begins */ + /* Every description is 32 bytes long */ + bytes_acumulats = 32 + 32 * (data_base_XP->nFields) + 1; + + for (i = 0; i < data_base_XP->nFields; i++) + { + /* Bytes from 0 to 10 -> Field name, \0 finished */ + estat = MM_ISExtendedNameBD_XP(data_base_XP->pField[i].FieldName); + if (estat == NM_CLASSICAL_DBF_AND_VALID_NAME || + estat == MM_DBF_NAME_LOWERCASE_AND_VALID) + { + j = (short)strlen(data_base_XP->pField[i].FieldName); + + retorn_fwrite = fwrite_function(&data_base_XP->pField[i].FieldName, + 1, j, data_base_XP->pfDataBase); + if (retorn_fwrite != (size_t)j) + { + return FALSE; + } + MM_InitializeOffsetExtendedFieldNameFields(data_base_XP, i); + MM_InitializeBytesExtendedFieldNameFields(data_base_XP, i); + } + else if (estat == MM_VALID_EXTENDED_DBF_NAME) + { + if (*(data_base_XP->pField[i].ClassicalDBFFieldName) == '\0') + { + char nom_temp[MM_MAX_LON_FIELD_NAME_DBF]; + + CPLStrlcpy(nom_temp, 
data_base_XP->pField[i].FieldName, + MM_MAX_LON_FIELD_NAME_DBF); + MM_ReturnValidClassicDBFFieldName(nom_temp); + nom_temp[MM_MAX_LON_CLASSICAL_FIELD_NAME_DBF - 1] = '\0'; + if ((MM_CheckClassicFieldNameEqual(data_base_XP, nom_temp)) == + TRUE) + { + char *c; + + c = MM_SetSubIndexFieldNam( + nom_temp, i, MM_MAX_LON_CLASSICAL_FIELD_NAME_DBF); + + if (c) + { + j = 0; + while (MM_CheckClassicFieldNameEqual(data_base_XP, c) == + TRUE && + j < data_base_XP->nFields) + { + free_function(c); + c = MM_SetSubIndexFieldNam( + nom_temp, ++j, + MM_MAX_LON_CLASSICAL_FIELD_NAME_DBF); + } + if (c) + { + CPLStrlcpy( + data_base_XP->pField[i].ClassicalDBFFieldName, + c, + sizeof(data_base_XP->pField[i] + .ClassicalDBFFieldName)); + free_function(c); + } + } + } + else + CPLStrlcpy( + data_base_XP->pField[i].ClassicalDBFFieldName, nom_temp, + sizeof(data_base_XP->pField[i].ClassicalDBFFieldName)); + } + + // This is a 11-byte fixed size field consisting of the filename + // and it's been padding calculated some next lines. + j = (short)strlen(data_base_XP->pField[i].ClassicalDBFFieldName); + + retorn_fwrite = + fwrite_function(&data_base_XP->pField[i].ClassicalDBFFieldName, + 1, j, data_base_XP->pfDataBase); + if (retorn_fwrite != (size_t)j) + { + return FALSE; + } + + name_size = + MM_CalculateBytesExtendedFieldName(data_base_XP->pField + i); + MM_EscriuOffsetNomEstesBD_XP(data_base_XP, i, bytes_acumulats); + bytes_acumulats += name_size; + } + else + { + return FALSE; + } + + if (fwrite_function(zero, 1, 11 - j, data_base_XP->pfDataBase) != + 11 - (size_t)j) + { + return FALSE; + } + /* Byte 11, Field type */ + if (fwrite_function(&data_base_XP->pField[i].FieldType, 1, 1, + data_base_XP->pfDataBase) != 1) + { + return FALSE; + } + /* Bytes 12 to 15 --> Reserved */ + if (fwrite_function(&data_base_XP->pField[i].reserved_1, 4, 1, + data_base_XP->pfDataBase) != 1) + { + return FALSE; + } + /* Byte 16, or OFFSET_BYTESxCAMP_CAMP_CLASSIC --> BytesPerField */ + if (MM_ES_DBF_ESTESA(data_base_XP->dbf_version) && + data_base_XP->pField[i].FieldType == 'C') + { + if (fwrite_function((void *)&byte_zero, 1, 1, + data_base_XP->pfDataBase) != 1) + { + return FALSE; + } + } + else + { + if (fwrite_function(&data_base_XP->pField[i].BytesPerField, 1, 1, + data_base_XP->pfDataBase) != 1) + { + return FALSE; + } + } + /* 17th byte 17 --> In fields of type 'N' and 'F' indicates decimal places.*/ + if (data_base_XP->pField[i].FieldType == 'N' || + data_base_XP->pField[i].FieldType == 'F') + { + if (fwrite_function(&data_base_XP->pField[i].DecimalsIfFloat, 1, 1, + data_base_XP->pfDataBase) != 1) + { + return FALSE; + } + } + else + { + if (fwrite_function(zero, 1, 1, data_base_XP->pfDataBase) != 1) + { + return FALSE; + } + } + if (MM_ES_DBF_ESTESA(data_base_XP->dbf_version) && + data_base_XP->pField[i].FieldType == 'C') + { + /* Bytes from 18 to 20 --> Reserved */ + if (fwrite_function(&data_base_XP->pField[i].reserved_2, + 20 - 18 + 1, 1, data_base_XP->pfDataBase) != 1) + { + return FALSE; + } + /* Bytes from 21 to 24 --> OFFSET_BYTESxCAMP_CAMP_ESPECIAL, special fields, like C + in extended DBF */ + if (fwrite_function(&data_base_XP->pField[i].BytesPerField, + sizeof(MM_BYTES_PER_FIELD_TYPE_DBF), 1, + data_base_XP->pfDataBase) != 1) + { + return FALSE; + } + + /* Bytes from 25 to 30 --> Reserved */ + if (fwrite_function(&data_base_XP->pField[i].reserved_2[25 - 18], + 30 - 25 + 1, 1, data_base_XP->pfDataBase) != 1) + { + return FALSE; + } + } + else + { + /* Bytes de 21 a 24 --> OFFSET_BYTESxCAMP_CAMP_ESPECIAL, special 
fields, like C */ + memset(data_base_XP->pField[i].reserved_2 + + MM_OFFSET_RESERVAT2_BYTESxCAMP_CAMP_ESPECIAL, + '\0', 4); + /* Bytes from 18 to 30 --> Reserved */ + if (fwrite_function(&data_base_XP->pField[i].reserved_2, 13, 1, + data_base_XP->pfDataBase) != 1) + { + return FALSE; + } + } + /* Byte 31 --> MDX flag. */ + if (fwrite_function(&data_base_XP->pField[i].MDX_field_flag, 1, 1, + data_base_XP->pfDataBase) != 1) + { + return FALSE; + } + } + + variable_byte = 13; + if (fwrite_function(&variable_byte, 1, 1, data_base_XP->pfDataBase) != 1) + return FALSE; + + if (data_base_XP->FirstRecordOffset != bytes_acumulats) + return FALSE; + + // Extended fields + for (i = 0; i < data_base_XP->nFields; i++) + { + if (MM_VALID_EXTENDED_DBF_NAME == + MM_ISExtendedNameBD_XP(data_base_XP->pField[i].FieldName)) + { + bytes_acumulats = + MM_GiveOffsetExtendedFieldName(data_base_XP->pField + i); + name_size = MM_DonaBytesNomEstesCamp(data_base_XP->pField + i); + + fseek_function(data_base_XP->pfDataBase, bytes_acumulats, SEEK_SET); + + strcpy(nom_camp, data_base_XP->pField[i].FieldName); + //CanviaJocCaracPerEscriureDBF(nom_camp, JocCaracDBFaMM(data_base_XP->CharSet, ParMM.JocCaracDBFPerDefecte)); + + retorn_fwrite = fwrite_function(nom_camp, 1, name_size, + data_base_XP->pfDataBase); + + if (retorn_fwrite != (size_t)name_size) + return FALSE; + } + } + + if (table_should_be_closed) + { + fclose_and_nullify(&data_base_XP->pfDataBase); + } + + return TRUE; +} /* End of MM_UpdateEntireHeader() */ + +MM_BOOLEAN MM_CreateDBFFile(struct MM_DATA_BASE_XP *bd_xp, + const char *NomFitxer) +{ + if (!NomFitxer || MMIsEmptyString(NomFitxer) || !bd_xp) + return FALSE; + + MM_CheckDBFHeader(bd_xp); + CPLStrlcpy(bd_xp->szFileName, NomFitxer, sizeof(bd_xp->szFileName)); + return MM_UpdateEntireHeader(bd_xp); +} + +void MM_ReleaseMainFields(struct MM_DATA_BASE_XP *data_base_XP) +{ + MM_EXT_DBF_N_FIELDS i; + size_t j; + char **cadena; + + if (data_base_XP->pField) + { + for (i = 0; i < data_base_XP->nFields; i++) + { + for (j = 0; j < MM_NUM_IDIOMES_MD_MULTIDIOMA; j++) + { + cadena = data_base_XP->pField[i].Separator; + if (cadena[j]) + { + free_function(cadena[j]); + cadena[j] = nullptr; + } + } + } + free_function(data_base_XP->pField); + data_base_XP->pField = nullptr; + data_base_XP->nFields = 0; + } + return; +} + +// READING THE HEADER OF AN EXTENDED DBF +// Free with MM_ReleaseDBFHeader() +int MM_ReadExtendedDBFHeaderFromFile(const char *szFileName, + struct MM_DATA_BASE_XP *pMMBDXP, + const char *pszRelFile) +{ + MM_BYTE variable_byte; + FILE_TYPE *pf; + unsigned short int two_bytes; + MM_EXT_DBF_N_FIELDS nIField; + MM_FIRST_RECORD_OFFSET_TYPE offset_primera_fitxa; + MM_FIRST_RECORD_OFFSET_TYPE offset_fals = 0; + MM_BOOLEAN incoherent_record_size = FALSE; + MM_BYTE un_byte; + MM_BYTES_PER_FIELD_TYPE_DBF bytes_per_camp; + MM_BYTE tretze_bytes[13]; + MM_FIRST_RECORD_OFFSET_TYPE offset_possible; + MM_BYTE some_problems_when_reading = 0; + MM_FILE_OFFSET offset_reintent = 0; // For retrying + char cpg_file[MM_CPL_PATH_BUF_SIZE]; + char *pszDesc; + char section[MM_MAX_LON_FIELD_NAME_DBF + 25]; // TAULA_PRINCIPAL:field_name + GUInt32 nRecords32LowBits; + char *pszString; + + if (!szFileName) + return 1; + + CPLStrlcpy(pMMBDXP->szFileName, szFileName, sizeof(pMMBDXP->szFileName)); + strcpy(pMMBDXP->ReadingMode, "rb"); + + if ((pMMBDXP->pfDataBase = fopen_function(pMMBDXP->szFileName, + pMMBDXP->ReadingMode)) == nullptr) + return 1; + + pf = pMMBDXP->pfDataBase; + + fseek_function(pf, 0, SEEK_SET); + /* ====== 
Header reading (32 bytes) =================== */ + offset_primera_fitxa = 0; + + if (1 != fread_function(&(pMMBDXP->dbf_version), 1, 1, pf) || + 1 != fread_function(&variable_byte, 1, 1, pf) || + 1 != fread_function(&(pMMBDXP->month), 1, 1, pf) || + 1 != fread_function(&(pMMBDXP->day), 1, 1, pf)) + { + fclose_and_nullify(&pMMBDXP->pfDataBase); + return 1; + } + + if (1 != fread_function(&nRecords32LowBits, 4, 1, pf)) + { + fclose_and_nullify(&pMMBDXP->pfDataBase); + return 1; + } + + if (1 != fread_function(&offset_primera_fitxa, 2, 1, pf)) + { + fclose_and_nullify(&pMMBDXP->pfDataBase); + return 1; + } + + pMMBDXP->year = (short)(1900 + variable_byte); +reintenta_lectura_per_si_error_CreaCampBD_XP: + + if (some_problems_when_reading > 0) + { + if (!MM_ES_DBF_ESTESA(pMMBDXP->dbf_version)) + { + offset_fals = + offset_primera_fitxa & (MM_FIRST_RECORD_OFFSET_TYPE)(~31); + } + } + else + offset_reintent = ftell_function(pf); + + if (1 != fread_function(&two_bytes, 2, 1, pf) || + 1 != fread_function(&(pMMBDXP->reserved_1), 2, 1, pf) || + 1 != fread_function(&(pMMBDXP->transaction_flag), 1, 1, pf) || + 1 != fread_function(&(pMMBDXP->encryption_flag), 1, 1, pf) || + 1 != fread_function(&(pMMBDXP->dbf_on_a_LAN), 12, 1, pf)) + { + free_function(pMMBDXP->pField); + pMMBDXP->pField = nullptr; + pMMBDXP->nFields = 0; + fclose_and_nullify(&pMMBDXP->pfDataBase); + return 1; + } + + if (MM_ES_DBF_ESTESA(pMMBDXP->dbf_version)) + { + GUInt32 nRecords32HighBits; + + // Getting 4 bytes of the 8 bytes variable + memcpy(&nRecords32HighBits, &pMMBDXP->dbf_on_a_LAN, 4); + + // Getting other 4 bytes of the 8 bytes variable + // The cast to GUInt64 of the high 32 bits is important to + // make sure the left bit shift is done correctly + pMMBDXP->nRecords = + ((GUInt64)nRecords32HighBits << 32) | nRecords32LowBits; + } + else + pMMBDXP->nRecords = nRecords32LowBits; + + if (1 != fread_function(&(pMMBDXP->MDX_flag), 1, 1, pf) || + 1 != fread_function(&(pMMBDXP->CharSet), 1, 1, pf) || + 1 != fread_function(&(pMMBDXP->reserved_2), 2, 1, pf)) + { + free_function(pMMBDXP->pField); + pMMBDXP->pField = nullptr; + pMMBDXP->nFields = 0; + fclose_and_nullify(&pMMBDXP->pfDataBase); + return 1; + } + + // Checking for a cpg file + if (pMMBDXP->CharSet == 0) + { + FILE_TYPE *f_cpg; + char charset_cpg[11]; + + strcpy(cpg_file, pMMBDXP->szFileName); + CPLStrlcpy(cpg_file, reset_extension(cpg_file, "cpg"), + sizeof(cpg_file)); + f_cpg = fopen_function(cpg_file, "r"); + if (f_cpg) + { + char *p; + size_t read_bytes; + fseek_function(f_cpg, 0L, SEEK_SET); + if (11 > (read_bytes = fread_function(charset_cpg, 1, 10, f_cpg))) + { + charset_cpg[read_bytes] = '\0'; + p = strstr(charset_cpg, "UTF-8"); + if (p) + pMMBDXP->CharSet = MM_JOC_CARAC_UTF8_DBF; + p = strstr(charset_cpg, "UTF8"); + if (p) + pMMBDXP->CharSet = MM_JOC_CARAC_UTF8_DBF; + p = strstr(charset_cpg, "ISO-8859-1"); + if (p) + pMMBDXP->CharSet = MM_JOC_CARAC_ANSI_DBASE; + } + fclose_function(f_cpg); + } + } + if (MM_ES_DBF_ESTESA(pMMBDXP->dbf_version)) + { + unsigned short FirstRecordOffsetLow16Bits; + unsigned short FirstRecordOffsetHigh16Bits; + + memcpy(&FirstRecordOffsetLow16Bits, &offset_primera_fitxa, 2); + memcpy(&FirstRecordOffsetHigh16Bits, &pMMBDXP->reserved_2, 2); + + pMMBDXP->FirstRecordOffset = + ((GUInt32)FirstRecordOffsetHigh16Bits << 16) | + FirstRecordOffsetLow16Bits; + + if (some_problems_when_reading > 0) + offset_fals = pMMBDXP->FirstRecordOffset; + + memcpy(&FirstRecordOffsetLow16Bits, &two_bytes, 2); + memcpy(&FirstRecordOffsetHigh16Bits, 
&pMMBDXP->reserved_1, 2); + + pMMBDXP->BytesPerRecord = ((GUInt32)FirstRecordOffsetHigh16Bits << 16) | + FirstRecordOffsetLow16Bits; + } + else + { + pMMBDXP->FirstRecordOffset = offset_primera_fitxa; + pMMBDXP->BytesPerRecord = two_bytes; + } + + /* ====== Record structure ========================= */ + + if (some_problems_when_reading > 0) + { + if ((offset_fals - 1) - 32 < 0) + pMMBDXP->nFields = 0; + else + pMMBDXP->nFields = + (MM_EXT_DBF_N_FIELDS)(((offset_fals - 1) - 32) / 32); + } + else + { + // There's a chance that bytes_acumulats could overflow if it's GUInt32. + // For that reason it's better to promote to GUInt64. + GUInt64 bytes_acumulats = 1; + + pMMBDXP->nFields = 0; + + fseek_function(pf, 0, SEEK_END); + if (32 - 1 < ftell_function(pf)) + { + fseek_function(pf, 32, SEEK_SET); + do + { + bytes_per_camp = 0; + fseek_function( + pf, + 32 + (MM_FILE_OFFSET)pMMBDXP->nFields * 32 + + (MM_MAX_LON_CLASSICAL_FIELD_NAME_DBF + 1 + 4), + SEEK_SET); + if (1 != fread_function(&bytes_per_camp, 1, 1, pf) || + 1 != fread_function(&un_byte, 1, 1, pf) || + 1 != fread_function(&tretze_bytes, + 3 + sizeof(bytes_per_camp), 1, pf)) + { + free_function(pMMBDXP->pField); + pMMBDXP->pField = nullptr; + pMMBDXP->nFields = 0; + fclose_and_nullify(&pMMBDXP->pfDataBase); + return 1; + } + if (bytes_per_camp == 0) + memcpy(&bytes_per_camp, (char *)(&tretze_bytes) + 3, + sizeof(bytes_per_camp)); + + bytes_acumulats += bytes_per_camp; + pMMBDXP->nFields++; + } while (bytes_acumulats < pMMBDXP->BytesPerRecord); + } + } + + if (pMMBDXP->nFields != 0) + { + free_function(pMMBDXP->pField); + pMMBDXP->pField = MM_CreateAllFields(pMMBDXP->nFields); + if (!pMMBDXP->pField) + { + pMMBDXP->nFields = 0; + fclose_and_nullify(&pMMBDXP->pfDataBase); + return 1; + } + } + else + { + free_function(pMMBDXP->pField); + pMMBDXP->pField = nullptr; + } + + fseek_function(pf, 32, SEEK_SET); + for (nIField = 0; nIField < pMMBDXP->nFields; nIField++) + { + if (1 != fread_function(pMMBDXP->pField[nIField].FieldName, + MM_MAX_LON_CLASSICAL_FIELD_NAME_DBF, 1, pf) || + 1 != fread_function(&(pMMBDXP->pField[nIField].FieldType), 1, 1, + pf) || + 1 != fread_function(&(pMMBDXP->pField[nIField].reserved_1), 4, 1, + pf) || + 1 != fread_function(&(pMMBDXP->pField[nIField].BytesPerField), 1, 1, + pf) || + 1 != fread_function(&(pMMBDXP->pField[nIField].DecimalsIfFloat), 1, + 1, pf) || + 1 != fread_function(&(pMMBDXP->pField[nIField].reserved_2), 13, 1, + pf) || + 1 != fread_function(&(pMMBDXP->pField[nIField].MDX_field_flag), 1, + 1, pf)) + { + free_function(pMMBDXP->pField); + pMMBDXP->pField = nullptr; + pMMBDXP->nFields = 0; + fclose_function(pf); + pMMBDXP->pfDataBase = nullptr; + return 1; + } + + if (pMMBDXP->pField[nIField].FieldType == 'F') + pMMBDXP->pField[nIField].FieldType = 'N'; + + pMMBDXP->pField[nIField] + .FieldName[MM_MAX_LON_CLASSICAL_FIELD_NAME_DBF - 1] = '\0'; + if (EQUAL(pMMBDXP->pField[nIField].FieldName, + szMMNomCampIdGraficDefecte)) + pMMBDXP->IdGraficField = nIField; + + if (pMMBDXP->pField[nIField].BytesPerField == 0) + { + if (!MM_ES_DBF_ESTESA(pMMBDXP->dbf_version)) + { + free_function(pMMBDXP->pField); + pMMBDXP->pField = nullptr; + pMMBDXP->nFields = 0; + fclose_function(pf); + pMMBDXP->pfDataBase = nullptr; + return 1; + } + if (pMMBDXP->pField[nIField].FieldType != 'C') + { + free_function(pMMBDXP->pField); + pMMBDXP->pField = nullptr; + pMMBDXP->nFields = 0; + fclose_function(pf); + pMMBDXP->pfDataBase = nullptr; + return 1; + } + + memcpy(&pMMBDXP->pField[nIField].BytesPerField, + (char 
*)(&pMMBDXP->pField[nIField].reserved_2) + 3, + sizeof(MM_BYTES_PER_FIELD_TYPE_DBF)); + } + + if (nIField) + { + // To avoid overflow + if (pMMBDXP->pField[nIField - 1].AccumulatedBytes > + UINT32_MAX - pMMBDXP->pField[nIField - 1].BytesPerField) + { + free_function(pMMBDXP->pField); + pMMBDXP->pField = nullptr; + pMMBDXP->nFields = 0; + fclose_function(pf); + pMMBDXP->pfDataBase = nullptr; + return 1; + } + + pMMBDXP->pField[nIField].AccumulatedBytes = + (pMMBDXP->pField[nIField - 1].AccumulatedBytes + + pMMBDXP->pField[nIField - 1].BytesPerField); + } + else + { + pMMBDXP->pField[nIField].AccumulatedBytes = 1; + } + + if (pszRelFile) + { + // Usually, in multilingual MiraMon metadata files, the main + // language is the default one and has no "_cat", "_spa", or + // "_eng" suffix after the keyword. So, to retrieve all + // languages in a multilingual file, first, we'll identify + // the one with no suffix "_cat", "_spa", or "_eng", and then the + // others. If one of them lacks a value, it gets the default value. + snprintf(section, sizeof(section), "TAULA_PRINCIPAL:%s", + pMMBDXP->pField[nIField].FieldName); + + // MM_DEF_LANGUAGE + pszDesc = MMReturnValueFromSectionINIFile(pszRelFile, section, + "descriptor"); + if (pszDesc) + { + CPLStrlcpy( + pMMBDXP->pField[nIField].FieldDescription[MM_DEF_LANGUAGE], + pszDesc, MM_MAX_LON_DESCRIPCIO_CAMP_DBF); + + free_function(pszDesc); + } + else + *pMMBDXP->pField[nIField].FieldDescription[MM_DEF_LANGUAGE] = + '\0'; + + // MM_ENG_LANGUAGE + pszDesc = MMReturnValueFromSectionINIFile(pszRelFile, section, + "descriptor_eng"); + if (pszDesc) + { + CPLStrlcpy( + pMMBDXP->pField[nIField].FieldDescription[MM_ENG_LANGUAGE], + pszDesc, MM_MAX_LON_DESCRIPCIO_CAMP_DBF); + + if (*pMMBDXP->pField[nIField] + .FieldDescription[MM_DEF_LANGUAGE] == '\0') + { + CPLStrlcpy(pMMBDXP->pField[nIField] + .FieldDescription[MM_DEF_LANGUAGE], + pszDesc, MM_MAX_LON_DESCRIPCIO_CAMP_DBF); + } + free_function(pszDesc); + } + else + { + // If there is no value descriptor_eng it's because it's the + // default one. So, it's taken from there. + CPLStrlcpy( + pMMBDXP->pField[nIField].FieldDescription[MM_ENG_LANGUAGE], + pMMBDXP->pField[nIField].FieldDescription[MM_DEF_LANGUAGE], + MM_MAX_LON_DESCRIPCIO_CAMP_DBF); + } + + // MM_CAT_LANGUAGE + pszDesc = MMReturnValueFromSectionINIFile(pszRelFile, section, + "descriptor_cat"); + if (pszDesc) + { + CPLStrlcpy( + pMMBDXP->pField[nIField].FieldDescription[MM_CAT_LANGUAGE], + pszDesc, MM_MAX_LON_DESCRIPCIO_CAMP_DBF); + + if (*pMMBDXP->pField[nIField] + .FieldDescription[MM_DEF_LANGUAGE] == '\0') + { + CPLStrlcpy(pMMBDXP->pField[nIField] + .FieldDescription[MM_DEF_LANGUAGE], + pszDesc, MM_MAX_LON_DESCRIPCIO_CAMP_DBF); + } + + free_function(pszDesc); + } + else + { + // If there is no value descriptor_cat it's because it's the + // default one. So, it's taken from there. 
+ CPLStrlcpy( + pMMBDXP->pField[nIField].FieldDescription[MM_CAT_LANGUAGE], + pMMBDXP->pField[nIField].FieldDescription[MM_DEF_LANGUAGE], + MM_MAX_LON_DESCRIPCIO_CAMP_DBF); + } + + // MM_SPA_LANGUAGE + pszDesc = MMReturnValueFromSectionINIFile(pszRelFile, section, + "descriptor_spa"); + if (pszDesc) + { + CPLStrlcpy( + pMMBDXP->pField[nIField].FieldDescription[MM_SPA_LANGUAGE], + pszDesc, MM_MAX_LON_DESCRIPCIO_CAMP_DBF); + + if (*pMMBDXP->pField[nIField] + .FieldDescription[MM_DEF_LANGUAGE] == '\0') + { + CPLStrlcpy(pMMBDXP->pField[nIField] + .FieldDescription[MM_DEF_LANGUAGE], + pszDesc, MM_MAX_LON_DESCRIPCIO_CAMP_DBF); + } + + free_function(pszDesc); + } + else + { + // If there is no value descriptor_spa it's because it's the + // default one. So, it's taken from there. + CPLStrlcpy( + pMMBDXP->pField[nIField].FieldDescription[MM_SPA_LANGUAGE], + pMMBDXP->pField[nIField].FieldDescription[MM_DEF_LANGUAGE], + MM_MAX_LON_DESCRIPCIO_CAMP_DBF); + } + } + } + + if (!pMMBDXP->nFields) + { + if (pMMBDXP->BytesPerRecord) + incoherent_record_size = TRUE; + } + else + { + // To avoid overflow + if (pMMBDXP->pField[pMMBDXP->nFields - 1].AccumulatedBytes > + UINT32_MAX - pMMBDXP->pField[pMMBDXP->nFields - 1].BytesPerField) + { + free_function(pMMBDXP->pField); + pMMBDXP->pField = nullptr; + pMMBDXP->nFields = 0; + fclose_function(pf); + pMMBDXP->pfDataBase = nullptr; + return 1; + } + if (pMMBDXP->pField[pMMBDXP->nFields - 1].BytesPerField + + pMMBDXP->pField[pMMBDXP->nFields - 1].AccumulatedBytes > + pMMBDXP->BytesPerRecord) + incoherent_record_size = TRUE; + } + if (incoherent_record_size) + { + if (some_problems_when_reading == 0) + { + incoherent_record_size = FALSE; + fseek_function(pf, offset_reintent, SEEK_SET); + some_problems_when_reading++; + /* Reset IdGraficField as it might no longer be valid */ + pMMBDXP->IdGraficField = 0; + goto reintenta_lectura_per_si_error_CreaCampBD_XP; + } + else + { + free_function(pMMBDXP->pField); + pMMBDXP->pField = nullptr; + pMMBDXP->nFields = 0; + fclose_function(pf); + pMMBDXP->pfDataBase = nullptr; + return 1; + } + } + + offset_possible = 32 + 32 * (pMMBDXP->nFields) + 1; + + if (!incoherent_record_size && + offset_possible != pMMBDXP->FirstRecordOffset) + { // Extended names + MM_FIRST_RECORD_OFFSET_TYPE offset_nom_camp; + int mida_nom; + + for (nIField = 0; nIField < pMMBDXP->nFields; nIField++) + { + offset_nom_camp = + MM_GiveOffsetExtendedFieldName(pMMBDXP->pField + nIField); + mida_nom = MM_DonaBytesNomEstesCamp(pMMBDXP->pField + nIField); + if (mida_nom > 0 && mida_nom < MM_MAX_LON_FIELD_NAME_DBF && + offset_nom_camp >= offset_possible && + offset_nom_camp < pMMBDXP->FirstRecordOffset) + { + CPLStrlcpy(pMMBDXP->pField[nIField].ClassicalDBFFieldName, + pMMBDXP->pField[nIField].FieldName, + MM_MAX_LON_CLASSICAL_FIELD_NAME_DBF); + fseek_function(pf, offset_nom_camp, SEEK_SET); + if (1 != fread_function(pMMBDXP->pField[nIField].FieldName, + mida_nom, 1, pf)) + { + free_function(pMMBDXP->pField); + pMMBDXP->pField = nullptr; + pMMBDXP->nFields = 0; + fclose_function(pf); + pMMBDXP->pfDataBase = nullptr; + return 1; + } + pMMBDXP->pField[nIField].FieldName[mida_nom] = '\0'; + + // All field names to UTF-8 + if (pMMBDXP->CharSet == MM_JOC_CARAC_ANSI_DBASE) + { + pszString = + CPLRecode_function(pMMBDXP->pField[nIField].FieldName, + CPL_ENC_ISO8859_1, CPL_ENC_UTF8); + CPLStrlcpy(pMMBDXP->pField[nIField].FieldName, pszString, + MM_MAX_LON_FIELD_NAME_DBF); + CPLFree_function(pszString); + } + else if (pMMBDXP->CharSet == MM_JOC_CARAC_OEM850_DBASE) + { + 
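+                    // OEM 850 encoded field names are first remapped to
+                    // ANSI (ISO-8859-1) by MM_oemansi() and then recoded to
+                    // UTF-8, so both legacy code pages end up stored as
+                    // UTF-8 strings.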
MM_oemansi(pMMBDXP->pField[nIField].FieldName); + pszString = + CPLRecode_function(pMMBDXP->pField[nIField].FieldName, + CPL_ENC_ISO8859_1, CPL_ENC_UTF8); + CPLStrlcpy(pMMBDXP->pField[nIField].FieldName, pszString, + MM_MAX_LON_FIELD_NAME_DBF - 1); + CPLFree_function(pszString); + } + } + } + } + + pMMBDXP->IdEntityField = MM_MAX_EXT_DBF_N_FIELDS_TYPE; + return 0; +} // End of MM_ReadExtendedDBFHeaderFromFile() + +void MM_ReleaseDBFHeader(struct MM_DATA_BASE_XP *data_base_XP) +{ + if (data_base_XP) + { + MM_ReleaseMainFields(data_base_XP); + free_function(data_base_XP); + } + return; +} + +int MM_ModifyFieldNameAndDescriptorIfPresentBD_XP( + struct MM_FIELD *camp, struct MM_DATA_BASE_XP *bd_xp, + MM_BOOLEAN no_modifica_descriptor, size_t mida_nom) +{ + MM_EXT_DBF_N_FIELDS i_camp; + unsigned n_digits_i = 0, i; + int retorn = 0; + + if (mida_nom == 0) + mida_nom = MM_MAX_LON_FIELD_NAME_DBF; + + for (i_camp = 0; i_camp < bd_xp->nFields; i_camp++) + { + if (bd_xp->pField + i_camp == camp) + continue; + if (!strcasecmp(bd_xp->pField[i_camp].FieldName, camp->FieldName)) + break; + } + if (i_camp < bd_xp->nFields) + { + retorn = 1; + if (strlen(camp->FieldName) > mida_nom - 2) + camp->FieldName[mida_nom - 2] = '\0'; + strcat(camp->FieldName, "0"); + for (i = 2; i < (size_t)10; i++) + { + snprintf(camp->FieldName + strlen(camp->FieldName) - 1, + sizeof(camp->FieldName) - strlen(camp->FieldName) + 1, + "%u", i); + for (i_camp = 0; i_camp < bd_xp->nFields; i_camp++) + { + if (bd_xp->pField + i_camp == camp) + continue; + if (!strcasecmp(bd_xp->pField[i_camp].FieldName, + camp->FieldName)) + break; + } + if (i_camp == bd_xp->nFields) + { + n_digits_i = 1; + break; + } + } + if (i == 10) + { + camp->FieldName[strlen(camp->FieldName) - 1] = '\0'; + if (strlen(camp->FieldName) > mida_nom - 3) + camp->FieldName[mida_nom - 3] = '\0'; + strcat(camp->FieldName, "00"); + for (i = 10; i < (size_t)100; i++) + { + snprintf(camp->FieldName + strlen(camp->FieldName) - 2, + sizeof(camp->FieldName) - strlen(camp->FieldName) + 2, + "%u", i); + for (i_camp = 0; i_camp < bd_xp->nFields; i_camp++) + { + if (bd_xp->pField + i_camp == camp) + continue; + if (!strcasecmp(bd_xp->pField[i_camp].FieldName, + camp->FieldName)) + break; + } + if (i_camp == bd_xp->nFields) + { + n_digits_i = 2; + break; + } + } + if (i == 100) + { + camp->FieldName[strlen(camp->FieldName) - 2] = '\0'; + if (strlen(camp->FieldName) > mida_nom - 4) + camp->FieldName[mida_nom - 4] = '\0'; + strcat(camp->FieldName, "000"); + for (i = 100; i < (size_t)256 + 2; i++) + { + snprintf(camp->FieldName + strlen(camp->FieldName) - 3, + sizeof(camp->FieldName) - strlen(camp->FieldName) + + 3, + "%u", i); + for (i_camp = 0; i_camp < bd_xp->nFields; i_camp++) + { + if (bd_xp->pField + i_camp == camp) + continue; + if (!strcasecmp(bd_xp->pField[i_camp].FieldName, + camp->FieldName)) + break; + } + if (i_camp == bd_xp->nFields) + { + n_digits_i = 3; + break; + } + } + if (i == 256) + return 2; + } + } + } + else + { + i = 1; + } + + if ((*(camp->FieldDescription[0]) == '\0') || no_modifica_descriptor) + return retorn; + + for (i_camp = 0; i_camp < bd_xp->nFields; i_camp++) + { + if (bd_xp->pField + i_camp == camp) + continue; + if (!strcasecmp(bd_xp->pField[i_camp].FieldDescription[0], + camp->FieldDescription[0])) + break; + } + if (i_camp == bd_xp->nFields) + return retorn; + + if (retorn == 1) + { + if (strlen(camp->FieldDescription[0]) > + MM_MAX_LON_DESCRIPCIO_CAMP_DBF - 4 - n_digits_i) + camp->FieldDescription[0][mida_nom - 4 - n_digits_i] = '\0'; + + 
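+        // Append " (<i>)" to the (possibly truncated) descriptor and check
+        // below whether the result is still duplicated among the other
+        // fields.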
snprintf(camp->FieldDescription[0] + strlen(camp->FieldDescription[0]), + sizeof(camp->FieldDescription[0]) - + strlen(camp->FieldDescription[0]), + " (%u)", i); + for (i_camp = 0; i_camp < bd_xp->nFields; i_camp++) + { + if (bd_xp->pField + i_camp == camp) + continue; + if (!strcasecmp(bd_xp->pField[i_camp].FieldDescription[0], + camp->FieldDescription[0])) + break; + } + if (i_camp == bd_xp->nFields) + return retorn; + } + + retorn = 1; + if (strlen(camp->FieldDescription[0]) > + MM_MAX_LON_DESCRIPCIO_CAMP_DBF - 4 - n_digits_i) + camp->FieldDescription[0][mida_nom - 4 - n_digits_i] = '\0'; + camp->FieldDescription[0][strlen(camp->FieldDescription[0]) - 4 - + n_digits_i + 1] = '\0'; + if (strlen(camp->FieldDescription[0]) > MM_MAX_LON_DESCRIPCIO_CAMP_DBF - 7) + camp->FieldDescription[0][mida_nom - 7] = '\0'; + for (i++; i < (size_t)256; i++) + { + //if (camp->FieldDescription[0] + strlen(camp->FieldDescription[0])) + snprintf(camp->FieldDescription[0] + strlen(camp->FieldDescription[0]), + sizeof(camp->FieldDescription[0]) - + strlen(camp->FieldDescription[0]), + " (%u)", i); + for (i_camp = 0; i_camp < bd_xp->nFields; i_camp++) + { + if (bd_xp->pField + i_camp == camp) + continue; + if (!strcasecmp(bd_xp->pField[i_camp].FieldName, camp->FieldName)) + break; + } + if (i_camp == bd_xp->nFields) + return retorn; + } + return 2; +} // End of MM_ModifyFieldNameAndDescriptorIfPresentBD_XP() + +static int MM_DuplicateMultilingualString( + char *(cadena_final[MM_NUM_IDIOMES_MD_MULTIDIOMA]), + const char *const(cadena_inicial[MM_NUM_IDIOMES_MD_MULTIDIOMA])) +{ + size_t i; + + for (i = 0; i < MM_NUM_IDIOMES_MD_MULTIDIOMA; i++) + { + if (cadena_inicial[i]) + { + if (nullptr == (cadena_final[i] = strdup(cadena_inicial[i]))) + return 1; + } + else + cadena_final[i] = nullptr; + } + return 0; +} + +int MM_DuplicateFieldDBXP(struct MM_FIELD *camp_final, + const struct MM_FIELD *camp_inicial) +{ + *camp_final = *camp_inicial; + + if (0 != MM_DuplicateMultilingualString( + camp_final->Separator, + (const char *const(*))camp_inicial->Separator)) + return 1; + + return 0; +} + +#ifndef GDAL_COMPILATION +size_t CPLStrlcpy(char *pszDest, const char *pszSrc, size_t nDestSize) +{ + if (nDestSize == 0) + return strlen(pszSrc); + + char *pszDestIter = pszDest; + const char *pszSrcIter = pszSrc; + + --nDestSize; + while (nDestSize != 0 && *pszSrcIter != '\0') + { + *pszDestIter = *pszSrcIter; + ++pszDestIter; + ++pszSrcIter; + --nDestSize; + } + *pszDestIter = '\0'; + return pszSrcIter - pszSrc + strlen(pszSrcIter); +} +#endif + +// If n_bytes==SIZE_MAX, the parameter is ignored ant, then, +// it's assumed that szcadena is NUL terminated +char *MM_oemansi_n(char *szcadena, size_t n_bytes) +{ + size_t u_i; + unsigned char *punter_bait; + unsigned char t_oemansi[128] = { + 199, 252, 233, 226, 228, 224, 229, 231, 234, 235, 232, 239, 238, + 236, 196, 197, 201, 230, 198, 244, 246, 242, 251, 249, 255, 214, + 220, 248, 163, 216, 215, 131, 225, 237, 243, 250, 241, 209, 170, + 186, 191, 174, 172, 189, 188, 161, 171, 187, 164, 164, 164, 166, + 166, 193, 194, 192, 169, 166, 166, 164, 164, 162, 165, 164, 164, + 164, 164, 164, 164, 164, 227, 195, 164, 164, 164, 164, 166, 164, + 164, 164, 240, 208, 202, 203, 200, 180, 205, 206, 207, 164, 164, + 164, 164, 166, 204, 164, 211, 223, 212, 210, 245, 213, 181, 254, + 222, 218, 219, 217, 253, 221, 175, 180, 173, 177, 164, 190, 182, + 167, 247, 184, 176, 168, 183, 185, 179, 178, 164, 183}; + if (n_bytes == SIZE_MAX) + { + for (punter_bait = (unsigned char *)szcadena; *punter_bait; + 
punter_bait++) + { + if (*punter_bait > 127) + *punter_bait = t_oemansi[*punter_bait - 128]; + } + } + else + { + for (u_i = 0, punter_bait = (unsigned char *)szcadena; u_i < n_bytes; + punter_bait++, u_i++) + { + if (*punter_bait > 127) + *punter_bait = t_oemansi[*punter_bait - 128]; + } + } + return szcadena; +} + +char *MM_oemansi(char *szcadena) +{ + return MM_oemansi_n(szcadena, SIZE_MAX); +} + +static MM_BOOLEAN MM_FillFieldDB_XP( + struct MM_FIELD *camp, const char *FieldName, + const char *FieldDescriptionEng, const char *FieldDescriptionCat, + const char *FieldDescriptionSpa, char FieldType, + MM_BYTES_PER_FIELD_TYPE_DBF BytesPerField, MM_BYTE DecimalsIfFloat) +{ + char nom_temp[MM_MAX_LON_FIELD_NAME_DBF]; + int retorn_valida_nom_camp; + + if (FieldName) + { + retorn_valida_nom_camp = MM_ISExtendedNameBD_XP(FieldName); + if (retorn_valida_nom_camp == MM_DBF_NAME_NO_VALID) + return FALSE; + CPLStrlcpy(camp->FieldName, FieldName, MM_MAX_LON_FIELD_NAME_DBF); + + if (retorn_valida_nom_camp == MM_VALID_EXTENDED_DBF_NAME) + { + MM_CalculateBytesExtendedFieldName(camp); + CPLStrlcpy(nom_temp, FieldName, MM_MAX_LON_FIELD_NAME_DBF); + MM_ReturnValidClassicDBFFieldName(nom_temp); + nom_temp[MM_MAX_LON_CLASSICAL_FIELD_NAME_DBF] = '\0'; + CPLStrlcpy(camp->ClassicalDBFFieldName, nom_temp, + MM_MAX_LON_CLASSICAL_FIELD_NAME_DBF); + } + } + + if (FieldDescriptionEng) + CPLStrlcpy(camp->FieldDescription[MM_DEF_LANGUAGE], FieldDescriptionEng, + sizeof(camp->FieldDescription[MM_DEF_LANGUAGE])); + else + strcpy(camp->FieldDescription[MM_DEF_LANGUAGE], "\0"); + + if (FieldDescriptionEng) + CPLStrlcpy(camp->FieldDescription[MM_ENG_LANGUAGE], FieldDescriptionEng, + sizeof(camp->FieldDescription[MM_ENG_LANGUAGE])); + else + strcpy(camp->FieldDescription[MM_ENG_LANGUAGE], "\0"); + + if (FieldDescriptionCat) + CPLStrlcpy(camp->FieldDescription[MM_CAT_LANGUAGE], FieldDescriptionCat, + sizeof(camp->FieldDescription[MM_CAT_LANGUAGE])); + else + strcpy(camp->FieldDescription[MM_CAT_LANGUAGE], "\0"); + + if (FieldDescriptionSpa) + CPLStrlcpy(camp->FieldDescription[MM_SPA_LANGUAGE], FieldDescriptionSpa, + sizeof(camp->FieldDescription[MM_SPA_LANGUAGE])); + else + strcpy(camp->FieldDescription[MM_SPA_LANGUAGE], "\0"); + + camp->FieldType = FieldType; + camp->DecimalsIfFloat = DecimalsIfFloat; + camp->BytesPerField = BytesPerField; + return TRUE; +} + +size_t MM_DefineFirstPolygonFieldsDB_XP(struct MM_DATA_BASE_XP *bd_xp) +{ + MM_EXT_DBF_N_FIELDS i_camp = 0; + + MM_FillFieldDB_XP( + bd_xp->pField + i_camp, szMMNomCampIdGraficDefecte, + szInternalGraphicIdentifierEng, szInternalGraphicIdentifierCat, + szInternalGraphicIdentifierSpa, 'N', MM_MIN_WIDTH_ID_GRAFIC, 0); + bd_xp->IdGraficField = 0; + (bd_xp->pField + i_camp)->GeoTopoTypeField = (MM_BYTE)MM_CAMP_ES_ID_GRAFIC; + i_camp++; + + MM_FillFieldDB_XP(bd_xp->pField + i_camp, szMMNomCampNVertexsDefecte, + szNumberOfVerticesEng, szNumberOfVerticesCat, + szNumberOfVerticesSpa, 'N', MM_MIN_WIDTH_N_VERTEXS, 0); + (bd_xp->pField + i_camp)->GeoTopoTypeField = (MM_BYTE)MM_CAMP_ES_N_VERTEXS; + i_camp++; + + MM_FillFieldDB_XP(bd_xp->pField + i_camp, szMMNomCampPerimetreDefecte, + szPerimeterOfThePolygonEng, szPerimeterOfThePolygonCat, + szPerimeterOfThePolygonSpa, 'N', MM_MIN_WIDTH_LONG, 9); + (bd_xp->pField + i_camp)->GeoTopoTypeField = (MM_BYTE)MM_CAMP_ES_PERIMETRE; + i_camp++; + + MM_FillFieldDB_XP(bd_xp->pField + i_camp, szMMNomCampAreaDefecte, + szAreaOfThePolygonEng, szAreaOfThePolygonCat, + szAreaOfThePolygonSpa, 'N', MM_MIN_WIDTH_AREA, 12); + (bd_xp->pField + 
i_camp)->GeoTopoTypeField = (MM_BYTE)MM_CAMP_ES_AREA; + i_camp++; + + MM_FillFieldDB_XP(bd_xp->pField + i_camp, szMMNomCampNArcsDefecte, + szNumberOfArcsEng, szNumberOfArcsCat, szNumberOfArcsSpa, + 'N', MM_MIN_WIDTH_N_ARCS, 0); + (bd_xp->pField + i_camp)->GeoTopoTypeField = (MM_BYTE)MM_CAMP_ES_N_ARCS; + i_camp++; + + MM_FillFieldDB_XP( + bd_xp->pField + i_camp, szMMNomCampNPoligonsDefecte, + szNumberOfElementaryPolygonsEng, szNumberOfElementaryPolygonsCat, + szNumberOfElementaryPolygonsSpa, 'N', MM_MIN_WIDTH_N_POLIG, 0); + (bd_xp->pField + i_camp)->GeoTopoTypeField = (MM_BYTE)MM_CAMP_ES_N_POLIG; + i_camp++; + + return i_camp; +} + +size_t MM_DefineFirstArcFieldsDB_XP(struct MM_DATA_BASE_XP *bd_xp) +{ + MM_EXT_DBF_N_FIELDS i_camp; + + i_camp = 0; + MM_FillFieldDB_XP( + bd_xp->pField + i_camp, szMMNomCampIdGraficDefecte, + szInternalGraphicIdentifierEng, szInternalGraphicIdentifierCat, + szInternalGraphicIdentifierSpa, 'N', MM_MIN_WIDTH_ID_GRAFIC, 0); + bd_xp->IdGraficField = 0; + (bd_xp->pField + i_camp)->GeoTopoTypeField = (MM_BYTE)MM_CAMP_ES_ID_GRAFIC; + i_camp++; + + MM_FillFieldDB_XP(bd_xp->pField + i_camp, szMMNomCampNVertexsDefecte, + szNumberOfVerticesEng, szNumberOfVerticesCat, + szNumberOfVerticesSpa, 'N', MM_MIN_WIDTH_N_VERTEXS, 0); + (bd_xp->pField + i_camp)->GeoTopoTypeField = (MM_BYTE)MM_CAMP_ES_N_VERTEXS; + i_camp++; + + MM_FillFieldDB_XP(bd_xp->pField + i_camp, szMMNomCampLongitudArcDefecte, + szLengthOfAarcEng, szLengthOfAarcCat, szLengthOfAarcSpa, + 'N', MM_MIN_WIDTH_LONG, 9); + (bd_xp->pField + i_camp)->GeoTopoTypeField = (MM_BYTE)MM_CAMP_ES_LONG_ARC; + i_camp++; + + MM_FillFieldDB_XP(bd_xp->pField + i_camp, szMMNomCampNodeIniDefecte, + szInitialNodeEng, szInitialNodeCat, szInitialNodeSpa, 'N', + MM_MIN_WIDTH_INITIAL_NODE, 0); + (bd_xp->pField + i_camp)->GeoTopoTypeField = (MM_BYTE)MM_CAMP_ES_NODE_INI; + i_camp++; + + MM_FillFieldDB_XP(bd_xp->pField + i_camp, szMMNomCampNodeFiDefecte, + szFinalNodeEng, szFinalNodeCat, szFinalNodeSpa, 'N', + MM_MIN_WIDTH_FINAL_NODE, 0); + (bd_xp->pField + i_camp)->GeoTopoTypeField = (MM_BYTE)MM_CAMP_ES_NODE_FI; + i_camp++; + + return i_camp; +} + +size_t MM_DefineFirstNodeFieldsDB_XP(struct MM_DATA_BASE_XP *bd_xp) +{ + MM_EXT_DBF_N_FIELDS i_camp; + + i_camp = 0; + + MM_FillFieldDB_XP( + bd_xp->pField + i_camp, szMMNomCampIdGraficDefecte, + szInternalGraphicIdentifierEng, szInternalGraphicIdentifierCat, + szInternalGraphicIdentifierSpa, 'N', MM_MIN_WIDTH_ID_GRAFIC, 0); + bd_xp->IdGraficField = 0; + (bd_xp->pField + i_camp)->GeoTopoTypeField = (MM_BYTE)MM_CAMP_ES_ID_GRAFIC; + i_camp++; + + MM_FillFieldDB_XP(bd_xp->pField + i_camp, szMMNomCampArcsANodeDefecte, + szNumberOfArcsToNodeEng, szNumberOfArcsToNodeCat, + szNumberOfArcsToNodeSpa, 'N', MM_MIN_WIDTH_ARCS_TO_NODE, + 0); + (bd_xp->pField + i_camp)->GeoTopoTypeField = (MM_BYTE)MM_CAMP_ES_ARCS_A_NOD; + i_camp++; + + MM_FillFieldDB_XP(bd_xp->pField + i_camp, szMMNomCampTipusNodeDefecte, + szNodeTypeEng, szNodeTypeCat, szNodeTypeSpa, 'N', 1, 0); + (bd_xp->pField + i_camp)->GeoTopoTypeField = (MM_BYTE)MM_CAMP_ES_TIPUS_NODE; + i_camp++; + + return i_camp; +} + +size_t MM_DefineFirstPointFieldsDB_XP(struct MM_DATA_BASE_XP *bd_xp) +{ + size_t i_camp = 0; + + MM_FillFieldDB_XP( + bd_xp->pField + i_camp, szMMNomCampIdGraficDefecte, + szInternalGraphicIdentifierEng, szInternalGraphicIdentifierCat, + szInternalGraphicIdentifierSpa, 'N', MM_MIN_WIDTH_ID_GRAFIC, 0); + bd_xp->IdGraficField = 0; + (bd_xp->pField + i_camp)->GeoTopoTypeField = (MM_BYTE)MM_CAMP_ES_ID_GRAFIC; + i_camp++; + + return i_camp; +} 
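+
+/* Editor's note -- hedged usage sketch, not code that the driver executes
+ * here. The MM_DefineFirst*FieldsDB_XP() helpers only (re)fill the leading,
+ * topology-related fields of an already allocated header, so a plausible
+ * creation sequence for a polygon table, using only functions defined in this
+ * file, would be (the field count of 6 and the file name are assumptions made
+ * for the example; the real callers live elsewhere in the driver):
+ *
+ *   struct MM_DATA_BASE_XP *bd_xp =
+ *       MM_CreateDBFHeader(6, MM_JOC_CARAC_UTF8_DBF);
+ *   if (bd_xp)
+ *   {
+ *       MM_DefineFirstPolygonFieldsDB_XP(bd_xp);   // ID, N_VERTEXS, ...
+ *       if (!MM_CreateDBFFile(bd_xp, "example.dbf"))
+ *       {
+ *           // handle the write error
+ *       }
+ *       MM_ReleaseDBFHeader(bd_xp);
+ *   }
+ */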
+ +static int MM_SprintfDoubleWidth(char *cadena, size_t cadena_size, int amplada, + int n_decimals, double valor_double, + MM_BOOLEAN *Error_sprintf_n_decimals) +{ +#define VALOR_LIMIT_IMPRIMIR_EN_FORMAT_E 1E+17 +#define VALOR_MASSA_PETIT_PER_IMPRIMIR_f 1E-17 + char cadena_treball[MM_CHARACTERS_DOUBLE + 1]; + int retorn_printf; + + if (MM_IsNANDouble(valor_double)) + { + if (amplada < 3) + { + *cadena = *MM_EmptyString; + return EOF; + } + return snprintf(cadena, cadena_size, "NAN"); + } + if (MM_IsDoubleInfinite(valor_double)) + { + if (amplada < 3) + { + *cadena = *MM_EmptyString; + return EOF; + } + return snprintf(cadena, cadena_size, "INF"); + } + + *Error_sprintf_n_decimals = FALSE; + if (valor_double == 0) + { + retorn_printf = snprintf(cadena_treball, sizeof(cadena_treball), + "%*.*f", amplada, n_decimals, valor_double); + if (retorn_printf >= (int)sizeof(cadena_treball)) + { + *cadena = *MM_EmptyString; + return retorn_printf; + } + + if (retorn_printf > amplada) + { + int escurcament = retorn_printf - amplada; + if (escurcament > n_decimals) + { + *cadena = *MM_EmptyString; + return EOF; + } + *Error_sprintf_n_decimals = TRUE; + n_decimals = n_decimals - escurcament; + retorn_printf = snprintf(cadena, cadena_size, "%*.*f", amplada, + n_decimals, valor_double); + } + else + CPLStrlcpy(cadena, cadena_treball, cadena_size); + + return retorn_printf; + } + + if (valor_double > VALOR_LIMIT_IMPRIMIR_EN_FORMAT_E || + valor_double < -VALOR_LIMIT_IMPRIMIR_EN_FORMAT_E || + (valor_double < VALOR_MASSA_PETIT_PER_IMPRIMIR_f && + valor_double > -VALOR_MASSA_PETIT_PER_IMPRIMIR_f)) + { + retorn_printf = snprintf(cadena_treball, sizeof(cadena_treball), + "%*.*E", amplada, n_decimals, valor_double); + + if (retorn_printf >= (int)sizeof(cadena_treball)) + { + *cadena = *MM_EmptyString; + return retorn_printf; + } + if (retorn_printf > amplada) + { + int escurcament = retorn_printf - amplada; + if (escurcament > n_decimals) + { + *cadena = *MM_EmptyString; + return EOF; + } + *Error_sprintf_n_decimals = TRUE; + n_decimals = n_decimals - escurcament; + retorn_printf = snprintf(cadena, cadena_size, "%*.*E", amplada, + n_decimals, valor_double); + } + else + CPLStrlcpy(cadena, cadena_treball, cadena_size); + + return retorn_printf; + } + + retorn_printf = snprintf(cadena_treball, sizeof(cadena_treball), "%*.*f", + amplada, n_decimals, valor_double); + + if (retorn_printf >= (int)sizeof(cadena_treball)) + { + *cadena = *MM_EmptyString; + return retorn_printf; + } + + if (retorn_printf > amplada) + { + int escurcament = retorn_printf - amplada; + if (escurcament > n_decimals) + { + *cadena = *MM_EmptyString; + return EOF; + } + *Error_sprintf_n_decimals = TRUE; + n_decimals = n_decimals - escurcament; + retorn_printf = snprintf(cadena, cadena_size, "%*.*f", amplada, + n_decimals, valor_double); + } + else + CPLStrlcpy(cadena, cadena_treball, cadena_size); + + return retorn_printf; + +#undef VALOR_LIMIT_IMPRIMIR_EN_FORMAT_E +#undef VALOR_MASSA_PETIT_PER_IMPRIMIR_f +} // End of MM_SprintfDoubleWidth() + +static MM_BOOLEAN MM_EmptyString_function(const char *cadena) +{ + const char *ptr = cadena; + + for (; *ptr; ptr++) + { + if (*ptr != ' ' && *ptr != '\t') + { + return FALSE; + } + } + + return TRUE; +} + +int MM_SecureCopyStringFieldValue(char **pszStringDst, const char *pszStringSrc, + MM_EXT_DBF_N_FIELDS *nStringCurrentLength) +{ + + if (!pszStringSrc) + { + if (1 >= *nStringCurrentLength) + { + void *new_ptr = realloc_function(*pszStringDst, 2); + if (!new_ptr) + return 1; + *pszStringDst = new_ptr; 
+ *nStringCurrentLength = (MM_EXT_DBF_N_FIELDS)2; + } + strcpy(*pszStringDst, "\0"); + return 0; + } + + if (strlen(pszStringSrc) >= *nStringCurrentLength) + { + void *new_ptr = + realloc_function(*pszStringDst, strlen(pszStringSrc) + 1); + if (!new_ptr) + return 1; + (*pszStringDst) = new_ptr; + *nStringCurrentLength = (MM_EXT_DBF_N_FIELDS)(strlen(pszStringSrc) + 1); + } + strcpy(*pszStringDst, pszStringSrc); + return 0; +} + +// This function assumes that all the file is saved in disk and closed. +int MM_ChangeDBFWidthField(struct MM_DATA_BASE_XP *data_base_XP, + MM_EXT_DBF_N_FIELDS nIField, + MM_BYTES_PER_FIELD_TYPE_DBF nNewWidth, + MM_BYTE nNewPrecision, + MM_BYTE que_fer_amb_reformatat_decimals) +{ + char *record, *whites = nullptr; + MM_BYTES_PER_FIELD_TYPE_DBF l_glop1, l_glop2, i_glop2; + MM_EXT_DBF_N_RECORDS nfitx, i_reg; + int canvi_amplada; // change width + GInt32 j; + MM_EXT_DBF_N_FIELDS i_camp; + size_t retorn_fwrite; + int retorn_TruncaFitxer; + + MM_BOOLEAN error_sprintf_n_decimals = FALSE; + + canvi_amplada = nNewWidth - data_base_XP->pField[nIField].BytesPerField; + + if (data_base_XP->nRecords != 0) + { + l_glop1 = data_base_XP->pField[nIField].AccumulatedBytes; + i_glop2 = l_glop1 + data_base_XP->pField[nIField].BytesPerField; + if (nIField == data_base_XP->nFields - 1) + l_glop2 = 0; + else + l_glop2 = data_base_XP->BytesPerRecord - + data_base_XP->pField[nIField + 1].AccumulatedBytes; + + if ((record = calloc_function((size_t)data_base_XP->BytesPerRecord)) == + nullptr) + return 1; + + record[data_base_XP->BytesPerRecord - 1] = MM_SetEndOfString; + + if ((whites = (char *)calloc_function((size_t)nNewWidth)) == nullptr) + { + free_function(record); + return 1; + } + memset(whites, ' ', nNewWidth); + + nfitx = data_base_XP->nRecords; + i_reg = (canvi_amplada < 0 ? 0 : nfitx - 1); + while (TRUE) + { + if (0 != fseek_function(data_base_XP->pfDataBase, + data_base_XP->FirstRecordOffset + + (MM_FILE_OFFSET)i_reg * + data_base_XP->BytesPerRecord, + SEEK_SET)) + { + free_function(whites); + free_function(record); + return 1; + } + + if (1 != fread_function(record, data_base_XP->BytesPerRecord, 1, + data_base_XP->pfDataBase)) + { + free_function(whites); + free_function(record); + return 1; + } + + if (0 != + fseek_function( + data_base_XP->pfDataBase, + (MM_FILE_OFFSET)data_base_XP->FirstRecordOffset + + i_reg * ((MM_FILE_OFFSET)data_base_XP->BytesPerRecord + + canvi_amplada), + SEEK_SET)) + { + free_function(whites); + free_function(record); + return 1; + } + + if (1 != + fwrite_function(record, l_glop1, 1, data_base_XP->pfDataBase)) + { + free_function(whites); + free_function(record); + return 1; + } + + switch (data_base_XP->pField[nIField].FieldType) + { + case 'C': + case 'L': + memcpy(whites, record + l_glop1, + (canvi_amplada < 0 + ? 
nNewWidth + : data_base_XP->pField[nIField].BytesPerField)); + retorn_fwrite = fwrite_function(whites, nNewWidth, 1, + data_base_XP->pfDataBase); + + if (1 != retorn_fwrite) + { + free_function(whites); + free_function(record); + return 1; + } + break; + case 'N': + if (nNewPrecision == + data_base_XP->pField[nIField].DecimalsIfFloat || + que_fer_amb_reformatat_decimals == + MM_NOU_N_DECIMALS_NO_APLICA) + que_fer_amb_reformatat_decimals = + MM_NOMES_DOCUMENTAR_NOU_N_DECIMALS; + else if (que_fer_amb_reformatat_decimals == + MM_PREGUNTA_SI_APLICAR_NOU_N_DECIM) + que_fer_amb_reformatat_decimals = + MM_NOMES_DOCUMENTAR_NOU_N_DECIMALS; + + if (que_fer_amb_reformatat_decimals == + MM_NOMES_DOCUMENTAR_NOU_N_DECIMALS) + { + if (canvi_amplada >= 0) + { + if (1 != + fwrite_function(whites, canvi_amplada, 1, + data_base_XP->pfDataBase) || + 1 != fwrite_function( + record + l_glop1, + data_base_XP->pField[nIField] + .BytesPerField, + 1, data_base_XP->pfDataBase)) + { + free_function(whites); + free_function(record); + return 1; + } + } + else if (canvi_amplada < 0) + { + j = (GInt32)(l_glop1 + + (data_base_XP->pField[nIField] + .BytesPerField - + 1)); + while (TRUE) + { + j--; + + if (j < (GInt32)l_glop1 || record[j] == ' ') + { + j++; + break; + } + } + + if ((data_base_XP->pField[nIField].BytesPerField + + l_glop1 - j) < nNewWidth) + j -= (GInt32)(nNewWidth - + (data_base_XP->pField[nIField] + .BytesPerField + + l_glop1 - j)); + + retorn_fwrite = + fwrite_function(record + j, nNewWidth, 1, + data_base_XP->pfDataBase); + if (1 != retorn_fwrite) + { + free_function(whites); + free_function(record); + return 1; + } + } + } + else // MM_APLICAR_NOU_N_DECIMALS + { + double valor; + char *sz_valor; + size_t sz_valor_size = + max_function( + nNewWidth, + data_base_XP->pField[nIField].BytesPerField) + + 1; + + if ((sz_valor = calloc_function(sz_valor_size)) == + nullptr) // Sumo 1 per poder posar-hi el \0 + { + free_function(whites); + free_function(record); + return 1; + } + memcpy(sz_valor, record + l_glop1, + data_base_XP->pField[nIField].BytesPerField); + sz_valor[data_base_XP->pField[nIField].BytesPerField] = + 0; + + if (!MM_EmptyString_function(sz_valor)) + { + if (sscanf(sz_valor, "%lf", &valor) != 1) + memset( + sz_valor, *MM_BlankString, + max_function(nNewWidth, + data_base_XP->pField[nIField] + .BytesPerField)); + else + { + MM_SprintfDoubleWidth( + sz_valor, sz_valor_size, nNewWidth, + nNewPrecision, valor, + &error_sprintf_n_decimals); + } + + retorn_fwrite = + fwrite_function(sz_valor, nNewWidth, 1, + data_base_XP->pfDataBase); + if (1 != retorn_fwrite) + { + free_function(whites); + free_function(record); + free_function(sz_valor); + return 1; + } + } + else + { + memset(sz_valor, *MM_BlankString, nNewWidth); + retorn_fwrite = + fwrite_function(sz_valor, nNewWidth, 1, + data_base_XP->pfDataBase); + if (1 != retorn_fwrite) + { + free_function(whites); + free_function(record); + free_function(sz_valor); + return 1; + } + } + free_function(sz_valor); + } + break; + default: + free_function(whites); + free_function(record); + return 1; + } + if (l_glop2) + { + retorn_fwrite = fwrite_function(record + i_glop2, l_glop2, 1, + data_base_XP->pfDataBase); + if (1 != retorn_fwrite) + { + free_function(whites); + free_function(record); + return 1; + } + } + + if (canvi_amplada < 0) + { + if (i_reg + 1 == nfitx) + break; + i_reg++; + } + else + { + if (i_reg == 0) + break; + i_reg--; + } + } + + free_function(whites); + free_function(record); + + retorn_TruncaFitxer = TruncateFile_function( + 
data_base_XP->pfDataBase, + (MM_FILE_OFFSET)data_base_XP->FirstRecordOffset + + (MM_FILE_OFFSET)data_base_XP->nRecords * + ((MM_FILE_OFFSET)data_base_XP->BytesPerRecord + + canvi_amplada)); + if (canvi_amplada < 0 && retorn_TruncaFitxer) + return 1; + } /* Fi de registres de != 0*/ + + if (canvi_amplada != 0) + { + data_base_XP->pField[nIField].BytesPerField = nNewWidth; + data_base_XP->BytesPerRecord += canvi_amplada; + for (i_camp = (MM_EXT_DBF_N_FIELDS)(nIField + 1); + i_camp < data_base_XP->nFields; i_camp++) + data_base_XP->pField[i_camp].AccumulatedBytes += canvi_amplada; + } + data_base_XP->pField[nIField].DecimalsIfFloat = nNewPrecision; + + //DonaData(&(data_base_XP->day), &(data_base_XP->month), &(data_base_XP->year)); + + if ((MM_UpdateEntireHeader(data_base_XP)) == FALSE) + return 1; + + return 0; +} /* End of MMChangeCFieldWidthDBF() */ + +static void MM_AdoptHeight(double *desti, const double *proposta, uint32_t flag) +{ + if (*proposta == MM_NODATA_COORD_Z) + return; + + if (flag & MM_STRING_HIGHEST_ALTITUDE) + { + if (*desti == MM_NODATA_COORD_Z || *desti < *proposta) + *desti = *proposta; + } + else if (flag & MM_STRING_LOWEST_ALTITUDE) + { + if (*desti == MM_NODATA_COORD_Z || *desti > *proposta) + *desti = *proposta; + } + else + { + // First coordinate of this vertice + if (*desti == MM_NODATA_COORD_Z) + *desti = *proposta; + } +} + +int MM_GetArcHeights(double *coord_z, FILE_TYPE *pF, MM_N_VERTICES_TYPE n_vrt, + struct MM_ZD *pZDescription, uint32_t flag) +{ + MM_N_HEIGHT_TYPE i; + MM_N_VERTICES_TYPE i_vrt; + double *pcoord_z; + MM_N_HEIGHT_TYPE n_alcada, n_h_total; + int tipus; + double *alcada = nullptr, *palcada, *palcada_i; +#define MM_N_ALCADA_LOCAL 50 // Nr of local heights + double local_CinquantaAlcades[MM_N_ALCADA_LOCAL]; + + for (i_vrt = 0; i_vrt < n_vrt; i_vrt++) + coord_z[i_vrt] = MM_NODATA_COORD_Z; + + if (pZDescription->nZCount == INT_MIN) + return 0; + tipus = MM_ARC_HEIGHT_TYPE(pZDescription->nZCount); + n_alcada = MM_ARC_N_HEIGHTS(pZDescription->nZCount); + if (n_vrt == 0 || n_alcada == 0) + return 0; + + if (tipus == MM_ARC_HEIGHT_FOR_EACH_VERTEX) + { + if (n_vrt > (unsigned)(INT_MAX / n_alcada)) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, "Integer overflow"); + return 1; + } + n_h_total = (MM_N_HEIGHT_TYPE)n_vrt * n_alcada; + } + else + n_h_total = n_alcada; + + if (n_h_total <= MM_N_ALCADA_LOCAL) + palcada = local_CinquantaAlcades; + else + { + if (MMCheckSize_t(n_h_total, sizeof(double))) + return 1; + if (nullptr == (palcada = alcada = calloc_function((size_t)n_h_total * + sizeof(double)))) + return 1; + } + + if (fseek_function(pF, pZDescription->nOffsetZ, SEEK_SET)) + { + if (alcada) + free_function(alcada); + return 1; + } + if (n_h_total != (MM_N_HEIGHT_TYPE)fread_function(palcada, sizeof(double), + n_h_total, pF)) + { + if (alcada) + free_function(alcada); + return 1; + } + + if (tipus == MM_ARC_HEIGHT_FOR_EACH_VERTEX) + { + palcada_i = palcada; + for (i = 0; i < n_alcada; i++) + { + for (i_vrt = 0, pcoord_z = coord_z; i_vrt < n_vrt; + i_vrt++, pcoord_z++, palcada_i++) + MM_AdoptHeight(pcoord_z, palcada_i, flag); + } + } + else + { + palcada_i = palcada; + pcoord_z = coord_z; + for (i = 0; i < n_alcada; i++, palcada_i++) + MM_AdoptHeight(pcoord_z, palcada_i, flag); + + if (*pcoord_z != MM_NODATA_COORD_Z) + { + /*Copio el mateix valor a totes les alcades.*/ + for (i_vrt = 1, pcoord_z++; i_vrt < (size_t)n_vrt; + i_vrt++, pcoord_z++) + *pcoord_z = *coord_z; + } + } + if (alcada) + free_function(alcada); + return 0; +} // End of 
MM_GetArcHeights() + +static char *MM_l_RemoveWhitespacesFromEndOfString(char *punter, + size_t l_cadena) +{ + size_t longitud_cadena = l_cadena; + while (longitud_cadena > 0) + { + longitud_cadena--; + if (punter[longitud_cadena] != ' ' && punter[longitud_cadena] != '\t') + { + break; + } + punter[longitud_cadena] = '\0'; + } + return punter; +} + +char *MM_RemoveInitial_and_FinalQuotationMarks(char *cadena) +{ + char *ptr1, *ptr2; + char cometa = '"'; + + if (*cadena == cometa) + { + ptr1 = cadena; + ptr2 = ptr1 + 1; + if (*ptr2) + { + while (*ptr2) + { + *ptr1 = *ptr2; + ptr1++; + ptr2++; + } + if (*ptr1 == cometa) + *(ptr1 - 1) = 0; + else + *ptr1 = 0; + } + } + return cadena; +} /* End of MM_RemoveInitial_and_FinalQuotationMarks() */ + +char *MM_RemoveLeadingWhitespaceOfString(char *cadena) +{ + char *ptr; + char *ptr2; + + if (cadena == nullptr) + return cadena; + + for (ptr = cadena; *ptr && (*ptr == ' ' || *ptr == '\t'); ptr++) + continue; + + if (ptr != cadena) + { + ptr2 = cadena; + while (*ptr) + { + *ptr2 = *ptr; + ptr2++; + ptr++; + } + *ptr2 = 0; + } + return cadena; +} + +char *MM_RemoveWhitespacesFromEndOfString(char *str) +{ + if (str == nullptr) + return str; + return MM_l_RemoveWhitespacesFromEndOfString(str, strlen(str)); +} + +struct MM_ID_GRAFIC_MULTIPLE_RECORD *MMCreateExtendedDBFIndex( + FILE_TYPE *f, MM_EXT_DBF_N_RECORDS nNumberOfRecords, + MM_FIRST_RECORD_OFFSET_TYPE offset_1era, + MM_ACCUMULATED_BYTES_TYPE_DBF bytes_per_fitxa, + MM_ACCUMULATED_BYTES_TYPE_DBF bytes_acumulats_id_grafic, + MM_BYTES_PER_FIELD_TYPE_DBF bytes_id_grafic, MM_BOOLEAN *isListField, + MM_EXT_DBF_N_RECORDS *nMaxN) +{ + struct MM_ID_GRAFIC_MULTIPLE_RECORD *id; + MM_EXT_DBF_N_RECORDS i_dbf; + MM_EXT_DBF_SIGNED_N_RECORDS i, id_grafic; + char *fitxa; + MM_BYTES_PER_FIELD_TYPE_DBF bytes_final_id_principi_id1 = + bytes_per_fitxa - bytes_id_grafic; + + *isListField = FALSE; + *nMaxN = 0; + if (!nNumberOfRecords) + return nullptr; // No elements to read + + if (MMCheckSize_t(nNumberOfRecords, sizeof(*id))) + return nullptr; + if (nullptr == (id = (struct MM_ID_GRAFIC_MULTIPLE_RECORD *)calloc_function( + (size_t)nNumberOfRecords * sizeof(*id)))) + return nullptr; + + if (bytes_id_grafic == UINT32_MAX) + { + free_function(id); + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Overflow in bytes_id_graphic"); + return nullptr; + } + + if (nullptr == + (fitxa = (char *)calloc_function((size_t)bytes_id_grafic + 1))) + { + free_function(id); + return nullptr; + } + fitxa[bytes_id_grafic] = '\0'; + + fseek_function(f, + (MM_FILE_OFFSET)offset_1era + + (MM_FILE_OFFSET)bytes_acumulats_id_grafic, + SEEK_SET); + + i_dbf = 0; + do + { + if (i_dbf == nNumberOfRecords || + fread_function(fitxa, 1, bytes_id_grafic, f) != + (size_t)bytes_id_grafic) + { + free_function(id); + free_function(fitxa); + return nullptr; + } + i_dbf++; + } while (1 != + sscanf(fitxa, scanf_MM_EXT_DBF_SIGNED_N_RECORDS, &id_grafic) || + id_grafic < 0); + i = 0; + + while (TRUE) + { + if (i > id_grafic) + { + free_function(id); + free_function(fitxa); + return nullptr; + } + i = id_grafic; + if (i >= (MM_EXT_DBF_SIGNED_N_RECORDS)nNumberOfRecords) + { + free_function(fitxa); + return id; + } + id[(size_t)i].offset = (MM_FILE_OFFSET)offset_1era + + (MM_FILE_OFFSET)(i_dbf - 1) * bytes_per_fitxa; + do + { + id[(size_t)i].nMR++; + if (!(*isListField) && id[(size_t)i].nMR > 1) + *isListField = TRUE; + if (*nMaxN < id[(size_t)i].nMR) + *nMaxN = id[(size_t)i].nMR; + + if (i_dbf == nNumberOfRecords) + { + free_function(fitxa); + return id; + } + 
fseek_function(f, bytes_final_id_principi_id1, SEEK_CUR); + if (fread_function(fitxa, 1, bytes_id_grafic, f) != + (size_t)bytes_id_grafic) + { + free_function(id); + free_function(fitxa); + return nullptr; + } + if (1 != sscanf(fitxa, scanf_MM_EXT_DBF_SIGNED_N_RECORDS, + &id_grafic) || + id_grafic >= (MM_EXT_DBF_SIGNED_N_RECORDS)nNumberOfRecords) + { + free_function(fitxa); + return id; + } + i_dbf++; + } while (id_grafic == i); + } +} // End of MMCreateExtendedDBFIndex() + +#ifdef GDAL_COMPILATION +CPL_C_END // Necessary for compiling in GDAL project +#endif diff --git a/ogr/ogrsf_frmts/miramon/mm_gdal_functions.h b/ogr/ogrsf_frmts/miramon/mm_gdal_functions.h new file mode 100644 index 000000000000..a0feda45dbda --- /dev/null +++ b/ogr/ogrsf_frmts/miramon/mm_gdal_functions.h @@ -0,0 +1,164 @@ +#ifndef __MM_GDAL_FUNCTIONS_H +#define __MM_GDAL_FUNCTIONS_H +/* -------------------------------------------------------------------- */ +/* Constants used in GDAL and in MiraMon */ +/* -------------------------------------------------------------------- */ + +#ifdef GDAL_COMPILATION +#include "mm_gdal_constants.h" // MM_BYTE +#include "mm_gdal_structures.h" // struct BASE_DADES_XP +#include "mm_gdal_driver_structs.h" // struct MMAdmDatabase +CPL_C_START // Necessary for compiling in GDAL project +#else +#include "mm_constants.h" // MM_BYTE +#include "mm_gdal\mm_gdal_structures.h" // struct BASE_DADES_XP +#include "mm_gdal\mm_gdal_driver_structs.h" // struct MMAdmDatabase +#endif + +#define nullptr NULL + + // Log. It should be temporal + extern const char *MM_pszLogFilename; + +#define LOG_STR(str) (MMLog((str), __LINE__)) +#define LOG_ACTION(action) ((void)MMLog(#action, __LINE__), (action)) + +const char *MMLog(const char *pszMsg, int nLineNumber); + +void fclose_and_nullify(FILE_TYPE **pFunc); + +// MiraMon feature table descriptors +#define MM_MAX_IDENTIFIER_SIZE 50 +#define MM_a_WITH_GRAVE 224 +#define MM_a_WITH_ACUTE 225 +#define MM_e_WITH_GRAVE 232 +#define MM_e_WITH_ACUTE 233 +#define MM_i_WITH_ACUTE 237 +#define MM_o_WITH_GRAVE 242 +#define MM_o_WITH_ACUTE 243 +#define MM_u_WITH_ACUTE 250 + +#define MM_A_WITH_GRAVE 192 +#define MM_A_WITH_ACUTE 193 +#define MM_E_WITH_GRAVE 200 +#define MM_E_WITH_ACUTE 201 +#define MM_I_WITH_ACUTE 205 +#define MM_O_WITH_GRAVE 210 +#define MM_O_WITH_ACUTE 211 +#define MM_U_WITH_ACUTE 218 + +// In case of diaeresis use "_WITH_DIAERESIS" +// In case of cedilla use "_WITH_CEDILLA" +// In case of tilde use "_WITH_TILDE" +// In case of middle dot use "_MIDDLE_DOT" + +void MM_FillFieldDescriptorByLanguage(void); + +extern char szInternalGraphicIdentifierEng[]; +extern char szInternalGraphicIdentifierCat[]; +extern char szInternalGraphicIdentifierSpa[]; + +extern char szNumberOfVerticesEng[]; +extern char szNumberOfVerticesCat[]; +extern char szNumberOfVerticesSpa[]; + +extern char szLengthOfAarcEng[]; +extern char szLengthOfAarcCat[]; +extern char szLengthOfAarcSpa[]; + +extern char szInitialNodeEng[]; +extern char szInitialNodeCat[]; +extern char szInitialNodeSpa[]; + +extern char szFinalNodeEng[]; +extern char szFinalNodeCat[]; +extern char szFinalNodeSpa[]; + +extern char szNumberOfArcsToNodeEng[]; +extern char szNumberOfArcsToNodeCat[]; +extern char szNumberOfArcsToNodeSpa[]; + +extern char szNodeTypeEng[]; +extern char szNodeTypeCat[]; +extern char szNodeTypeSpa[]; + +extern char szPerimeterOfThePolygonEng[]; +extern char szPerimeterOfThePolygonCat[]; +extern char szPerimeterOfThePolygonSpa[]; + +extern char szAreaOfThePolygonEng[]; +extern char 
szAreaOfThePolygonCat[]; +extern char szAreaOfThePolygonSpa[]; + +extern char szNumberOfArcsEng[]; +extern char szNumberOfArcsCat[]; +extern char szNumberOfArcsSpa[]; + +extern char szNumberOfElementaryPolygonsEng[]; +extern char szNumberOfElementaryPolygonsCat[]; +extern char szNumberOfElementaryPolygonsSpa[]; + +#ifndef GDAL_COMPILATION +char *CPLStrlcpy(char *dest, const char *src, size_t maxlen); +#endif +char *MM_oemansi(char *szcadena); +char *MM_oemansi_n(char *szcadena, size_t n_bytes); +void MM_InitializeField(struct MM_FIELD *camp); +struct MM_FIELD *MM_CreateAllFields(MM_EXT_DBF_N_FIELDS ncamps); +MM_FIRST_RECORD_OFFSET_TYPE +MM_GiveOffsetExtendedFieldName(const struct MM_FIELD *camp); +struct MM_DATA_BASE_XP *MM_CreateDBFHeader(MM_EXT_DBF_N_FIELDS n_camps, + MM_BYTE nCharSet); +MM_BYTE MM_DBFFieldTypeToVariableProcessing(MM_BYTE tipus_camp_DBF); +void MM_ReleaseMainFields(struct MM_DATA_BASE_XP *data_base_XP); +void MM_ReleaseDBFHeader(struct MM_DATA_BASE_XP *data_base_XP); +MM_BOOLEAN MM_CreateDBFFile(struct MM_DATA_BASE_XP *bd_xp, + const char *NomFitxer); +int MM_DuplicateFieldDBXP(struct MM_FIELD *camp_final, + const struct MM_FIELD *camp_inicial); +int MM_WriteNRecordsMMBD_XPFile(struct MMAdmDatabase *MMAdmDB); + +size_t MM_DefineFirstPolygonFieldsDB_XP(struct MM_DATA_BASE_XP *bd_xp); +size_t MM_DefineFirstArcFieldsDB_XP(struct MM_DATA_BASE_XP *bd_xp); +size_t MM_DefineFirstNodeFieldsDB_XP(struct MM_DATA_BASE_XP *bd_xp); +size_t MM_DefineFirstPointFieldsDB_XP(struct MM_DATA_BASE_XP *bd_xp); +int MM_ModifyFieldNameAndDescriptorIfPresentBD_XP( + struct MM_FIELD *camp, struct MM_DATA_BASE_XP *bd_xp, + MM_BOOLEAN no_modifica_descriptor, size_t mida_nom); + +int MMWriteValueToRecordDBXP(struct MiraMonVectLayerInfo *hMiraMonLayer, + char *registre, const struct MM_FIELD *camp, + const void *valor, MM_BOOLEAN is_64); +int MM_SecureCopyStringFieldValue(char **pszStringDst, const char *pszStringSrc, + MM_EXT_DBF_N_FIELDS *nStringCurrentLength); +int MM_ChangeDBFWidthField(struct MM_DATA_BASE_XP *data_base_XP, + MM_EXT_DBF_N_FIELDS quincamp, + MM_BYTES_PER_FIELD_TYPE_DBF novaamplada, + MM_BYTE nou_decimals, + MM_BYTE que_fer_amb_reformatat_decimals); + +int MM_GetArcHeights(double *coord_z, FILE_TYPE *pF, MM_N_VERTICES_TYPE n_vrt, + struct MM_ZD *pZDescription, uint32_t flag); + +// Strings +char *MM_RemoveInitial_and_FinalQuotationMarks(char *cadena); +char *MM_RemoveWhitespacesFromEndOfString(char *str); +char *MM_RemoveLeadingWhitespaceOfString(char *cadena); + +// DBF +struct MM_ID_GRAFIC_MULTIPLE_RECORD *MMCreateExtendedDBFIndex( + FILE_TYPE *f, MM_EXT_DBF_N_RECORDS n_dbf, + MM_FIRST_RECORD_OFFSET_TYPE offset_1era, + MM_ACCUMULATED_BYTES_TYPE_DBF bytes_per_fitxa, + MM_ACCUMULATED_BYTES_TYPE_DBF bytes_acumulats_id_grafic, + MM_BYTES_PER_FIELD_TYPE_DBF bytes_id_grafic, MM_BOOLEAN *isListField, + MM_EXT_DBF_N_RECORDS *nMaxN); + +int MM_ReadExtendedDBFHeaderFromFile(const char *szFileName, + struct MM_DATA_BASE_XP *pMMBDXP, + const char *pszRelFile); + +#ifdef GDAL_COMPILATION +CPL_C_END // Necessary for compiling in GDAL project +#endif +#endif //__MM_GDAL_FUNCTIONS_H diff --git a/ogr/ogrsf_frmts/miramon/mm_gdal_structures.h b/ogr/ogrsf_frmts/miramon/mm_gdal_structures.h new file mode 100644 index 000000000000..58133dca3ff1 --- /dev/null +++ b/ogr/ogrsf_frmts/miramon/mm_gdal_structures.h @@ -0,0 +1,116 @@ +#ifndef __MM_GDAL_STRUCTURES_H +#define __MM_GDAL_STRUCTURES_H +/* -------------------------------------------------------------------- */ +/* Constants used in GDAL and in 
MiraMon */ +/* -------------------------------------------------------------------- */ +#ifdef GDAL_COMPILATION +#include "cpl_conv.h" // For FILE_TYPE +#include "mm_gdal_constants.h" +#else +#include "F64_str.h" // For FILE_64 +#include "mm_gdal\mm_gdal_constants.h" +#endif + +#include "mm_constants.h" + +#ifdef GDAL_COMPILATION +CPL_C_START // Necessary for compiling in GDAL project +#endif + +#ifdef GDAL_COMPILATION +#define FILE_TYPE VSILFILE +#else +#define FILE_TYPE FILE_64 +#endif + + /* Internal field of an extended DBF. It is a copy of a MiraMon internal +structure but translated to be understood by anyone who wants to +review the code of the driver. +*/ + + struct MM_FIELD // In MiraMon code: MM_CAMP +{ + // Name of the field + char FieldName[MM_MAX_LON_FIELD_NAME_DBF]; // In MiraMon code: NomCamp + + // Name of the field in dBASEIII + char ClassicalDBFFieldName + [MM_MAX_LON_CLASSICAL_FIELD_NAME_DBF]; // In MiraMon code: + // NomCampDBFClassica + + // Type of the field C, N, D, L, M, F, G and B + char FieldType; // In MiraMon code: TipusDeCamp + MM_BOOLEAN Is64; // Is an signed 64 bit integer + + // Number of decimal places if it is a float + MM_BYTE DecimalsIfFloat; // In MiraMon code: DecimalsSiEsFloat + + // Number of bytes of a field + MM_BYTES_PER_FIELD_TYPE_DBF + BytesPerField; // In MiraMon code: MM_TIPUS_BYTES_PER_CAMP_DBF BytesPerCamp + + // Accumulated bytes before a field starts + MM_ACCUMULATED_BYTES_TYPE_DBF + AccumulatedBytes; // In MiraMon code: + // MM_TIPUS_BYTES_ACUMULATS_DBF BytesAcumulats + + // Not used in GDAL + char + *Separator[MM_NUM_IDIOMES_MD_MULTIDIOMA]; // In MiraMon code: separador + + // Description of the field (alternative name) + char FieldDescription + [MM_NUM_IDIOMES_MD_MULTIDIOMA] + [MM_MAX_LON_DESCRIPCIO_CAMP_DBF]; // In MiraMon code: DescripcioCamp + + MM_BYTE DesiredWidth; // In MiraMon code: AmpleDesitjat + MM_BYTE OriginalDesiredWidth; // In MiraMon code: AmpleDesitjatOriginal + + MM_BYTE reserved_1 + [MM_MAX_LON_RESERVAT_1_CAMP_BD_XP]; // In MiraMon code: reservat_1 + + MM_BYTE reserved_2 + [MM_MAX_LON_RESERVAT_2_CAMP_BD_XP]; // In MiraMon code: reservat_2 + MM_BYTE MDX_field_flag; // In MiraMon code: MDX_camp_flag + MM_BYTE GeoTopoTypeField; // In MiraMon code: TipusCampGeoTopo +}; + +struct MM_DATA_BASE_XP // MiraMon table Structure +{ + // Extended DBF file name + char szFileName[MM_CPL_PATH_BUF_SIZE]; // In MiraMon code: szNomFitxer + + FILE_TYPE *pfDataBase; // In MiraMon code: pfBaseDades + + // Charset of the DBF + MM_BYTE CharSet; + + char ReadingMode[4]; // In MiraMon code: ModeLectura + MM_EXT_DBF_N_RECORDS nRecords; // In MiraMon code: n_fitxes + MM_ACCUMULATED_BYTES_TYPE_DBF + BytesPerRecord; // In MiraMon code: BytesPerFitxa + MM_EXT_DBF_N_FIELDS nFields; // In MiraMon code: ncamps + struct MM_FIELD *pField; // In MiraMon code: Camp + MM_FIRST_RECORD_OFFSET_TYPE + FirstRecordOffset; // In MiraMon code: OffsetPrimeraFitxa + MM_EXT_DBF_N_FIELDS IdGraficField; // In MiraMon code: CampIdGrafic + MM_EXT_DBF_N_FIELDS IdEntityField; // In MiraMon code: CampIdEntitat + short int year; // In MiraMon code: any + MM_BYTE month; // In MiraMon code: mes + MM_BYTE day; // In MiraMon code: dia + + MM_BYTE dbf_version; // In MiraMon code: versio_dbf + + MM_BYTE reserved_1 // Used in extended DBF format to recompose BytesPerRecord + [MM_MAX_LON_RESERVAT_1_BASE_DADES_XP]; // In MiraMon code: reservat_1 + MM_BYTE transaction_flag; + MM_BYTE encryption_flag; + MM_BYTE dbf_on_a_LAN[MM_MAX_LON_DBF_ON_A_LAN_BASE_DADES_XP]; + MM_BYTE MDX_flag; + 
MM_BYTE reserved_2 // Used in extended DBF format to recompose BytesPerRecord + [MM_MAX_LON_RESERVAT_2_BASE_DADES_XP]; // In MiraMon code: reservat_2 +}; +#ifdef GDAL_COMPILATION +CPL_C_END // Necessary for compiling in GDAL project +#endif +#endif //__MM_GDAL_STRUCTURES_H diff --git a/ogr/ogrsf_frmts/miramon/mm_rdlayr.c b/ogr/ogrsf_frmts/miramon/mm_rdlayr.c new file mode 100644 index 000000000000..4a8a2ec09126 --- /dev/null +++ b/ogr/ogrsf_frmts/miramon/mm_rdlayr.c @@ -0,0 +1,707 @@ +/****************************************************************************** + * + * Project: OpenGIS Simple Features Reference Implementation + * Purpose: C API to read a MiraMon layer + * Author: Abel Pau, a.pau@creaf.uab.cat, based on the MiraMon codes, + * mainly written by Xavier Pons, Joan Maso (correctly written + * "Mas0xF3"), Abel Pau, Nuria Julia (N0xFAria Juli0xE0), + * Xavier Calaf, Lluis (Llu0xEDs) Pesquer and Alaitz Zabala, from + * CREAF and Universitat Autonoma (Aut0xF2noma) de Barcelona. + * For a complete list of contributors: + * https://www.miramon.cat/eng/QuiSom.htm + ****************************************************************************** + * Copyright (c) 2024, Xavier Pons + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. 
+ ****************************************************************************/ + +#ifdef GDAL_COMPILATION +#include "ogr_api.h" // For CPL_C_START +#include "mm_wrlayr.h" +#include "mm_wrlayr.h" // For MMReadHeader() +#include "mm_gdal_functions.h" +#include "mm_gdal_constants.h" +#else +#include "CmptCmp.h" // Compatibility between compilers +#include "mm_gdal\mm_wrlayr.h" // For MMReadHeader() +#include "mm_gdal\mm_gdal_functions.h" // For int MM_GetArcHeights() +#include "mm_constants.h" +#endif + +#include "mm_rdlayr.h" + +#ifdef GDAL_COMPILATION +CPL_C_START // Necessary for compiling in GDAL project +#endif + + /* -------------------------------------------------------------------- */ + /* Reading MiraMon format file functions */ + /* -------------------------------------------------------------------- */ + + // Initializes a MiraMon vector layer for reading + int + MMInitLayerToRead(struct MiraMonVectLayerInfo *hMiraMonLayer, + FILE_TYPE *m_fp, const char *pszFilename) +{ + char szResult[MM_MAX_ID_SNY + 10]; + char *pszSRS; + + memset(hMiraMonLayer, 0, sizeof(*hMiraMonLayer)); + if (MMReadHeader(m_fp, &hMiraMonLayer->TopHeader)) + { + MMCPLError(CE_Failure, CPLE_NoWriteAccess, + "Error reading header of file %s", pszFilename); + return 1; + } + hMiraMonLayer->ReadOrWrite = MM_READING_MODE; + strcpy(hMiraMonLayer->pszFlags, "rb"); + + hMiraMonLayer->pszSrcLayerName = strdup_function(pszFilename); + + hMiraMonLayer->LayerVersion = + (char)MMGetVectorVersion(&hMiraMonLayer->TopHeader); + if (hMiraMonLayer->LayerVersion == MM_UNKNOWN_VERSION) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "MiraMon version file unknown."); + return 1; + } + if (hMiraMonLayer->LayerVersion == MM_LAST_VERSION) + hMiraMonLayer->nHeaderDiskSize = MM_HEADER_SIZE_64_BITS; + else if (hMiraMonLayer->LayerVersion == MM_32BITS_VERSION) + hMiraMonLayer->nHeaderDiskSize = MM_HEADER_SIZE_32_BITS; + else + hMiraMonLayer->nHeaderDiskSize = MM_HEADER_SIZE_64_BITS; + + if (hMiraMonLayer->TopHeader.aFileType[0] == 'P' && + hMiraMonLayer->TopHeader.aFileType[1] == 'N' && + hMiraMonLayer->TopHeader.aFileType[2] == 'T') + { + if (hMiraMonLayer->TopHeader.Flag & MM_LAYER_3D_INFO) + { + hMiraMonLayer->TopHeader.bIs3d = 1; + hMiraMonLayer->eLT = MM_LayerType_Point3d; + } + else + hMiraMonLayer->eLT = MM_LayerType_Point; + + hMiraMonLayer->bIsPoint = TRUE; + } + else if (hMiraMonLayer->TopHeader.aFileType[0] == 'A' && + hMiraMonLayer->TopHeader.aFileType[1] == 'R' && + hMiraMonLayer->TopHeader.aFileType[2] == 'C') + { + if (hMiraMonLayer->TopHeader.Flag & MM_LAYER_3D_INFO) + { + hMiraMonLayer->TopHeader.bIs3d = 1; + hMiraMonLayer->eLT = MM_LayerType_Arc3d; + } + else + hMiraMonLayer->eLT = MM_LayerType_Arc; + + hMiraMonLayer->bIsArc = TRUE; + } + else if (hMiraMonLayer->TopHeader.aFileType[0] == 'P' && + hMiraMonLayer->TopHeader.aFileType[1] == 'O' && + hMiraMonLayer->TopHeader.aFileType[2] == 'L') + { + // 3D + if (hMiraMonLayer->TopHeader.Flag & MM_LAYER_3D_INFO) + { + hMiraMonLayer->TopHeader.bIs3d = 1; + hMiraMonLayer->eLT = MM_LayerType_Pol3d; + } + else + hMiraMonLayer->eLT = MM_LayerType_Pol; + + hMiraMonLayer->bIsPolygon = TRUE; + + if (hMiraMonLayer->TopHeader.Flag & MM_LAYER_MULTIPOLYGON) + hMiraMonLayer->TopHeader.bIsMultipolygon = 1; + } + + //hMiraMonLayer->Version = MM_VECTOR_LAYER_LAST_VERSION; + + if (MMInitLayerByType(hMiraMonLayer)) + return 1; + hMiraMonLayer->bIsBeenInit = 1; + + // Get the basic metadata + pszSRS = MMReturnValueFromSectionINIFile( + hMiraMonLayer->pszMainREL_LayerName, + 
"SPATIAL_REFERENCE_SYSTEM:HORIZONTAL", "HorizontalSystemIdentifier"); + if (pszSRS) + hMiraMonLayer->pSRS = pszSRS; + else + hMiraMonLayer->pSRS = nullptr; + + if (!hMiraMonLayer->pSRS && hMiraMonLayer->bIsPolygon) + { + pszSRS = MMReturnValueFromSectionINIFile( + hMiraMonLayer->MMPolygon.MMArc.pszREL_LayerName, + "SPATIAL_REFERENCE_SYSTEM:HORIZONTAL", + "HorizontalSystemIdentifier"); + + hMiraMonLayer->pSRS = pszSRS; + } + + if (!ReturnEPSGCodeSRSFromMMIDSRS(hMiraMonLayer->pSRS, szResult)) + { + if (MMIsEmptyString(szResult)) + hMiraMonLayer->nSRS_EPSG = 0; + else + hMiraMonLayer->nSRS_EPSG = atoi(szResult); + } + else + hMiraMonLayer->nSRS_EPSG = 0; + + if (hMiraMonLayer->nSRS_EPSG == 0) + { + if (hMiraMonLayer->pSRS && strcmp(hMiraMonLayer->pSRS, "plane")) + { + MMCPLWarning(CE_Warning, CPLE_NotSupported, + "The MiraMon layer SRS has no equivalent " + "in EPSG code"); + } + } + + // If more nNumStringToOperate is needed, it'll be increased. + hMiraMonLayer->nNumStringToOperate = 0; + if (MMResizeStringToOperateIfNeeded(hMiraMonLayer, 5000)) + return 1; + + return 0; +} + +// Reads stringline coordinates and puts them in a buffer +static int +MMAddStringLineCoordinates(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_INTERNAL_FID i_elem, uint32_t flag_z, + MM_N_VERTICES_TYPE nStartVertice, + MM_BOOLEAN bAvoidFirst, unsigned char VFG) +{ + FILE_TYPE *pF; + struct MM_AH *pArcHeader; + struct MiraMonArcLayer *pMMArc; + struct MM_ZD *pZDescription = nullptr; + + if (hMiraMonLayer->bIsPolygon) + pMMArc = &hMiraMonLayer->MMPolygon.MMArc; + else + pMMArc = &hMiraMonLayer->MMArc; + + pF = pMMArc->pF; + pArcHeader = pMMArc->pArcHeader; + if (hMiraMonLayer->TopHeader.bIs3d) + pZDescription = pMMArc->pZSection.pZDescription; + + fseek_function(pF, pArcHeader[i_elem].nOffset, SEEK_SET); + + if (hMiraMonLayer->bIsPolygon && (VFG & MM_POL_REVERSE_ARC)) // && + //nStartVertice > 0) + { + MM_N_VERTICES_TYPE nIVertice; + + // Reading arcs vertices in an inverse order + if (MMResizeMM_POINT2DPointer( + &hMiraMonLayer->ReadFeature.pCoord, + &hMiraMonLayer->ReadFeature.nMaxpCoord, + nStartVertice + pArcHeader[i_elem].nElemCount * + 2, // ask for twice memory to reverse + 0, 0)) + return 1; + + // Get the vertices far away from their place to be inverted later + if (pArcHeader[i_elem].nElemCount != + fread_function(hMiraMonLayer->ReadFeature.pCoord + nStartVertice + + pArcHeader[i_elem].nElemCount, + sizeof(*hMiraMonLayer->ReadFeature.pCoord), + (size_t)pArcHeader[i_elem].nElemCount, pF)) + { + return 1; + } + + if (hMiraMonLayer->TopHeader.bIs3d) + { + if (MMResizeDoublePointer( + &hMiraMonLayer->ReadFeature.pZCoord, + &hMiraMonLayer->ReadFeature.nMaxpZCoord, + nStartVertice + pArcHeader[i_elem].nElemCount * 2, 0, 0)) + return 1; + + // +nStartVertice + MM_GetArcHeights(hMiraMonLayer->ReadFeature.pZCoord + + nStartVertice + pArcHeader[i_elem].nElemCount, + pF, pArcHeader[i_elem].nElemCount, + pZDescription + i_elem, flag_z); + + // If there is a value for Z-nodata in GDAL this lines can be uncommented + // and MM_GDAL_NODATA_COORD_Z can be defined + /*if(!DOUBLES_DIFERENTS_DJ(punts_z[k], MM_NODATA_COORD_Z)) + { + MM_N_VERTICES_TYPE nIVertice; + for(nIVertice=0; nIVertice<pArcHeader[i_elem].nElemCount; nIVertice++) + hMiraMonLayer->ReadFeature.pZCoord[nIVertice]=MM_GDAL_NODATA_COORD_Z; + } + */ + } + + // Reverse the vertices while putting on their place + for (nIVertice = 0; nIVertice < pArcHeader[i_elem].nElemCount; + nIVertice++) + { + memcpy(hMiraMonLayer->ReadFeature.pCoord + nStartVertice - + 
((nStartVertice > 0 && bAvoidFirst) ? 1 : 0) + nIVertice, + hMiraMonLayer->ReadFeature.pCoord + nStartVertice + + 2 * pArcHeader[i_elem].nElemCount - nIVertice - 1, + sizeof(*hMiraMonLayer->ReadFeature.pCoord)); + + if (hMiraMonLayer->TopHeader.bIs3d) + { + memcpy(hMiraMonLayer->ReadFeature.pZCoord + nStartVertice - + ((nStartVertice > 0 && bAvoidFirst) ? 1 : 0) + + nIVertice, + hMiraMonLayer->ReadFeature.pZCoord + nStartVertice + + 2 * pArcHeader[i_elem].nElemCount - nIVertice - 1, + sizeof(*hMiraMonLayer->ReadFeature.pZCoord)); + } + } + } + else + { + // Reading arcs vertices + if (MMResizeMM_POINT2DPointer( + &hMiraMonLayer->ReadFeature.pCoord, + &hMiraMonLayer->ReadFeature.nMaxpCoord, + nStartVertice + pArcHeader[i_elem].nElemCount, 0, 0)) + return 1; + + if (pArcHeader[i_elem].nElemCount != + fread_function(hMiraMonLayer->ReadFeature.pCoord + nStartVertice - + (bAvoidFirst ? 1 : 0), + sizeof(*hMiraMonLayer->ReadFeature.pCoord), + (size_t)pArcHeader[i_elem].nElemCount, pF)) + { + return 1; + } + + if (hMiraMonLayer->TopHeader.bIs3d) + { + if (MMResizeDoublePointer( + &hMiraMonLayer->ReadFeature.pZCoord, + &hMiraMonLayer->ReadFeature.nMaxpZCoord, + nStartVertice + pArcHeader[i_elem].nElemCount, 0, 0)) + return 1; + + // +nStartVertice + MM_GetArcHeights(hMiraMonLayer->ReadFeature.pZCoord + + nStartVertice - (bAvoidFirst ? 1 : 0), + pF, pArcHeader[i_elem].nElemCount, + pZDescription + i_elem, flag_z); + + // If there is a value for Z-nodata in GDAL this lines can be uncommented + // and MM_GDAL_NODATA_COORD_Z can be defined + /*if(!DOUBLES_DIFERENTS_DJ(punts_z[k], MM_NODATA_COORD_Z)) + { + MM_N_VERTICES_TYPE nIVertice; + for(nIVertice=0; nIVertice<pArcHeader[i_elem].nElemCount; nIVertice++) + hMiraMonLayer->ReadFeature.pZCoord[nIVertice]=MM_GDAL_NODATA_COORD_Z; + } + */ + } + } + hMiraMonLayer->ReadFeature.nNumpCoord = + pArcHeader[i_elem].nElemCount - (bAvoidFirst ? 1 : 0); + + return 0; +} + +// Reads Polygon coordinates and puts them in a buffer +static int +MMGetMultiPolygonCoordinates(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_INTERNAL_FID i_pol, uint32_t flag_z) +{ + struct MM_PH *pPolHeader; + struct MM_AH *pArcHeader; + char *pBuffer; + MM_POLYGON_ARCS_COUNT nIndex; + MM_BOOLEAN bAvoidFirst; + MM_N_VERTICES_TYPE nNAcumulVertices = 0; + + // Checking if the index of the polygon is in the correct range. 
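+    // (The index must be below the element count declared in the top
+    // header; otherwise the polygon header dereferenced just below would be
+    // out of bounds.)
+    //
+    // Overview of what follows: the PAL (polygon-arc list) entry of this
+    // polygon is traversed twice. A first pass only reads each arc index to
+    // accumulate the total vertex count, so that pCoord can be resized once;
+    // the flush buffer offset is then rewound and a second pass re-reads
+    // every VFG flag and arc index, loads the arc coordinates through
+    // MMAddStringLineCoordinates() (reversing them when MM_POL_REVERSE_ARC
+    // is set) and closes a ring whenever MM_POL_END_RING is found.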
+ if (i_pol >= hMiraMonLayer->TopHeader.nElemCount) + return 1; + + MMResetFeatureGeometry(&hMiraMonLayer->ReadFeature); + MMResetFeatureRecord(&hMiraMonLayer->ReadFeature); + pPolHeader = hMiraMonLayer->MMPolygon.pPolHeader + i_pol; + + // It's accepted not having arcs in the universal polygon + if (!pPolHeader->nArcsCount) + { + if (i_pol == 0) + return 0; + else + return 1; + } + + if (MMResizeMiraMonPolygonArcs(&hMiraMonLayer->pArcs, + &hMiraMonLayer->nMaxArcs, + pPolHeader->nArcsCount, 0, 0)) + return 1; + + if (MMInitFlush(&hMiraMonLayer->FlushPAL, hMiraMonLayer->MMPolygon.pF, + hMiraMonLayer->MMPolygon.nPALElementSize * + pPolHeader->nArcsCount, + &pBuffer, pPolHeader->nOffset, 0)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + hMiraMonLayer->FlushPAL.pBlockWhereToSaveOrRead = (void *)pBuffer; + if (MMReadFlush(&hMiraMonLayer->FlushPAL)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + hMiraMonLayer->ReadFeature.nNRings = 0; + hMiraMonLayer->ReadFeature.nNumpCoord = 0; + if (MMResize_MM_N_VERTICES_TYPE_Pointer( + &hMiraMonLayer->ReadFeature.pNCoordRing, + &hMiraMonLayer->ReadFeature.nMaxpNCoordRing, + (MM_N_VERTICES_TYPE)hMiraMonLayer->ReadFeature.nNRings + 1, 10, 10)) + { + free_function(pBuffer); + return 1; + } + + if (MMResizeVFGPointer(&hMiraMonLayer->ReadFeature.flag_VFG, + &hMiraMonLayer->ReadFeature.nMaxVFG, + (MM_INTERNAL_FID)pPolHeader->nArcsCount, 0, + 0)) // Perhaps more memory than needed + { + free_function(pBuffer); + return 1; + } + + // Preparing memory for all coordinates + hMiraMonLayer->ReadFeature.pNCoordRing[hMiraMonLayer->ReadFeature.nNRings] = + 0; + for (nIndex = 0; nIndex < pPolHeader->nArcsCount; nIndex++) + { + hMiraMonLayer->FlushPAL.SizeOfBlockToBeSaved = + sizeof((hMiraMonLayer->pArcs + nIndex)->VFG); + hMiraMonLayer->FlushPAL.pBlockToBeSaved = + (void *)&(hMiraMonLayer->pArcs + nIndex)->VFG; + if (MMReadBlockFromBuffer(&hMiraMonLayer->FlushPAL)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // Arc index + if (MMReadGUInt64DependingOnVersion( + hMiraMonLayer, &hMiraMonLayer->FlushPAL, + &((hMiraMonLayer->pArcs + nIndex)->nIArc))) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + if (hMiraMonLayer->MMPolygon.MMArc.pArcHeader == nullptr) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // Checking if the index of the arc is in the correct range. 
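+        // (The arc index comes straight from the PAL section on disk, so it
+        // is validated against the element count of the polygon's internal
+        // ARC file before pArcHeader is dereferenced a few lines below.)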
+ if ((hMiraMonLayer->pArcs + nIndex)->nIArc >= + hMiraMonLayer->MMPolygon.TopArcHeader.nElemCount) + { + free_function(pBuffer); + return 1; + } + + pArcHeader = hMiraMonLayer->MMPolygon.MMArc.pArcHeader + + (hMiraMonLayer->pArcs + nIndex)->nIArc; + hMiraMonLayer->ReadFeature + .pNCoordRing[hMiraMonLayer->ReadFeature.nNRings] += + pArcHeader->nElemCount; + } + if (MMResizeMM_POINT2DPointer( + &hMiraMonLayer->ReadFeature.pCoord, + &hMiraMonLayer->ReadFeature.nMaxpCoord, + hMiraMonLayer->ReadFeature + .pNCoordRing[hMiraMonLayer->ReadFeature.nNRings], + 0, 0)) + { + free_function(pBuffer); + return 1; + } + + hMiraMonLayer->FlushPAL.CurrentOffset = 0; + + // Real work + hMiraMonLayer->ReadFeature.pNCoordRing[hMiraMonLayer->ReadFeature.nNRings] = + 0; + for (nIndex = 0; nIndex < pPolHeader->nArcsCount; nIndex++) + { + hMiraMonLayer->FlushPAL.SizeOfBlockToBeSaved = + sizeof((hMiraMonLayer->pArcs + nIndex)->VFG); + hMiraMonLayer->FlushPAL.pBlockToBeSaved = + (void *)&(hMiraMonLayer->pArcs + nIndex)->VFG; + if (MMReadBlockFromBuffer(&hMiraMonLayer->FlushPAL)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // Arc index + if (MMReadGUInt64DependingOnVersion( + hMiraMonLayer, &hMiraMonLayer->FlushPAL, + &((hMiraMonLayer->pArcs + nIndex)->nIArc))) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + bAvoidFirst = FALSE; + if (hMiraMonLayer->ReadFeature + .pNCoordRing[hMiraMonLayer->ReadFeature.nNRings] != 0) + bAvoidFirst = TRUE; + + // Add coordinates to hMiraMonLayer->ReadFeature.pCoord + if (MMAddStringLineCoordinates(hMiraMonLayer, + (hMiraMonLayer->pArcs + nIndex)->nIArc, + flag_z, nNAcumulVertices, bAvoidFirst, + (hMiraMonLayer->pArcs + nIndex)->VFG)) + { + free_function(pBuffer); + return 1; + } + + if (MMResize_MM_N_VERTICES_TYPE_Pointer( + &hMiraMonLayer->ReadFeature.pNCoordRing, + &hMiraMonLayer->ReadFeature.nMaxpNCoordRing, + (MM_N_VERTICES_TYPE)hMiraMonLayer->ReadFeature.nNRings + 1, 10, + 10)) + { + free_function(pBuffer); + return 1; + } + + hMiraMonLayer->ReadFeature + .pNCoordRing[hMiraMonLayer->ReadFeature.nNRings] += + hMiraMonLayer->ReadFeature.nNumpCoord; + nNAcumulVertices += hMiraMonLayer->ReadFeature.nNumpCoord; + if ((hMiraMonLayer->pArcs + nIndex)->VFG & MM_POL_END_RING) + { + hMiraMonLayer->ReadFeature + .flag_VFG[hMiraMonLayer->ReadFeature.nNRings] = + (hMiraMonLayer->pArcs + nIndex)->VFG; + hMiraMonLayer->ReadFeature.nNRings++; + hMiraMonLayer->ReadFeature + .pNCoordRing[hMiraMonLayer->ReadFeature.nNRings] = 0; + } + } + hMiraMonLayer->nNumArcs = pPolHeader->nArcsCount; + if (pBuffer) + free_function(pBuffer); + + return 0; +} + +// Reads the geographical part of a MiraMon layer feature +int MMGetGeoFeatureFromVector(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_INTERNAL_FID i_elem) +{ + FILE_TYPE *pF; + struct MM_ZD *pZDescription; + uint32_t flag_z; + int num; + double cz; + + if (hMiraMonLayer->nSelectCoordz == MM_SELECT_HIGHEST_COORDZ) + flag_z = MM_STRING_HIGHEST_ALTITUDE; + else if (hMiraMonLayer->nSelectCoordz == MM_SELECT_LOWEST_COORDZ) + flag_z = MM_STRING_LOWEST_ALTITUDE; + else + flag_z = 0L; + + if (hMiraMonLayer->bIsPoint) + { + pF = hMiraMonLayer->MMPoint.pF; + + // Getting to the i-th element offset + fseek_function(pF, + hMiraMonLayer->nHeaderDiskSize + + sizeof(MM_COORD_TYPE) * 2 * i_elem, + SEEK_SET); + + // Reading the point + if (MMResizeMM_POINT2DPointer(&hMiraMonLayer->ReadFeature.pCoord, + &hMiraMonLayer->ReadFeature.nMaxpCoord, + hMiraMonLayer->ReadFeature.nNumpCoord, 1, + 1)) + return 1; + + if (1 != 
fread_function(hMiraMonLayer->ReadFeature.pCoord, + sizeof(MM_COORD_TYPE) * 2, 1, pF)) + { + return 1; + } + + hMiraMonLayer->ReadFeature.nNRings = 1; + + if (MMResize_MM_N_VERTICES_TYPE_Pointer( + &hMiraMonLayer->ReadFeature.pNCoordRing, + &hMiraMonLayer->ReadFeature.nMaxpNCoordRing, 1, 0, 1)) + return 1; + + hMiraMonLayer->ReadFeature.pNCoordRing[0] = 1; + + if (hMiraMonLayer->TopHeader.bIs3d) + { + pZDescription = + hMiraMonLayer->MMPoint.pZSection.pZDescription + i_elem; + if (pZDescription->nZCount == INT_MIN) + return 1; + num = MM_ARC_TOTAL_N_HEIGHTS_DISK(pZDescription->nZCount, 1); + + if (MMResizeDoublePointer(&hMiraMonLayer->ReadFeature.pZCoord, + &hMiraMonLayer->ReadFeature.nMaxpZCoord, + 1, 1, 1)) + return 1; + + if (num == 0) + hMiraMonLayer->ReadFeature.pZCoord[0] = MM_NODATA_COORD_Z; + else + { + if (flag_z == MM_STRING_HIGHEST_ALTITUDE) // Max z + cz = pZDescription->dfBBmaxz; + else if (flag_z == MM_STRING_LOWEST_ALTITUDE) // Min z + cz = pZDescription->dfBBminz; + else + { + // Reading the first z coordinate + fseek_function(pF, pZDescription->nOffsetZ, SEEK_SET); + if ((size_t)1 != + fread_function( + &cz, sizeof(*hMiraMonLayer->ReadFeature.pZCoord), 1, + pF)) + { + return 1; + } + } + // If there is a value for Z-nodata in GDAL this lines can be uncommented + // and MM_GDAL_NODATA_COORD_Z can be defined + /*if(!DOUBLES_DIFERENTS_DJ(cz, MM_NODATA_COORD_Z)) + hMiraMonLayer->ReadFeature.pZCoord[0]=MM_GDAL_NODATA_COORD_Z; + else */ + hMiraMonLayer->ReadFeature.pZCoord[0] = cz; + } + } + + return 0; + } + + // Stringlines + if (hMiraMonLayer->bIsArc && !hMiraMonLayer->bIsPolygon) + { + if (MMAddStringLineCoordinates(hMiraMonLayer, i_elem, flag_z, 0, FALSE, + 0)) + return 1; + + if (MMResize_MM_N_VERTICES_TYPE_Pointer( + &hMiraMonLayer->ReadFeature.pNCoordRing, + &hMiraMonLayer->ReadFeature.nMaxpNCoordRing, 1, 0, 1)) + return 1; + + hMiraMonLayer->ReadFeature.pNCoordRing[0] = + hMiraMonLayer->ReadFeature.nNumpCoord; + + return 0; + } + + // Polygons or multipolygons + if (MMGetMultiPolygonCoordinates(hMiraMonLayer, i_elem, flag_z)) + return 1; + + return 0; +} + +// Reads the header of a MiraMon DBF +// Please read the format at this link: +// https://www.miramon.cat/new_note/usa/notes/DBF_estesa.pdf +int MM_ReadExtendedDBFHeader(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + const char *pszRelFile = nullptr; + struct MM_DATA_BASE_XP *pMMBDXP; + const char *szDBFFileName = nullptr; + + // If read don't read again. It happens when Polygon reads + // the database and then in initArc() it's read again. 
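+    // (pMMBDXP is only assigned once the header has been read, so the early
+    // return below makes repeated calls harmless.)
+    // The remainder of the function allocates the MM_DATA_BASE_XP structure,
+    // picks the extended DBF file name and companion .rel file from the
+    // point, arc or polygon sub-structure, delegates the parsing to
+    // MM_ReadExtendedDBFHeaderFromFile() and then closes the DBF file, since
+    // only the header is needed at this point.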
+ if (hMiraMonLayer->pMMBDXP) + return 0; + + pMMBDXP = hMiraMonLayer->pMMBDXP = calloc_function(sizeof(*pMMBDXP)); + + if (hMiraMonLayer->bIsPoint) + { + hMiraMonLayer->MMPoint.MMAdmDB.pMMBDXP = pMMBDXP; + szDBFFileName = hMiraMonLayer->MMPoint.MMAdmDB.pszExtDBFLayerName; + pszRelFile = hMiraMonLayer->MMPoint.pszREL_LayerName; + } + else if (hMiraMonLayer->bIsArc && !hMiraMonLayer->bIsPolygon) + { + hMiraMonLayer->MMArc.MMAdmDB.pMMBDXP = pMMBDXP; + szDBFFileName = hMiraMonLayer->MMArc.MMAdmDB.pszExtDBFLayerName; + pszRelFile = hMiraMonLayer->MMArc.pszREL_LayerName; + } + else if (hMiraMonLayer->bIsPolygon) + { + hMiraMonLayer->MMPolygon.MMAdmDB.pMMBDXP = pMMBDXP; + szDBFFileName = hMiraMonLayer->MMPolygon.MMAdmDB.pszExtDBFLayerName; + pszRelFile = hMiraMonLayer->MMPolygon.pszREL_LayerName; + } + + if (MM_ReadExtendedDBFHeaderFromFile(szDBFFileName, pMMBDXP, pszRelFile)) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "Error reading the format in the DBF file %s.", + szDBFFileName); + return 1; + } + + fclose_and_nullify(&pMMBDXP->pfDataBase); + return 0; +} + +#ifdef GDAL_COMPILATION +CPL_C_END // Necessary for compiling in GDAL project +#endif diff --git a/ogr/ogrsf_frmts/miramon/mm_rdlayr.h b/ogr/ogrsf_frmts/miramon/mm_rdlayr.h new file mode 100644 index 000000000000..905e8ea52ec2 --- /dev/null +++ b/ogr/ogrsf_frmts/miramon/mm_rdlayr.h @@ -0,0 +1,22 @@ +#ifndef __MMRDLAYR_H +#define __MMRDLAYR_H + +#ifndef GDAL_COMPILATION +#include "mm_gdal\mm_gdal_driver_structs.h" +#else +//#include "ogr_api.h" // For CPL_C_START +#include "mm_gdal_driver_structs.h" +CPL_C_START // Necessary for compiling in GDAL project +#endif + +int MMInitLayerToRead(struct MiraMonVectLayerInfo *hMiraMonLayer, + FILE_TYPE *m_fp, const char *pszFilename); + +int MMGetGeoFeatureFromVector(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_INTERNAL_FID i_elem); +int MM_ReadExtendedDBFHeader(struct MiraMonVectLayerInfo *hMiraMonLayer); + +#ifdef GDAL_COMPILATION +CPL_C_END // Necessary for compiling in GDAL project +#endif +#endif //__MMRDLAYR_H diff --git a/ogr/ogrsf_frmts/miramon/mm_wrlayr.c b/ogr/ogrsf_frmts/miramon/mm_wrlayr.c new file mode 100644 index 000000000000..adf5c5d295f7 --- /dev/null +++ b/ogr/ogrsf_frmts/miramon/mm_wrlayr.c @@ -0,0 +1,7456 @@ +/****************************************************************************** + * + * Project: OpenGIS Simple Features Reference Implementation + * Purpose: C API to create a MiraMon layer + * Author: Abel Pau, a.pau@creaf.uab.cat, based on the MiraMon codes, + * mainly written by Xavier Pons, Joan Maso (correctly written + * "Mas0xF3"), Abel Pau, Nuria Julia (N0xFAria Juli0xE0), + * Xavier Calaf, Lluis (Llu0xEDs) Pesquer and Alaitz Zabala, from + * CREAF and Universitat Autonoma (Aut0xF2noma) de Barcelona. 
+ * For a complete list of contributors: + * https://www.miramon.cat/eng/QuiSom.htm + ****************************************************************************** + * Copyright (c) 2024, Xavier Pons + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + ****************************************************************************/ + +#ifdef GDAL_COMPILATION +#include "mm_wrlayr.h" +#include "mm_gdal_functions.h" +#include "mm_gdal_constants.h" +#include "mm_rdlayr.h" // For MM_ReadExtendedDBFHeader() +#include "gdal.h" // For GDALDatasetH +#include "ogr_srs_api.h" // For OSRGetAuthorityCode +#include "cpl_string.h" // For CPL_ENC_UTF8 +#else +#include "CmptCmp.h" // Compatibility between compilers +#include "PrjMMVGl.h" // For a DirectoriPrograma +#include "mm_gdal\mm_wrlayr.h" // For fseek_function() +#include "mm_gdal\mm_gdal_functions.h" // For CPLStrlcpy() +#include "mm_gdal\mm_rdlayr.h" // For MM_ReadExtendedDBFHeader() +#include "msg.h" // For ErrorMsg() +#ifdef _WIN64 +#include "gdal\release-1911-x64\cpl_string.h" // Per a CPL_ENC_UTF8 +#else +#include "gdal\release-1911-32\cpl_string.h" // Per a CPL_ENC_UTF8 +#endif +#endif + +#ifdef GDAL_COMPILATION +CPL_C_START // Necessary for compiling in GDAL project +#endif // GDAL_COMPILATION + + /* -------------------------------------------------------------------- */ + /* Header Functions */ + /* -------------------------------------------------------------------- */ + int + MMAppendBlockToBuffer(struct MM_FLUSH_INFO *FlushInfo); +void MMInitBoundingBox(struct MMBoundingBox *dfBB); +int MMWriteAHArcSection(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_FILE_OFFSET DiskOffset); +int MMWriteNHNodeSection(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_FILE_OFFSET DiskOffset); +int MMWritePHPolygonSection(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_FILE_OFFSET DiskOffset); +int MMAppendIntegerDependingOnVersion( + struct MiraMonVectLayerInfo *hMiraMonLayer, struct MM_FLUSH_INFO *FlushInfo, + uint32_t *nUL32, GUInt64 nUI64); +int MMMoveFromFileToFile(FILE_TYPE *pSrcFile, FILE_TYPE *pDestFile, + MM_FILE_OFFSET *nOffset); +int MMResizeZSectionDescrPointer(struct MM_ZD **pZDescription, GUInt64 *nMax, + GUInt64 nNum, GUInt64 nIncr, + GUInt64 nProposedMax); +int MMResizeArcHeaderPointer(struct MM_AH **pArcHeader, GUInt64 *nMax, + GUInt64 nNum, GUInt64 nIncr, GUInt64 nProposedMax); +int MMResizeNodeHeaderPointer(struct MM_NH **pNodeHeader, GUInt64 *nMax, + GUInt64 nNum, GUInt64 nIncr, + GUInt64 nProposedMax); +int 
MMResizePolHeaderPointer(struct MM_PH **pPolHeader, GUInt64 *nMax, + GUInt64 nNum, GUInt64 nIncr, GUInt64 nProposedMax); +void MMUpdateBoundingBoxXY(struct MMBoundingBox *dfBB, + struct MM_POINT_2D *pCoord); +void MMUpdateBoundingBox(struct MMBoundingBox *dfBBToBeAct, + struct MMBoundingBox *dfBBWithData); +int MMCheckVersionFor3DOffset(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_FILE_OFFSET nOffset, + MM_INTERNAL_FID nElemCount); +int MMCheckVersionOffset(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_FILE_OFFSET OffsetToCheck); +int MMCheckVersionForFID(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_INTERNAL_FID FID); + +// Extended DBF functions +int MMCreateMMDB(struct MiraMonVectLayerInfo *hMiraMonLayer); +int MMAddDBFRecordToMMDB(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MiraMonFeature *hMMFeature); +int MMAddPointRecordToMMDB(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MiraMonFeature *hMMFeature, + MM_INTERNAL_FID nElemCount); +int MMAddArcRecordToMMDB(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MiraMonFeature *hMMFeature, + MM_INTERNAL_FID nElemCount, struct MM_AH *pArcHeader); +int MMAddNodeRecordToMMDB(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_INTERNAL_FID nElemCount, + struct MM_NH *pNodeHeader); +int MMAddPolygonRecordToMMDB(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MiraMonFeature *hMMFeature, + MM_INTERNAL_FID nElemCount, + MM_N_VERTICES_TYPE nVerticesCount, + struct MM_PH *pPolHeader); +int MMCloseMMBD_XP(struct MiraMonVectLayerInfo *hMiraMonLayer); +void MMDestroyMMDB(struct MiraMonVectLayerInfo *hMiraMonLayer); + +/* -------------------------------------------------------------------- */ +/* Managing errors and warnings */ +/* -------------------------------------------------------------------- */ + +#ifndef GDAL_COMPILATION +void MMCPLError(int code, const char *fmt, ...) +{ + char szBigEnoughBuffer[1024]; + + va_list args; + va_start(args, fmt); + vsnprintf(szBigEnoughBuffer, sizeof(szBigEnoughBuffer), fmt, args); + ErrorMsg(szBigEnoughBuffer); + va_end(args); +} + +void MMCPLWarning(int code, const char *fmt, ...) +{ + char szBigEnoughBuffer[1024]; + + va_list args; + va_start(args, fmt); + vsnprintf(szBigEnoughBuffer, sizeof(szBigEnoughBuffer), fmt, args); + InfoMsg(szBigEnoughBuffer); + va_end(args); +} + +void MMCPLDebug(int code, const char *fmt, ...) +{ + char szBigEnoughBuffer[1024]; + + va_list args; + va_start(args, fmt); + vsnprintf(szBigEnoughBuffer, sizeof(szBigEnoughBuffer), fmt, args); + printf(szBigEnoughBuffer); /*ok*/ + va_end(args); +} + +int snprintf(char *str, size_t size, const char *format, ...) +{ + int result; + va_list args; + + va_start(args, format); + result = vsnprintf(str, size, format, args); + va_end(args); + + return result; +} +#endif + +// Checks for potential arithmetic overflow when performing multiplication +// operations between two GUInt64 values and converting the result to size_t. +// Important for 32 vs. 64 bit compiling compatibility. 
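+// It returns 1 (failure) when either argument does not fit in a size_t or
+// when the product nCount * nSize would exceed SIZE_MAX (a much lower cap is
+// used in fuzzing builds), and 0 when the multiplication is safe to perform.
+// Minimal sketch of the calling pattern used throughout this file (nRecords
+// and pRecords are illustrative names, not identifiers of this driver):
+//
+//     if (MMCheckSize_t(nRecords, sizeof(*pRecords)))
+//         return 1; // the allocation below would overflow size_t
+//     pRecords = calloc_function((size_t)nRecords * sizeof(*pRecords));
+//     if (!pRecords)
+//         return 1;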
+int MMCheckSize_t(GUInt64 nCount, GUInt64 nSize) +{ + if ((size_t)nCount != nCount) + return 1; + + if ((size_t)nSize != nSize) + return 1; + +#ifndef FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION + if (nCount != 0 && nSize > SIZE_MAX / nCount) +#else + if (nCount != 0 && nSize > (1000 * 1000 * 1000U) / nCount) +#endif + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, "Overflow in MMCheckSize_t()"); + return 1; + } + return 0; +} + +/* -------------------------------------------------------------------- */ +/* Layer Functions: Version */ +/* -------------------------------------------------------------------- */ +int MMGetVectorVersion(struct MM_TH *pTopHeader) +{ + if ((pTopHeader->aLayerVersion[0] == ' ' || + pTopHeader->aLayerVersion[0] == '0') && + pTopHeader->aLayerVersion[1] == '1' && + pTopHeader->aLayerSubVersion == '1') + return MM_32BITS_VERSION; + + if ((pTopHeader->aLayerVersion[0] == ' ' || + pTopHeader->aLayerVersion[0] == '0') && + pTopHeader->aLayerVersion[1] == '2' && + pTopHeader->aLayerSubVersion == '0') + return MM_64BITS_VERSION; + + return MM_UNKNOWN_VERSION; +} + +static void MMSet1_1Version(struct MM_TH *pTopHeader) +{ + pTopHeader->aLayerVersion[0] = ' '; + pTopHeader->aLayerVersion[1] = '1'; + pTopHeader->aLayerSubVersion = '1'; +} + +static void MMSet2_0Version(struct MM_TH *pTopHeader) +{ + pTopHeader->aLayerVersion[0] = ' '; + pTopHeader->aLayerVersion[1] = '2'; + pTopHeader->aLayerSubVersion = '0'; +} + +/* -------------------------------------------------------------------- */ +/* Layer Functions: Header */ +/* -------------------------------------------------------------------- */ +int MMReadHeader(FILE_TYPE *pF, struct MM_TH *pMMHeader) +{ + char dot; + uint32_t NCount; + int32_t reservat4 = 0L; + + pMMHeader->Flag = 0x0; + if (fseek_function(pF, 0, SEEK_SET)) + return 1; + if (fread_function(pMMHeader->aFileType, 1, 3, pF) != 3) + return 1; + if (fread_function(pMMHeader->aLayerVersion, 1, 2, pF) != 2) + return 1; + if (fread_function(&dot, 1, 1, pF) != 1) + return 1; + if (fread_function(&pMMHeader->aLayerSubVersion, 1, 1, pF) != 1) + return 1; + if (fread_function(&pMMHeader->Flag, sizeof(pMMHeader->Flag), 1, pF) != 1) + return 1; + if (fread_function(&pMMHeader->hBB.dfMinX, sizeof(pMMHeader->hBB.dfMinX), 1, + pF) != 1) + return 1; + if (fread_function(&pMMHeader->hBB.dfMaxX, sizeof(pMMHeader->hBB.dfMaxX), 1, + pF) != 1) + return 1; + if (fread_function(&pMMHeader->hBB.dfMinY, sizeof(pMMHeader->hBB.dfMinY), 1, + pF) != 1) + return 1; + if (fread_function(&pMMHeader->hBB.dfMaxY, sizeof(pMMHeader->hBB.dfMaxY), 1, + pF) != 1) + return 1; + if (pMMHeader->aLayerVersion[0] == ' ' && + pMMHeader->aLayerVersion[1] == '1') + { + if (fread_function(&NCount, sizeof(NCount), 1, pF) != 1) + return 1; + + pMMHeader->nElemCount = (MM_INTERNAL_FID)NCount; + + if (fread_function(&reservat4, 4, 1, pF) != 1) + return 1; + } + else if (pMMHeader->aLayerVersion[0] == ' ' && + pMMHeader->aLayerVersion[1] == '2') + { + if (fread_function(&(pMMHeader->nElemCount), + sizeof(pMMHeader->nElemCount), 1, pF) != 1) + return 1; + + if (fread_function(&reservat4, 4, 1, pF) != 1) + return 1; + if (fread_function(&reservat4, 4, 1, pF) != 1) + return 1; + } + + if (pMMHeader->Flag & MM_LAYER_3D_INFO) + pMMHeader->bIs3d = 1; + + if (pMMHeader->Flag & MM_LAYER_MULTIPOLYGON) + pMMHeader->bIsMultipolygon = 1; + + return 0; +} + +static int MMWriteHeader(FILE_TYPE *pF, struct MM_TH *pMMHeader) +{ + char dot = '.'; + uint32_t NCount; + int32_t reservat4 = 0L; + MM_INTERNAL_FID nNumber1 = 1, nNumber0 
= 0; + + if (!pF) + return 0; + + pMMHeader->Flag = MM_CREATED_USING_MIRAMON; // Created from MiraMon + if (pMMHeader->bIs3d) + pMMHeader->Flag |= MM_LAYER_3D_INFO; // 3D + + if (pMMHeader->bIsMultipolygon) + pMMHeader->Flag |= MM_LAYER_MULTIPOLYGON; // Multipolygon. + + if (pMMHeader->aFileType[0] == 'P' && pMMHeader->aFileType[1] == 'O' && + pMMHeader->aFileType[2] == 'L') + pMMHeader->Flag |= MM_BIT_5_ON; // Explicital polygons + + if (fseek_function(pF, 0, SEEK_SET)) + return 1; + if (fwrite_function(pMMHeader->aFileType, 1, 3, pF) != 3) + return 1; + if (fwrite_function(pMMHeader->aLayerVersion, 1, 2, pF) != 2) + return 1; + if (fwrite_function(&dot, 1, 1, pF) != 1) + return 1; + if (fwrite_function(&pMMHeader->aLayerSubVersion, 1, 1, pF) != 1) + return 1; + if (fwrite_function(&pMMHeader->Flag, sizeof(pMMHeader->Flag), 1, pF) != 1) + return 1; + if (fwrite_function(&pMMHeader->hBB.dfMinX, sizeof(pMMHeader->hBB.dfMinX), + 1, pF) != 1) + return 1; + if (fwrite_function(&pMMHeader->hBB.dfMaxX, sizeof(pMMHeader->hBB.dfMaxX), + 1, pF) != 1) + return 1; + if (fwrite_function(&pMMHeader->hBB.dfMinY, sizeof(pMMHeader->hBB.dfMinY), + 1, pF) != 1) + return 1; + if (fwrite_function(&pMMHeader->hBB.dfMaxY, sizeof(pMMHeader->hBB.dfMaxY), + 1, pF) != 1) + return 1; + if (pMMHeader->aLayerVersion[0] == ' ' && + pMMHeader->aLayerVersion[1] == '1') + { + NCount = (uint32_t)pMMHeader->nElemCount; + if (fwrite_function(&NCount, sizeof(NCount), 1, pF) != 1) + return 1; + + if (fwrite_function(&reservat4, 4, 1, pF) != 1) + return 1; + } + else if (pMMHeader->aLayerVersion[0] == ' ' && + pMMHeader->aLayerVersion[1] == '2') + { + if (fwrite_function(&(pMMHeader->nElemCount), + sizeof(pMMHeader->nElemCount), 1, pF) != 1) + return 1; + + // Next part of the file (don't apply for the moment) + if (fwrite_function(&nNumber1, sizeof(nNumber1), 1, pF) != 1) + return 1; + if (fwrite_function(&nNumber0, sizeof(nNumber0), 1, pF) != 1) + return 1; + + // Reserved bytes + if (fwrite_function(&reservat4, 4, 1, pF) != 1) + return 1; + if (fwrite_function(&reservat4, 4, 1, pF) != 1) + return 1; + } + return 0; +} + +void MMInitHeader(struct MM_TH *pMMHeader, int layerType, int nVersion) +{ + memset(pMMHeader, 0, sizeof(*pMMHeader)); + switch (nVersion) + { + case MM_32BITS_VERSION: + pMMHeader->aLayerVersion[0] = '0'; + pMMHeader->aLayerVersion[1] = '1'; + pMMHeader->aLayerSubVersion = '1'; + break; + case MM_64BITS_VERSION: + case MM_LAST_VERSION: + default: + pMMHeader->aLayerVersion[0] = '0'; + pMMHeader->aLayerVersion[1] = '2'; + pMMHeader->aLayerSubVersion = '0'; + break; + } + switch (layerType) + { + case MM_LayerType_Point: + pMMHeader->aFileType[0] = 'P'; + pMMHeader->aFileType[1] = 'N'; + pMMHeader->aFileType[2] = 'T'; + break; + case MM_LayerType_Point3d: + pMMHeader->aFileType[0] = 'P'; + pMMHeader->aFileType[1] = 'N'; + pMMHeader->aFileType[2] = 'T'; + pMMHeader->bIs3d = 1; + break; + case MM_LayerType_Arc: + pMMHeader->aFileType[0] = 'A'; + pMMHeader->aFileType[1] = 'R'; + pMMHeader->aFileType[2] = 'C'; + break; + case MM_LayerType_Arc3d: + pMMHeader->aFileType[0] = 'A'; + pMMHeader->aFileType[1] = 'R'; + pMMHeader->aFileType[2] = 'C'; + pMMHeader->bIs3d = 1; + break; + case MM_LayerType_Pol: + pMMHeader->aFileType[0] = 'P'; + pMMHeader->aFileType[1] = 'O'; + pMMHeader->aFileType[2] = 'L'; + break; + case MM_LayerType_Pol3d: + pMMHeader->aFileType[0] = 'P'; + pMMHeader->aFileType[1] = 'O'; + pMMHeader->aFileType[2] = 'L'; + pMMHeader->bIs3d = 1; + break; + default: + break; + } + pMMHeader->nElemCount = 0; 
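+    // The bounding box starts out "inverted": assuming
+    // MM_UNDEFINED_STATISTICAL_VALUE is a large positive sentinel, the minima
+    // begin above any real coordinate and the maxima below it, so the first
+    // coordinate written to the layer always replaces these placeholders.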
+ pMMHeader->hBB.dfMinX = MM_UNDEFINED_STATISTICAL_VALUE; + pMMHeader->hBB.dfMaxX = -MM_UNDEFINED_STATISTICAL_VALUE; + pMMHeader->hBB.dfMinY = MM_UNDEFINED_STATISTICAL_VALUE; + pMMHeader->hBB.dfMaxY = -MM_UNDEFINED_STATISTICAL_VALUE; + + pMMHeader->Flag = MM_CREATED_USING_MIRAMON; // Created from MiraMon + if (pMMHeader->bIs3d) + pMMHeader->Flag |= MM_LAYER_3D_INFO; // 3D + + if (pMMHeader->bIsMultipolygon) + pMMHeader->Flag |= MM_LAYER_MULTIPOLYGON; // Multipolygon. + + if (pMMHeader->aFileType[0] == 'P' && pMMHeader->aFileType[1] == 'O' && + pMMHeader->aFileType[2] == 'L') + pMMHeader->Flag |= MM_BIT_5_ON; // Explicital polygons +} + +int MMWriteEmptyHeader(FILE_TYPE *pF, int layerType, int nVersion) +{ + struct MM_TH pMMHeader; + + memset(&pMMHeader, 0, sizeof(pMMHeader)); + switch (nVersion) + { + case MM_32BITS_VERSION: + pMMHeader.aLayerVersion[0] = '0'; + pMMHeader.aLayerVersion[1] = '1'; + pMMHeader.aLayerSubVersion = '1'; + break; + case MM_64BITS_VERSION: + case MM_LAST_VERSION: + default: + pMMHeader.aLayerVersion[0] = '0'; + pMMHeader.aLayerVersion[1] = '2'; + pMMHeader.aLayerSubVersion = '0'; + break; + } + switch (layerType) + { + case MM_LayerType_Point: + pMMHeader.aFileType[0] = 'P'; + pMMHeader.aFileType[1] = 'N'; + pMMHeader.aFileType[2] = 'T'; + break; + case MM_LayerType_Point3d: + pMMHeader.aFileType[0] = 'P'; + pMMHeader.aFileType[1] = 'N'; + pMMHeader.aFileType[2] = 'T'; + pMMHeader.bIs3d = 1; + break; + case MM_LayerType_Arc: + pMMHeader.aFileType[0] = 'A'; + pMMHeader.aFileType[1] = 'R'; + pMMHeader.aFileType[2] = 'C'; + break; + case MM_LayerType_Arc3d: + pMMHeader.aFileType[0] = 'A'; + pMMHeader.aFileType[1] = 'R'; + pMMHeader.aFileType[2] = 'C'; + pMMHeader.bIs3d = 1; + break; + case MM_LayerType_Pol: + pMMHeader.aFileType[0] = 'P'; + pMMHeader.aFileType[1] = 'O'; + pMMHeader.aFileType[2] = 'L'; + break; + case MM_LayerType_Pol3d: + pMMHeader.aFileType[0] = 'P'; + pMMHeader.aFileType[1] = 'O'; + pMMHeader.aFileType[2] = 'L'; + pMMHeader.bIs3d = 1; + break; + default: + break; + } + pMMHeader.nElemCount = 0; + pMMHeader.hBB.dfMinX = MM_UNDEFINED_STATISTICAL_VALUE; + pMMHeader.hBB.dfMaxX = -MM_UNDEFINED_STATISTICAL_VALUE; + pMMHeader.hBB.dfMinY = MM_UNDEFINED_STATISTICAL_VALUE; + pMMHeader.hBB.dfMaxY = -MM_UNDEFINED_STATISTICAL_VALUE; + + return MMWriteHeader(pF, &pMMHeader); +} + +/* -------------------------------------------------------------------- */ +/* Layer Functions: Z section */ +/* -------------------------------------------------------------------- */ +int MMReadZSection(struct MiraMonVectLayerInfo *hMiraMonLayer, FILE_TYPE *pF, + struct MM_ZSection *pZSection) +{ + int32_t reservat4 = 0L; + + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->bIsPoint) + { + if (MMCheckSize_t(hMiraMonLayer->TopHeader.nElemCount, MM_SIZE_OF_TL)) + return 1; + if (hMiraMonLayer->TopHeader.nElemCount * MM_SIZE_OF_TL > + UINT64_MAX - hMiraMonLayer->nHeaderDiskSize) + return 1; + pZSection->ZSectionOffset = + hMiraMonLayer->nHeaderDiskSize + + hMiraMonLayer->TopHeader.nElemCount * MM_SIZE_OF_TL; + } + else if (hMiraMonLayer->bIsArc && !(hMiraMonLayer->bIsPolygon) && + hMiraMonLayer->TopHeader.nElemCount > 0) + { + const struct MM_AH *pArcHeader = + &(hMiraMonLayer->MMArc + .pArcHeader[hMiraMonLayer->TopHeader.nElemCount - 1]); + if (MMCheckSize_t(pArcHeader->nElemCount, MM_SIZE_OF_COORDINATE)) + return 1; + if (pArcHeader->nElemCount * MM_SIZE_OF_COORDINATE > + UINT64_MAX - pArcHeader->nOffset) + return 1; + // Z section begins just after last coordinate of the last 
arc + pZSection->ZSectionOffset = + pArcHeader->nOffset + + pArcHeader->nElemCount * MM_SIZE_OF_COORDINATE; + } + else if (hMiraMonLayer->bIsPolygon && + hMiraMonLayer->MMPolygon.TopArcHeader.nElemCount > 0) + { + const struct MM_AH *pArcHeader = + &(hMiraMonLayer->MMPolygon.MMArc + .pArcHeader[hMiraMonLayer->MMPolygon.TopArcHeader.nElemCount - + 1]); + if (MMCheckSize_t(pArcHeader->nElemCount, MM_SIZE_OF_COORDINATE)) + return 1; + if (pArcHeader->nElemCount * MM_SIZE_OF_COORDINATE > + UINT64_MAX - pArcHeader->nOffset) + return 1; + // Z section begins just after last coordinate of the last arc + pZSection->ZSectionOffset = + pArcHeader->nOffset + + pArcHeader->nElemCount * MM_SIZE_OF_COORDINATE; + } + else + return 1; + + if (pF) + { + if (fseek_function(pF, pZSection->ZSectionOffset, SEEK_SET)) + return 1; + + if (fread_function(&reservat4, 4, 1, pF) != 1) + return 1; + pZSection->ZSectionOffset += 4; + if (fread_function(&reservat4, 4, 1, pF) != 1) + return 1; + pZSection->ZSectionOffset += 4; + if (fread_function(&reservat4, 4, 1, pF) != 1) + return 1; + pZSection->ZSectionOffset += 4; + if (fread_function(&reservat4, 4, 1, pF) != 1) + return 1; + pZSection->ZSectionOffset += 4; + + if (fread_function(&pZSection->ZHeader.dfBBminz, + sizeof(pZSection->ZHeader.dfBBminz), 1, pF) != 1) + return 1; + pZSection->ZSectionOffset += sizeof(pZSection->ZHeader.dfBBminz); + + if (fread_function(&pZSection->ZHeader.dfBBmaxz, + sizeof(pZSection->ZHeader.dfBBmaxz), 1, pF) != 1) + return 1; + pZSection->ZSectionOffset += sizeof(pZSection->ZHeader.dfBBmaxz); + } + return 0; +} + +static int MMWriteZSection(FILE_TYPE *pF, struct MM_ZSection *pZSection) +{ + int32_t reservat4 = 0L; + + if (fseek_function(pF, pZSection->ZSectionOffset, SEEK_SET)) + return 1; + + if (fwrite_function(&reservat4, 4, 1, pF) != 1) + return 1; + if (fwrite_function(&reservat4, 4, 1, pF) != 1) + return 1; + if (fwrite_function(&reservat4, 4, 1, pF) != 1) + return 1; + if (fwrite_function(&reservat4, 4, 1, pF) != 1) + return 1; + + pZSection->ZSectionOffset += 16; + + if (fwrite_function(&pZSection->ZHeader.dfBBminz, + sizeof(pZSection->ZHeader.dfBBminz), 1, pF) != 1) + return 1; + pZSection->ZSectionOffset += sizeof(pZSection->ZHeader.dfBBminz); + if (fwrite_function(&pZSection->ZHeader.dfBBmaxz, + sizeof(pZSection->ZHeader.dfBBmaxz), 1, pF) != 1) + return 1; + pZSection->ZSectionOffset += sizeof(pZSection->ZHeader.dfBBmaxz); + return 0; +} + +int MMReadZDescriptionHeaders(struct MiraMonVectLayerInfo *hMiraMonLayer, + FILE_TYPE *pF, MM_INTERNAL_FID nElements, + struct MM_ZSection *pZSection) +{ + struct MM_FLUSH_INFO FlushTMP; + char *pBuffer = nullptr; + MM_INTERNAL_FID nIndex = 0; + MM_FILE_OFFSET nBlockSize; + struct MM_ZD *pZDescription; + + if (!hMiraMonLayer) + return 1; + + if (!pZSection) + return 1; + + if (!nElements) + return 0; // No elements to read + + pZDescription = pZSection->pZDescription; + + nBlockSize = nElements * pZSection->nZDDiskSize; + + if (MMInitFlush(&FlushTMP, pF, nBlockSize, &pBuffer, + pZSection->ZSectionOffset, 0)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + FlushTMP.pBlockWhereToSaveOrRead = (void *)pBuffer; + if (MMReadFlush(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + for (nIndex = 0; nIndex < nElements; nIndex++) + { + FlushTMP.SizeOfBlockToBeSaved = + sizeof((pZDescription + nIndex)->dfBBminz); + FlushTMP.pBlockToBeSaved = (void *)&(pZDescription + nIndex)->dfBBminz; + if (MMReadBlockFromBuffer(&FlushTMP)) + { + if (pBuffer) + 
free_function(pBuffer); + return 1; + } + + FlushTMP.SizeOfBlockToBeSaved = + sizeof((pZDescription + nIndex)->dfBBmaxz); + FlushTMP.pBlockToBeSaved = (void *)&(pZDescription + nIndex)->dfBBmaxz; + if (MMReadBlockFromBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + FlushTMP.SizeOfBlockToBeSaved = + sizeof((pZDescription + nIndex)->nZCount); + FlushTMP.pBlockToBeSaved = (void *)&(pZDescription + nIndex)->nZCount; + if (MMReadBlockFromBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + if (hMiraMonLayer->LayerVersion == MM_64BITS_VERSION) + { + FlushTMP.SizeOfBlockToBeSaved = 4; + FlushTMP.pBlockToBeSaved = (void *)nullptr; + if (MMReadBlockFromBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + } + + if (MMReadOffsetDependingOnVersion(hMiraMonLayer, &FlushTMP, + &(pZDescription + nIndex)->nOffsetZ)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + } + if (pBuffer) + free_function(pBuffer); + + return 0; +} + +static int +MMWriteZDescriptionHeaders(struct MiraMonVectLayerInfo *hMiraMonLayer, + FILE_TYPE *pF, MM_INTERNAL_FID nElements, + struct MM_ZSection *pZSection) +{ + struct MM_FLUSH_INFO FlushTMP; + char *pBuffer = nullptr; + uint32_t nUL32; + MM_INTERNAL_FID nIndex = 0; + MM_FILE_OFFSET nOffsetDiff; + struct MM_ZD *pZDescription; + + if (!hMiraMonLayer) + return 1; + + if (!pF) + return 1; + + if (!pZSection) + return 1; + + pZDescription = pZSection->pZDescription; + + nOffsetDiff = + pZSection->ZSectionOffset + nElements * pZSection->nZDDiskSize; + + if (MMInitFlush(&FlushTMP, pF, MM_1MB, &pBuffer, pZSection->ZSectionOffset, + 0)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + FlushTMP.pBlockWhereToSaveOrRead = (void *)pBuffer; + for (nIndex = 0; nIndex < nElements; nIndex++) + { + FlushTMP.SizeOfBlockToBeSaved = + sizeof((pZDescription + nIndex)->dfBBminz); + FlushTMP.pBlockToBeSaved = (void *)&(pZDescription + nIndex)->dfBBminz; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + FlushTMP.SizeOfBlockToBeSaved = + sizeof((pZDescription + nIndex)->dfBBmaxz); + FlushTMP.pBlockToBeSaved = (void *)&(pZDescription + nIndex)->dfBBmaxz; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + FlushTMP.SizeOfBlockToBeSaved = + sizeof((pZDescription + nIndex)->nZCount); + FlushTMP.pBlockToBeSaved = (void *)&(pZDescription + nIndex)->nZCount; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + if (hMiraMonLayer->LayerVersion == MM_64BITS_VERSION) + { + FlushTMP.SizeOfBlockToBeSaved = 4; + FlushTMP.pBlockToBeSaved = (void *)nullptr; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + } + + if (MMAppendIntegerDependingOnVersion( + hMiraMonLayer, &FlushTMP, &nUL32, + (pZDescription + nIndex)->nOffsetZ + nOffsetDiff)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + } + FlushTMP.SizeOfBlockToBeSaved = 0; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + pZSection->ZSectionOffset += FlushTMP.TotalSavedBytes; + + if (pBuffer) + free_function(pBuffer); + + return 0; +} + +static void MMDestroyZSectionDescription(struct MM_ZSection *pZSection) +{ + if (pZSection->pZL) + { + free_function(pZSection->pZL); + pZSection->pZL = nullptr; + } + + if (pZSection->pZDescription) + { + 
free_function(pZSection->pZDescription); + pZSection->pZDescription = nullptr; + } +} + +static int MMInitZSectionDescription(struct MM_ZSection *pZSection) +{ + if (MMCheckSize_t(pZSection->nMaxZDescription, + sizeof(*pZSection->pZDescription))) + return 1; + + if (!pZSection->nMaxZDescription) + { + pZSection->pZDescription = nullptr; + return 0; // No elements to read (or write) + } + + pZSection->pZDescription = + (struct MM_ZD *)calloc_function((size_t)pZSection->nMaxZDescription * + sizeof(*pZSection->pZDescription)); + if (!pZSection->pZDescription) + return 1; + return 0; +} + +static int MMInitZSectionLayer(struct MiraMonVectLayerInfo *hMiraMonLayer, + FILE_TYPE *pF3d, struct MM_ZSection *pZSection) +{ + if (!hMiraMonLayer) + return 1; + + // Zsection + if (!hMiraMonLayer->TopHeader.bIs3d) + { + pZSection->pZDescription = nullptr; + return 0; + } + + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + pZSection->ZHeader.dfBBminz = STATISTICAL_UNDEF_VALUE; + pZSection->ZHeader.dfBBmaxz = -STATISTICAL_UNDEF_VALUE; + } + + // ZH + pZSection->ZHeader.nMyDiskSize = 32; + pZSection->ZSectionOffset = 0; + + // ZD + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + pZSection->nMaxZDescription = + MM_FIRST_NUMBER_OF_VERTICES * sizeof(double); + if (MMInitZSectionDescription(pZSection)) + return 1; + } + else + { + if (hMiraMonLayer->bIsPolygon) + { + if (MMCheckSize_t(hMiraMonLayer->MMPolygon.TopArcHeader.nElemCount, + sizeof(double))) + return 1; + + pZSection->nMaxZDescription = + hMiraMonLayer->MMPolygon.TopArcHeader.nElemCount * + sizeof(double); + } + else + { + if (MMCheckSize_t(hMiraMonLayer->TopHeader.nElemCount, + sizeof(double))) + return 1; + + pZSection->nMaxZDescription = + hMiraMonLayer->TopHeader.nElemCount * sizeof(double); + } + if (MMInitZSectionDescription(pZSection)) + return 1; + } + + if (hMiraMonLayer->LayerVersion == MM_32BITS_VERSION) + pZSection->nZDDiskSize = MM_SIZE_OF_ZD_32_BITS; + else + pZSection->nZDDiskSize = MM_SIZE_OF_ZD_64_BITS; + + pZSection->ZDOffset = 0; + + // ZL + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + if (MMInitFlush(&pZSection->FlushZL, pF3d, MM_1MB, &pZSection->pZL, 0, + sizeof(double))) + return 1; + } + return 0; +} + +/* -------------------------------------------------------------------- */ +/* Layer Functions: Extensions */ +/* -------------------------------------------------------------------- */ + +/* Find the last occurrence of pszFinalPart in pszName + and changes it by pszNewPart. 
+
+
+   Examples of desired behavior
+   AA.pnt -> AAT.rel
+   AA.nod -> AAN.~idx
+   AA.nod -> AAN.dbf
+   AA.nod -> AAN.rel
+*/
+
+static int MMChangeFinalPartOfTheName(char *pszName, size_t nMaxSizeOfName,
+                                      const char *pszFinalPart,
+                                      const char *pszNewPart)
+{
+    char *pAux, *pszWhereToFind, *pszLastFound = nullptr;
+
+    if (!pszName || !pszFinalPart || !pszNewPart)
+        return 0;
+    if (MMIsEmptyString(pszName) || MMIsEmptyString(pszFinalPart) ||
+        MMIsEmptyString(pszNewPart))
+        return 0;
+
+    if (strlen(pszName) - strlen(pszFinalPart) + strlen(pszNewPart) >=
+        nMaxSizeOfName)
+        return 1; // It's not possible to change the final part
+
+    // Portable implementation of strrstr() (find the last occurrence)
+    // pszLastFound = strrstr(pszWhereToFind, pszFinalPart);
+    pszWhereToFind = pszName;
+    while (nullptr != (pAux = strstr(pszWhereToFind, pszFinalPart)))
+    {
+        pszLastFound = pAux;
+        pszWhereToFind = pAux + strlen(pAux);
+    }
+
+    if (!pszLastFound)
+        return 1; // Not found, nothing changed
+
+    memcpy(pszLastFound, pszNewPart, strlen(pszNewPart));
+
+    return 0;
+}
+
+/* -------------------------------------------------------------------- */
+/* Layer Functions: initializing MiraMon layers */
+/* -------------------------------------------------------------------- */
+static int MMInitPointLayer(struct MiraMonVectLayerInfo *hMiraMonLayer)
+{
+    if (!hMiraMonLayer)
+        return 1;
+
+    hMiraMonLayer->bIsPoint = 1;
+
+    if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE)
+    {
+        // Geometrical part
+        // Init header structure
+        hMiraMonLayer->TopHeader.nElemCount = 0;
+        MMInitBoundingBox(&hMiraMonLayer->TopHeader.hBB);
+
+        hMiraMonLayer->TopHeader.bIs3d = 1; // Read description of bRealIs3d
+        hMiraMonLayer->TopHeader.aFileType[0] = 'P';
+        hMiraMonLayer->TopHeader.aFileType[1] = 'N';
+        hMiraMonLayer->TopHeader.aFileType[2] = 'T';
+
+        // Opening the binary file where sections TH, TL[...] and ZH-ZD[...]-ZL[...]
+        // are going to be written.
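+        // The .pnt file name is built from the source layer base name; the
+        // temporary T.~xy and T.~z files are created further below.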
+ + snprintf(hMiraMonLayer->MMPoint.pszLayerName, + sizeof(hMiraMonLayer->MMPoint.pszLayerName), "%s.pnt", + hMiraMonLayer->pszSrcLayerName); + } + if (nullptr == (hMiraMonLayer->MMPoint.pF = + fopen_function(hMiraMonLayer->MMPoint.pszLayerName, + hMiraMonLayer->pszFlags))) + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "Error MMPoint.pF: Cannot open file %s.", + hMiraMonLayer->MMPoint.pszLayerName); + return 1; + } + fseek_function(hMiraMonLayer->MMPoint.pF, 0, SEEK_SET); + + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + // TL + snprintf(hMiraMonLayer->MMPoint.pszTLName, + sizeof(hMiraMonLayer->MMPoint.pszTLName), "%sT.~xy", + hMiraMonLayer->pszSrcLayerName); + + if (nullptr == (hMiraMonLayer->MMPoint.pFTL = + fopen_function(hMiraMonLayer->MMPoint.pszTLName, + hMiraMonLayer->pszFlags))) + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "Error MMPoint.pFTL: Cannot open file %s.", + hMiraMonLayer->MMPoint.pszTLName); + return 1; + } + fseek_function(hMiraMonLayer->MMPoint.pFTL, 0, SEEK_SET); + + if (MMInitFlush(&hMiraMonLayer->MMPoint.FlushTL, + hMiraMonLayer->MMPoint.pFTL, MM_1MB, + &hMiraMonLayer->MMPoint.pTL, 0, MM_SIZE_OF_TL)) + return 1; + + // 3D part + if (hMiraMonLayer->TopHeader.bIs3d) + { + snprintf(hMiraMonLayer->MMPoint.psz3DLayerName, + sizeof(hMiraMonLayer->MMPoint.psz3DLayerName), "%sT.~z", + hMiraMonLayer->pszSrcLayerName); + + if (nullptr == (hMiraMonLayer->MMPoint.pF3d = fopen_function( + hMiraMonLayer->MMPoint.psz3DLayerName, + hMiraMonLayer->pszFlags))) + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "Error MMPoint.pF3d: Cannot open file %s.", + hMiraMonLayer->MMPoint.psz3DLayerName); + return 1; + } + fseek_function(hMiraMonLayer->MMPoint.pF3d, 0, SEEK_SET); + } + } + // Zsection + if (hMiraMonLayer->TopHeader.bIs3d) + { + if (MMInitZSectionLayer(hMiraMonLayer, hMiraMonLayer->MMPoint.pF3d, + &hMiraMonLayer->MMPoint.pZSection)) + return 1; + + if (hMiraMonLayer->ReadOrWrite == MM_READING_MODE) + { + if (MMReadZSection(hMiraMonLayer, hMiraMonLayer->MMPoint.pF, + &hMiraMonLayer->MMPoint.pZSection)) + return 1; + + if (MMReadZDescriptionHeaders(hMiraMonLayer, + hMiraMonLayer->MMPoint.pF, + hMiraMonLayer->TopHeader.nElemCount, + &hMiraMonLayer->MMPoint.pZSection)) + return 1; + } + } + + // MiraMon metadata + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + snprintf(hMiraMonLayer->MMPoint.pszREL_LayerName, + sizeof(hMiraMonLayer->MMPoint.pszREL_LayerName), "%sT.rel", + hMiraMonLayer->pszSrcLayerName); + } + else + { + CPLStrlcpy(hMiraMonLayer->MMPoint.pszREL_LayerName, + hMiraMonLayer->pszSrcLayerName, + sizeof(hMiraMonLayer->MMPoint.pszREL_LayerName)); + if (MMChangeFinalPartOfTheName(hMiraMonLayer->MMPoint.pszREL_LayerName, + MM_CPL_PATH_BUF_SIZE, ".pnt", "T.rel")) + return 1; + } + + hMiraMonLayer->pszMainREL_LayerName = + hMiraMonLayer->MMPoint.pszREL_LayerName; + + if (hMiraMonLayer->ReadOrWrite == MM_READING_MODE) + { + // This file has to exist and be the appropriate version. 
+ if (MMCheck_REL_FILE(hMiraMonLayer->MMPoint.pszREL_LayerName)) + return 1; + } + + // MIRAMON DATA BASE + // Creating the DBF file name + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + snprintf(hMiraMonLayer->MMPoint.MMAdmDB.pszExtDBFLayerName, + sizeof(hMiraMonLayer->MMPoint.MMAdmDB.pszExtDBFLayerName), + "%sT.dbf", hMiraMonLayer->pszSrcLayerName); + } + else + { + CPLStrlcpy(hMiraMonLayer->MMPoint.MMAdmDB.pszExtDBFLayerName, + hMiraMonLayer->pszSrcLayerName, + sizeof(hMiraMonLayer->MMPoint.MMAdmDB.pszExtDBFLayerName)); + + if (MMChangeFinalPartOfTheName( + hMiraMonLayer->MMPoint.MMAdmDB.pszExtDBFLayerName, + MM_CPL_PATH_BUF_SIZE, ".pnt", "T.dbf")) + return 1; + } + + if (hMiraMonLayer->ReadOrWrite == MM_READING_MODE) + { + if (MM_ReadExtendedDBFHeader(hMiraMonLayer)) + return 1; + } + + return 0; +} + +static int MMInitNodeLayer(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + struct MiraMonArcLayer *pMMArcLayer; + + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->bIsPolygon) + pMMArcLayer = &hMiraMonLayer->MMPolygon.MMArc; + else + pMMArcLayer = &hMiraMonLayer->MMArc; + + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + // Init header structure + pMMArcLayer->TopNodeHeader.aFileType[0] = 'N'; + pMMArcLayer->TopNodeHeader.aFileType[1] = 'O'; + pMMArcLayer->TopNodeHeader.aFileType[2] = 'D'; + + pMMArcLayer->TopNodeHeader.bIs3d = 1; // Read description of bRealIs3d + MMInitBoundingBox(&pMMArcLayer->TopNodeHeader.hBB); + } + + // Opening the binary file where sections TH, NH and NL[...] + // are going to be written. + strcpy(pMMArcLayer->MMNode.pszLayerName, pMMArcLayer->pszLayerName); + CPLStrlcpy(pMMArcLayer->MMNode.pszLayerName, + reset_extension(pMMArcLayer->MMNode.pszLayerName, "nod"), + sizeof(pMMArcLayer->MMNode.pszLayerName)); + + if (nullptr == (pMMArcLayer->MMNode.pF = + fopen_function(pMMArcLayer->MMNode.pszLayerName, + hMiraMonLayer->pszFlags))) + { + + MMCPLError(CE_Failure, CPLE_OpenFailed, + "Error MMNode.pF: Cannot open file %s.", + pMMArcLayer->MMNode.pszLayerName); + return 1; + } + fseek_function(pMMArcLayer->MMNode.pF, 0, SEEK_SET); + + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + // Node Header + pMMArcLayer->MMNode.nMaxNodeHeader = MM_FIRST_NUMBER_OF_NODES; + if (MMCheckSize_t(pMMArcLayer->MMNode.nMaxNodeHeader, + sizeof(*pMMArcLayer->MMNode.pNodeHeader))) + return 1; + + if (!pMMArcLayer->MMNode.nMaxNodeHeader) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Error in MiraMon " + "driver: no nodes to write?"); + return 1; + } + + if (nullptr == + (pMMArcLayer->MMNode.pNodeHeader = (struct MM_NH *)calloc_function( + (size_t)pMMArcLayer->MMNode.nMaxNodeHeader * + sizeof(*pMMArcLayer->MMNode.pNodeHeader)))) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMInitNodeLayer())"); + return 1; + } + + if (hMiraMonLayer->LayerVersion == MM_32BITS_VERSION) + pMMArcLayer->MMNode.nSizeNodeHeader = MM_SIZE_OF_NH_32BITS; + else + pMMArcLayer->MMNode.nSizeNodeHeader = MM_SIZE_OF_NH_64BITS; + + // NL Section + strcpy(pMMArcLayer->MMNode.pszNLName, pMMArcLayer->MMNode.pszLayerName); + if (MMChangeFinalPartOfTheName(pMMArcLayer->MMNode.pszNLName, + MM_CPL_PATH_BUF_SIZE, ".nod", "N.~idx")) + return 1; + + if (nullptr == (pMMArcLayer->MMNode.pFNL = + fopen_function(pMMArcLayer->MMNode.pszNLName, + hMiraMonLayer->pszFlags))) + { + + MMCPLError(CE_Failure, CPLE_OpenFailed, + "Error MMNode.pFNL: Cannot open file %s.", + pMMArcLayer->MMNode.pszNLName); + return 1; + } + fseek_function(pMMArcLayer->MMNode.pFNL, 0, 
SEEK_SET); + + if (MMInitFlush(&pMMArcLayer->MMNode.FlushNL, pMMArcLayer->MMNode.pFNL, + MM_1MB, &pMMArcLayer->MMNode.pNL, 0, 0)) + return 1; + + // Creating the DBF file name + strcpy(pMMArcLayer->MMNode.MMAdmDB.pszExtDBFLayerName, + pMMArcLayer->MMNode.pszLayerName); + if (MMChangeFinalPartOfTheName( + pMMArcLayer->MMNode.MMAdmDB.pszExtDBFLayerName, + MM_CPL_PATH_BUF_SIZE, ".nod", "N.dbf")) + return 1; + + // MiraMon metadata + strcpy(pMMArcLayer->MMNode.pszREL_LayerName, + pMMArcLayer->MMNode.pszLayerName); + if (MMChangeFinalPartOfTheName(pMMArcLayer->MMNode.pszREL_LayerName, + MM_CPL_PATH_BUF_SIZE, ".nod", "N.rel")) + return 1; + } + return 0; +} + +static int MMInitArcLayer(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + struct MiraMonArcLayer *pMMArcLayer; + struct MM_TH *pArcTopHeader; + + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->bIsPolygon) + { + pMMArcLayer = &hMiraMonLayer->MMPolygon.MMArc; + pArcTopHeader = &hMiraMonLayer->MMPolygon.TopArcHeader; + } + else + { + pMMArcLayer = &hMiraMonLayer->MMArc; + pArcTopHeader = &hMiraMonLayer->TopHeader; + } + + // Init header structure + hMiraMonLayer->bIsArc = 1; + + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + pArcTopHeader->bIs3d = 1; // Read description of bRealIs3d + MMInitBoundingBox(&pArcTopHeader->hBB); + + pArcTopHeader->aFileType[0] = 'A'; + pArcTopHeader->aFileType[1] = 'R'; + pArcTopHeader->aFileType[2] = 'C'; + + if (hMiraMonLayer->bIsPolygon) + { + snprintf(pMMArcLayer->pszLayerName, + sizeof(pMMArcLayer->pszLayerName), "%s_bound.arc", + hMiraMonLayer->pszSrcLayerName); + } + else + { + snprintf(pMMArcLayer->pszLayerName, + sizeof(pMMArcLayer->pszLayerName), "%s.arc", + hMiraMonLayer->pszSrcLayerName); + } + } + + if (nullptr == (pMMArcLayer->pF = fopen_function(pMMArcLayer->pszLayerName, + hMiraMonLayer->pszFlags))) + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "Error pMMArcLayer->pF: Cannot open file %s.", + pMMArcLayer->pszLayerName); + return 1; + } + + if (hMiraMonLayer->ReadOrWrite == MM_READING_MODE && + hMiraMonLayer->bIsPolygon) + { + fseek_function(pMMArcLayer->pF, 0, SEEK_SET); + if (MMReadHeader(pMMArcLayer->pF, + &hMiraMonLayer->MMPolygon.TopArcHeader)) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "Error reading the format in file %s.", + pMMArcLayer->pszLayerName); + return 1; + } + // 3D information is in arcs file + hMiraMonLayer->TopHeader.bIs3d = + hMiraMonLayer->MMPolygon.TopArcHeader.bIs3d; + } + + // AH + if (hMiraMonLayer->LayerVersion == MM_32BITS_VERSION) + pMMArcLayer->nSizeArcHeader = MM_SIZE_OF_AH_32BITS; + else + pMMArcLayer->nSizeArcHeader = MM_SIZE_OF_AH_64BITS; + + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + pMMArcLayer->nMaxArcHeader = MM_FIRST_NUMBER_OF_ARCS; + else + pMMArcLayer->nMaxArcHeader = pArcTopHeader->nElemCount; + + if (pMMArcLayer->nMaxArcHeader) + { + if (MMCheckSize_t(pMMArcLayer->nMaxArcHeader, + sizeof(*pMMArcLayer->pArcHeader))) + return 1; + if (nullptr == (pMMArcLayer->pArcHeader = (struct MM_AH *) + calloc_function((size_t)pMMArcLayer->nMaxArcHeader * + sizeof(*pMMArcLayer->pArcHeader)))) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMInitArcLayer())"); + return 1; + } + if (hMiraMonLayer->ReadOrWrite == MM_READING_MODE) + { + if (MMReadAHArcSection(hMiraMonLayer)) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "Error reading the format in file %s.", + pMMArcLayer->pszLayerName); + return 1; + } + } + } + else + pMMArcLayer->pArcHeader = nullptr; + + // AL + if 
(hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + pMMArcLayer->nALElementSize = MM_SIZE_OF_AL; + + if (hMiraMonLayer->bIsPolygon) + { + snprintf(pMMArcLayer->pszALName, sizeof(pMMArcLayer->pszALName), + "%s_boundA.~xy", hMiraMonLayer->pszSrcLayerName); + } + else + { + snprintf(pMMArcLayer->pszALName, sizeof(pMMArcLayer->pszALName), + "%sA.~xy", hMiraMonLayer->pszSrcLayerName); + } + + if (nullptr == (pMMArcLayer->pFAL = fopen_function( + pMMArcLayer->pszALName, hMiraMonLayer->pszFlags))) + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "Error pMMArcLayer->pFAL: Cannot open file %s.", + pMMArcLayer->pszALName); + return 1; + } + fseek_function(pMMArcLayer->pFAL, 0, SEEK_SET); + + if (MMInitFlush(&pMMArcLayer->FlushAL, pMMArcLayer->pFAL, MM_1MB, + &pMMArcLayer->pAL, 0, 0)) + return 1; + } + + // 3D + if (pArcTopHeader->bIs3d) + { + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + if (hMiraMonLayer->bIsPolygon) + { + snprintf(pMMArcLayer->psz3DLayerName, + sizeof(pMMArcLayer->psz3DLayerName), "%s_boundA.~z", + hMiraMonLayer->pszSrcLayerName); + } + else + { + snprintf(pMMArcLayer->psz3DLayerName, + sizeof(pMMArcLayer->psz3DLayerName), "%sA.~z", + hMiraMonLayer->pszSrcLayerName); + } + + if (nullptr == + (pMMArcLayer->pF3d = fopen_function(pMMArcLayer->psz3DLayerName, + hMiraMonLayer->pszFlags))) + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "Error pMMArcLayer->pF3d: Cannot open file %s.", + pMMArcLayer->psz3DLayerName); + return 1; + } + fseek_function(pMMArcLayer->pF3d, 0, SEEK_SET); + } + + if (MMInitZSectionLayer(hMiraMonLayer, pMMArcLayer->pF3d, + &pMMArcLayer->pZSection)) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "Error reading the format in file %s %d.", + pMMArcLayer->pszLayerName, 6); + return 1; + } + + if (hMiraMonLayer->ReadOrWrite == MM_READING_MODE) + { + if (MMReadZSection(hMiraMonLayer, pMMArcLayer->pF, + &pMMArcLayer->pZSection)) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "Error reading the format in file %s.", + pMMArcLayer->pszLayerName); + return 1; + } + + if (MMReadZDescriptionHeaders(hMiraMonLayer, pMMArcLayer->pF, + pArcTopHeader->nElemCount, + &pMMArcLayer->pZSection)) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "Error reading the format in file %s.", + pMMArcLayer->pszLayerName); + return 1; + } + } + } + // MiraMon metadata + if (hMiraMonLayer->bIsPolygon) + { + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + snprintf(pMMArcLayer->pszREL_LayerName, + sizeof(pMMArcLayer->pszREL_LayerName), "%s_boundA.rel", + hMiraMonLayer->pszSrcLayerName); + } + else + { + strcpy(pMMArcLayer->pszREL_LayerName, pMMArcLayer->pszLayerName); + if (MMChangeFinalPartOfTheName(pMMArcLayer->pszREL_LayerName, + MM_CPL_PATH_BUF_SIZE, ".arc", + "A.rel")) + return 1; + } + } + else + { + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + snprintf(pMMArcLayer->pszREL_LayerName, + sizeof(pMMArcLayer->pszREL_LayerName), "%sA.rel", + hMiraMonLayer->pszSrcLayerName); + } + else + { + CPLStrlcpy(pMMArcLayer->pszREL_LayerName, + hMiraMonLayer->pszSrcLayerName, + sizeof(pMMArcLayer->pszREL_LayerName)); + if (MMChangeFinalPartOfTheName(pMMArcLayer->pszREL_LayerName, + MM_CPL_PATH_BUF_SIZE, ".arc", + "A.rel")) + return 1; + } + } + + if (hMiraMonLayer->ReadOrWrite == MM_READING_MODE) + { + // This file has to exist and be the appropriate version. 
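+        // MMCheck_REL_FILE() fails if the REL metadata file is missing or its
+        // version is not supported, in which case the layer cannot be opened.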
+ if (MMCheck_REL_FILE(pMMArcLayer->pszREL_LayerName)) + return 1; + } + + if (!hMiraMonLayer->bIsPolygon) + hMiraMonLayer->pszMainREL_LayerName = pMMArcLayer->pszREL_LayerName; + + // MIRAMON DATA BASE + // Creating the DBF file name + if (hMiraMonLayer->bIsPolygon) + { + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + snprintf(pMMArcLayer->MMAdmDB.pszExtDBFLayerName, + sizeof(pMMArcLayer->MMAdmDB.pszExtDBFLayerName), + "%s_boundA.dbf", hMiraMonLayer->pszSrcLayerName); + } + else + { + strcpy(pMMArcLayer->MMAdmDB.pszExtDBFLayerName, + pMMArcLayer->pszLayerName); + if (MMChangeFinalPartOfTheName( + pMMArcLayer->MMAdmDB.pszExtDBFLayerName, + MM_CPL_PATH_BUF_SIZE, ".arc", "A.dbf")) + return 1; + } + } + else + { + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + snprintf(pMMArcLayer->MMAdmDB.pszExtDBFLayerName, + sizeof(pMMArcLayer->MMAdmDB.pszExtDBFLayerName), "%sA.dbf", + hMiraMonLayer->pszSrcLayerName); + } + else + { + CPLStrlcpy(pMMArcLayer->MMAdmDB.pszExtDBFLayerName, + hMiraMonLayer->pszSrcLayerName, + sizeof(pMMArcLayer->MMAdmDB.pszExtDBFLayerName)); + if (MMChangeFinalPartOfTheName( + pMMArcLayer->MMAdmDB.pszExtDBFLayerName, + MM_CPL_PATH_BUF_SIZE, ".arc", "A.dbf")) + return 1; + } + } + + if (hMiraMonLayer->ReadOrWrite == MM_READING_MODE) + { + if (MM_ReadExtendedDBFHeader(hMiraMonLayer)) + return 1; + } + + // Node part + if (MMInitNodeLayer(hMiraMonLayer)) + return 1; + if (hMiraMonLayer->LayerVersion == MM_32BITS_VERSION) + MMSet1_1Version(&pMMArcLayer->TopNodeHeader); + else + MMSet2_0Version(&pMMArcLayer->TopNodeHeader); + + return 0; +} + +static int MMInitPolygonLayer(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + struct MiraMonPolygonLayer *pMMPolygonLayer; + + if (!hMiraMonLayer) + return 1; + + pMMPolygonLayer = &hMiraMonLayer->MMPolygon; + + // Init header structure + hMiraMonLayer->bIsPolygon = 1; + + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + hMiraMonLayer->TopHeader.bIs3d = 1; // Read description of bRealIs3d + MMInitBoundingBox(&hMiraMonLayer->TopHeader.hBB); + + hMiraMonLayer->TopHeader.aFileType[0] = 'P'; + hMiraMonLayer->TopHeader.aFileType[1] = 'O'; + hMiraMonLayer->TopHeader.aFileType[2] = 'L'; + + snprintf(pMMPolygonLayer->pszLayerName, + sizeof(pMMPolygonLayer->pszLayerName), "%s.pol", + hMiraMonLayer->pszSrcLayerName); + } + + if (nullptr == + (pMMPolygonLayer->pF = fopen_function(pMMPolygonLayer->pszLayerName, + hMiraMonLayer->pszFlags))) + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "Error pMMPolygonLayer->pF: Cannot open file %s.", + pMMPolygonLayer->pszLayerName); + return 1; + } + + // PS + if (hMiraMonLayer->LayerVersion == MM_32BITS_VERSION) + pMMPolygonLayer->nPSElementSize = MM_SIZE_OF_PS_32BITS; + else + pMMPolygonLayer->nPSElementSize = MM_SIZE_OF_PS_64BITS; + + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + snprintf(pMMPolygonLayer->pszPSName, sizeof(pMMPolygonLayer->pszPSName), + "%sP.~PS", hMiraMonLayer->pszSrcLayerName); + + if (nullptr == + (pMMPolygonLayer->pFPS = fopen_function(pMMPolygonLayer->pszPSName, + hMiraMonLayer->pszFlags))) + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "Error pMMPolygonLayer->pFPS: Cannot open file %s.", + pMMPolygonLayer->pszPSName); + return 1; + } + fseek_function(pMMPolygonLayer->pFPS, 0, SEEK_SET); + + if (MMInitFlush(&pMMPolygonLayer->FlushPS, pMMPolygonLayer->pFPS, + MM_1MB, &pMMPolygonLayer->pPS, 0, + pMMPolygonLayer->nPSElementSize)) + return 1; + } + + // PH + if (hMiraMonLayer->LayerVersion == MM_32BITS_VERSION) + pMMPolygonLayer->nPHElementSize = 
MM_SIZE_OF_PH_32BITS; + else + pMMPolygonLayer->nPHElementSize = MM_SIZE_OF_PH_64BITS; + + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + pMMPolygonLayer->nMaxPolHeader = MM_FIRST_NUMBER_OF_POLYGONS + 1; + else + pMMPolygonLayer->nMaxPolHeader = hMiraMonLayer->TopHeader.nElemCount; + + if (pMMPolygonLayer->nMaxPolHeader) + { + if (MMCheckSize_t(pMMPolygonLayer->nMaxPolHeader, + sizeof(*pMMPolygonLayer->pPolHeader))) + return 1; + if (nullptr == + (pMMPolygonLayer->pPolHeader = (struct MM_PH *)calloc_function( + (size_t)pMMPolygonLayer->nMaxPolHeader * + sizeof(*pMMPolygonLayer->pPolHeader)))) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMInitPolygonLayer())"); + return 1; + } + } + else + pMMPolygonLayer->pPolHeader = nullptr; + + // PAL + if (hMiraMonLayer->LayerVersion == MM_32BITS_VERSION) + pMMPolygonLayer->nPALElementSize = MM_SIZE_OF_PAL_32BITS; + else + pMMPolygonLayer->nPALElementSize = MM_SIZE_OF_PAL_64BITS; + + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + // Universal polygon. + memset(pMMPolygonLayer->pPolHeader, 0, + sizeof(*pMMPolygonLayer->pPolHeader)); + hMiraMonLayer->TopHeader.nElemCount = 1; + + // PAL + snprintf(pMMPolygonLayer->pszPALName, + sizeof(pMMPolygonLayer->pszPALName), "%sP.~idx", + hMiraMonLayer->pszSrcLayerName); + + if (nullptr == (pMMPolygonLayer->pFPAL = + fopen_function(pMMPolygonLayer->pszPALName, + hMiraMonLayer->pszFlags))) + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "Error pMMPolygonLayer->pFPAL: Cannot open file %s.", + pMMPolygonLayer->pszPALName); + return 1; + } + fseek_function(pMMPolygonLayer->pFPAL, 0, SEEK_SET); + + if (MMInitFlush(&pMMPolygonLayer->FlushPAL, pMMPolygonLayer->pFPAL, + MM_1MB, &pMMPolygonLayer->pPAL, 0, 0)) + return 1; + } + + // MiraMon metadata + + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + snprintf(hMiraMonLayer->MMPolygon.pszREL_LayerName, + sizeof(hMiraMonLayer->MMPolygon.pszREL_LayerName), "%sP.rel", + hMiraMonLayer->pszSrcLayerName); + } + else + { + CPLStrlcpy(hMiraMonLayer->MMPolygon.pszREL_LayerName, + hMiraMonLayer->pszSrcLayerName, + sizeof(hMiraMonLayer->MMPolygon.pszREL_LayerName)); + + if (MMChangeFinalPartOfTheName( + hMiraMonLayer->MMPolygon.pszREL_LayerName, MM_CPL_PATH_BUF_SIZE, + ".pol", "P.rel")) + return 1; + } + + if (hMiraMonLayer->ReadOrWrite == MM_READING_MODE) + { + // This file has to exist and be the appropriate version. 
+ if (MMCheck_REL_FILE(hMiraMonLayer->MMPolygon.pszREL_LayerName)) + return 1; + } + + hMiraMonLayer->pszMainREL_LayerName = + hMiraMonLayer->MMPolygon.pszREL_LayerName; + + // MIRAMON DATA BASE + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + snprintf(pMMPolygonLayer->MMAdmDB.pszExtDBFLayerName, + sizeof(pMMPolygonLayer->MMAdmDB.pszExtDBFLayerName), "%sP.dbf", + hMiraMonLayer->pszSrcLayerName); + } + else + { + CPLStrlcpy(pMMPolygonLayer->MMAdmDB.pszExtDBFLayerName, + hMiraMonLayer->pszSrcLayerName, + sizeof(pMMPolygonLayer->MMAdmDB.pszExtDBFLayerName)); + if (MMChangeFinalPartOfTheName( + pMMPolygonLayer->MMAdmDB.pszExtDBFLayerName, + MM_CPL_PATH_BUF_SIZE, ".pol", "P.dbf")) + return 1; + } + + if (hMiraMonLayer->ReadOrWrite == MM_READING_MODE) + { + if (MM_ReadExtendedDBFHeader(hMiraMonLayer)) + return 1; + } + + return 0; +} + +int MMInitLayerByType(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->eLT == MM_LayerType_Point || + hMiraMonLayer->eLT == MM_LayerType_Point3d) + { + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + snprintf(hMiraMonLayer->MMPoint.pszLayerName, + sizeof(hMiraMonLayer->MMPoint.pszLayerName), "%s.pnt", + hMiraMonLayer->pszSrcLayerName); + } + else + { + CPLStrlcpy(hMiraMonLayer->MMPoint.pszLayerName, + hMiraMonLayer->pszSrcLayerName, + sizeof(hMiraMonLayer->MMPoint.pszLayerName)); + } + if (hMiraMonLayer->MMMap && hMiraMonLayer->MMMap->fMMMap) + { + hMiraMonLayer->MMMap->nNumberOfLayers++; + fprintf_function(hMiraMonLayer->MMMap->fMMMap, "[VECTOR_%d]\n", + hMiraMonLayer->MMMap->nNumberOfLayers); + fprintf_function(hMiraMonLayer->MMMap->fMMMap, "Fitxer=%s.pnt\n", + MM_CPLGetBasename(hMiraMonLayer->pszSrcLayerName)); + } + + if (MMInitPointLayer(hMiraMonLayer)) + { + // Error specified inside the function + return 1; + } + return 0; + } + if (hMiraMonLayer->eLT == MM_LayerType_Arc || + hMiraMonLayer->eLT == MM_LayerType_Arc3d) + { + struct MiraMonArcLayer *pMMArcLayer = &hMiraMonLayer->MMArc; + + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + snprintf(pMMArcLayer->pszLayerName, + sizeof(pMMArcLayer->pszLayerName), "%s.arc", + hMiraMonLayer->pszSrcLayerName); + } + else + { + CPLStrlcpy(pMMArcLayer->pszLayerName, + hMiraMonLayer->pszSrcLayerName, + sizeof(pMMArcLayer->pszLayerName)); + } + + if (hMiraMonLayer->MMMap && hMiraMonLayer->MMMap->fMMMap) + { + hMiraMonLayer->MMMap->nNumberOfLayers++; + fprintf_function(hMiraMonLayer->MMMap->fMMMap, "[VECTOR_%d]\n", + hMiraMonLayer->MMMap->nNumberOfLayers); + fprintf_function(hMiraMonLayer->MMMap->fMMMap, "Fitxer=%s.arc\n", + MM_CPLGetBasename(hMiraMonLayer->pszSrcLayerName)); + } + + if (MMInitArcLayer(hMiraMonLayer)) + { + // Error specified inside the function + return 1; + } + return 0; + } + if (hMiraMonLayer->eLT == MM_LayerType_Pol || + hMiraMonLayer->eLT == MM_LayerType_Pol3d) + { + struct MiraMonPolygonLayer *pMMPolygonLayer = &hMiraMonLayer->MMPolygon; + + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + snprintf(pMMPolygonLayer->pszLayerName, + sizeof(pMMPolygonLayer->pszLayerName), "%s.pol", + hMiraMonLayer->pszSrcLayerName); + } + else + { + CPLStrlcpy(pMMPolygonLayer->pszLayerName, + hMiraMonLayer->pszSrcLayerName, + sizeof(pMMPolygonLayer->pszLayerName)); + } + + if (hMiraMonLayer->MMMap && hMiraMonLayer->MMMap->fMMMap) + { + hMiraMonLayer->MMMap->nNumberOfLayers++; + fprintf_function(hMiraMonLayer->MMMap->fMMMap, "[VECTOR_%d]\n", + hMiraMonLayer->MMMap->nNumberOfLayers); + 
fprintf_function(hMiraMonLayer->MMMap->fMMMap, "Fitxer=%s.pol\n", + MM_CPLGetBasename(hMiraMonLayer->pszSrcLayerName)); + } + + if (MMInitPolygonLayer(hMiraMonLayer)) + { + // Error specified inside the function + return 1; + } + + if (hMiraMonLayer->ReadOrWrite == MM_READING_MODE) + { + char *pszArcLayerName; + const char *pszExt; + // StringLine associated to the polygon + pszArcLayerName = MMReturnValueFromSectionINIFile( + pMMPolygonLayer->pszREL_LayerName, + SECTION_OVVW_ASPECTES_TECNICS, KEY_ArcSource); + if (pszArcLayerName) + { + MM_RemoveInitial_and_FinalQuotationMarks(pszArcLayerName); + + // If extension is not specified ".arc" will be used + pszExt = get_extension_function(pszArcLayerName); + if (MMIsEmptyString(pszExt)) + { + char *pszArcLayerNameAux = + calloc_function(strlen(pszArcLayerName) + 5); + if (!pszArcLayerNameAux) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMInitLayerByType())"); + free_function(pszArcLayerName); + return 1; + } + snprintf(pszArcLayerNameAux, strlen(pszArcLayerName) + 5, + "%s.arc", pszArcLayerName); + + free_function(pszArcLayerName); + pszArcLayerName = pszArcLayerNameAux; + } + + CPLStrlcpy( + pMMPolygonLayer->MMArc.pszLayerName, + form_filename_function( + get_path_function(hMiraMonLayer->pszSrcLayerName), + pszArcLayerName), + sizeof(pMMPolygonLayer->MMArc.pszLayerName)); + + free_function(pszArcLayerName); + } + else + { + // There is no arc layer on the metada file + MMCPLError( + CE_Failure, CPLE_OpenFailed, + "Error reading the ARC file in the metadata file %s.", + pMMPolygonLayer->pszREL_LayerName); + return 1; + } + + if (nullptr == (hMiraMonLayer->MMPolygon.MMArc.pF = fopen_function( + pMMPolygonLayer->MMArc.pszLayerName, + hMiraMonLayer->pszFlags))) + { + MMCPLError( + CE_Failure, CPLE_OpenFailed, + "Error pMMPolygonLayer.MMArc.pF: Cannot open file %s.", + pMMPolygonLayer->MMArc.pszLayerName); + return 1; + } + + if (MMReadHeader(hMiraMonLayer->MMPolygon.MMArc.pF, + &hMiraMonLayer->MMPolygon.TopArcHeader)) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "Error reading the format in file %s.", + pMMPolygonLayer->MMArc.pszLayerName); + return 1; + } + + if (MMReadPHPolygonSection(hMiraMonLayer)) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "Error reading the format in file %s.", + pMMPolygonLayer->MMArc.pszLayerName); + return 1; + } + + fclose_and_nullify(&hMiraMonLayer->MMPolygon.MMArc.pF); + } + else + { + // Creating the stringLine file associated to the polygon + snprintf(pMMPolygonLayer->MMArc.pszLayerName, + sizeof(pMMPolygonLayer->MMArc.pszLayerName), "%s.arc", + hMiraMonLayer->pszSrcLayerName); + } + + if (MMInitArcLayer(hMiraMonLayer)) + { + // Error specified inside the function + return 1; + } + + // Polygon is 3D if Arc is 3D, by definition. 
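+        // (The 3D flag of a polygon layer is kept in its boundary ARC header,
+        // so it is propagated to the polygon top header here.)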
+ hMiraMonLayer->TopHeader.bIs3d = + hMiraMonLayer->MMPolygon.TopArcHeader.bIs3d; + + if (hMiraMonLayer->LayerVersion == MM_32BITS_VERSION) + MMSet1_1Version(&pMMPolygonLayer->TopArcHeader); + else + MMSet2_0Version(&pMMPolygonLayer->TopArcHeader); + } + else if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + // Trying to get DBF information + snprintf(hMiraMonLayer->MMAdmDBWriting.pszExtDBFLayerName, + sizeof(hMiraMonLayer->MMAdmDBWriting.pszExtDBFLayerName), + "%s.dbf", hMiraMonLayer->pszSrcLayerName); + } + + return 0; +} + +int MMInitLayer(struct MiraMonVectLayerInfo *hMiraMonLayer, + const char *pzFileName, int LayerVersion, char nMMRecode, + char nMMLanguage, struct MiraMonDataBase *pLayerDB, + MM_BOOLEAN ReadOrWrite, struct MiraMonVectMapInfo *MMMap) +{ + if (!hMiraMonLayer) + return 1; + + // Some variables must be initialized + MM_FillFieldDescriptorByLanguage(); + + memset(hMiraMonLayer, 0, sizeof(*hMiraMonLayer)); + + //hMiraMonLayer->Version = MM_VECTOR_LAYER_LAST_VERSION; + + hMiraMonLayer->ReadOrWrite = ReadOrWrite; + hMiraMonLayer->MMMap = MMMap; + + // Don't free in destructor + hMiraMonLayer->pLayerDB = pLayerDB; + + // Opening mode + strcpy(hMiraMonLayer->pszFlags, "wb+"); + + if (LayerVersion == MM_UNKNOWN_VERSION) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "Unknown version in MiraMon driver."); + return 1; + } + if (LayerVersion == MM_LAST_VERSION) + { + MMSet1_1Version(&hMiraMonLayer->TopHeader); + hMiraMonLayer->nHeaderDiskSize = MM_HEADER_SIZE_64_BITS; + hMiraMonLayer->LayerVersion = MM_64BITS_VERSION; + } + else if (LayerVersion == MM_32BITS_VERSION) + { + MMSet1_1Version(&hMiraMonLayer->TopHeader); + hMiraMonLayer->nHeaderDiskSize = MM_HEADER_SIZE_32_BITS; + hMiraMonLayer->LayerVersion = MM_32BITS_VERSION; + } + else + { + MMSet2_0Version(&hMiraMonLayer->TopHeader); + hMiraMonLayer->nHeaderDiskSize = MM_HEADER_SIZE_64_BITS; + hMiraMonLayer->LayerVersion = MM_64BITS_VERSION; + } + + hMiraMonLayer->pszSrcLayerName = strdup_function(pzFileName); + hMiraMonLayer->szLayerTitle = + strdup_function(get_filename_function(pzFileName)); + + if (!hMiraMonLayer->bIsBeenInit && + hMiraMonLayer->eLT != MM_LayerType_Unknown) + { + if (MMInitLayerByType(hMiraMonLayer)) + { + // Error specified inside the function + return 1; + } + hMiraMonLayer->bIsBeenInit = 1; + } + + // If more nNumStringToOperate is needed, it'll be increased. + hMiraMonLayer->nNumStringToOperate = 0; + if (MMResizeStringToOperateIfNeeded(hMiraMonLayer, 500)) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMInitLayer())"); + return 1; + } + + hMiraMonLayer->nMMLanguage = nMMLanguage; + + if (nMMRecode == MM_RECODE_UTF8) + hMiraMonLayer->nCharSet = MM_JOC_CARAC_UTF8_DBF; + else //if(nMMRecode==MM_RECODE_ANSI) + hMiraMonLayer->nCharSet = MM_JOC_CARAC_ANSI_DBASE; + return 0; +} + +/* -------------------------------------------------------------------- */ +/* Layer Functions: Closing MiraMon layers */ +/* -------------------------------------------------------------------- */ +static int MMClose3DSectionLayer(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_INTERNAL_FID nElements, FILE_TYPE *pF, + FILE_TYPE *pF3d, const char *pszF3d, + struct MM_ZSection *pZSection, + MM_FILE_OFFSET FinalOffset) +{ + int ret_code = 1; + if (!hMiraMonLayer) + return 1; + + // Avoid closing when it has no sense. But it's not an error. + // Just return elegantly. 
+ if (!pF || !pF3d || !pszF3d || !pZSection) + return 0; + + if (hMiraMonLayer->bIsReal3d) + { + pZSection->ZSectionOffset = FinalOffset; + if (MMWriteZSection(pF, pZSection)) + goto end_label; + + // Header 3D. Writes it after header + if (MMWriteZDescriptionHeaders(hMiraMonLayer, pF, nElements, pZSection)) + goto end_label; + + // ZL section + pZSection->FlushZL.SizeOfBlockToBeSaved = 0; + if (MMAppendBlockToBuffer(&pZSection->FlushZL)) + goto end_label; + + if (MMMoveFromFileToFile(pF3d, pF, &pZSection->ZSectionOffset)) + goto end_label; + } + + ret_code = 0; +end_label: + fclose_and_nullify(&pF3d); + if (pszF3d && *pszF3d != '\0') + remove_function(pszF3d); + + return ret_code; +} + +static int MMClosePointLayer(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + int ret_code = 1; + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + hMiraMonLayer->nFinalElemCount = hMiraMonLayer->TopHeader.nElemCount; + hMiraMonLayer->TopHeader.bIs3d = hMiraMonLayer->bIsReal3d; + + if (MMWriteHeader(hMiraMonLayer->MMPoint.pF, &hMiraMonLayer->TopHeader)) + { + MMCPLError(CE_Failure, CPLE_NoWriteAccess, + "Error writing to file %s", + hMiraMonLayer->MMPoint.pszLayerName); + goto end_label; + } + hMiraMonLayer->OffsetCheck = hMiraMonLayer->nHeaderDiskSize; + + // TL Section + hMiraMonLayer->MMPoint.FlushTL.SizeOfBlockToBeSaved = 0; + if (MMAppendBlockToBuffer(&hMiraMonLayer->MMPoint.FlushTL)) + { + MMCPLError(CE_Failure, CPLE_NoWriteAccess, + "Error writing to file %s", + hMiraMonLayer->MMPoint.pszLayerName); + goto end_label; + } + if (MMMoveFromFileToFile(hMiraMonLayer->MMPoint.pFTL, + hMiraMonLayer->MMPoint.pF, + &hMiraMonLayer->OffsetCheck)) + { + MMCPLError(CE_Failure, CPLE_NoWriteAccess, + "Error writing to file %s", + hMiraMonLayer->MMPoint.pszLayerName); + goto end_label; + } + + fclose_and_nullify(&hMiraMonLayer->MMPoint.pFTL); + + if (*hMiraMonLayer->MMPoint.pszTLName != '\0') + remove_function(hMiraMonLayer->MMPoint.pszTLName); + + if (MMClose3DSectionLayer( + hMiraMonLayer, hMiraMonLayer->TopHeader.nElemCount, + hMiraMonLayer->MMPoint.pF, hMiraMonLayer->MMPoint.pF3d, + hMiraMonLayer->MMPoint.psz3DLayerName, + &hMiraMonLayer->MMPoint.pZSection, hMiraMonLayer->OffsetCheck)) + { + MMCPLError(CE_Failure, CPLE_NoWriteAccess, + "Error writing to file %s", + hMiraMonLayer->MMPoint.pszLayerName); + goto end_label; + } + } + + ret_code = 0; +end_label: + fclose_and_nullify(&hMiraMonLayer->MMPoint.pF); + return ret_code; +} + +static int MMCloseNodeLayer(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + int ret_code = 1; + struct MiraMonArcLayer *pMMArcLayer; + + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->bIsPolygon) + pMMArcLayer = &hMiraMonLayer->MMPolygon.MMArc; + else + pMMArcLayer = &hMiraMonLayer->MMArc; + + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + hMiraMonLayer->TopHeader.bIs3d = hMiraMonLayer->bIsReal3d; + + if (MMWriteHeader(pMMArcLayer->MMNode.pF, &pMMArcLayer->TopNodeHeader)) + goto end_label; + hMiraMonLayer->OffsetCheck = hMiraMonLayer->nHeaderDiskSize; + + // NH Section + if (MMWriteNHNodeSection(hMiraMonLayer, hMiraMonLayer->nHeaderDiskSize)) + goto end_label; + + // NL Section + pMMArcLayer->MMNode.FlushNL.SizeOfBlockToBeSaved = 0; + if (MMAppendBlockToBuffer(&pMMArcLayer->MMNode.FlushNL)) + goto end_label; + if (MMMoveFromFileToFile(pMMArcLayer->MMNode.pFNL, + pMMArcLayer->MMNode.pF, + &hMiraMonLayer->OffsetCheck)) + goto end_label; + + fclose_and_nullify(&pMMArcLayer->MMNode.pFNL); + if (*pMMArcLayer->MMNode.pszNLName != 
'\0') + remove_function(pMMArcLayer->MMNode.pszNLName); + } + + ret_code = 0; +end_label: + fclose_and_nullify(&pMMArcLayer->MMNode.pFNL); + + fclose_and_nullify(&pMMArcLayer->MMNode.pF); + + return ret_code; +} + +static int MMCloseArcLayer(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + int ret_code = 0; + struct MiraMonArcLayer *pMMArcLayer; + struct MM_TH *pArcTopHeader; + + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->bIsPolygon) + { + pMMArcLayer = &hMiraMonLayer->MMPolygon.MMArc; + pArcTopHeader = &hMiraMonLayer->MMPolygon.TopArcHeader; + } + else + { + pMMArcLayer = &hMiraMonLayer->MMArc; + pArcTopHeader = &hMiraMonLayer->TopHeader; + } + + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + hMiraMonLayer->nFinalElemCount = pArcTopHeader->nElemCount; + pArcTopHeader->bIs3d = hMiraMonLayer->bIsReal3d; + + if (MMWriteHeader(pMMArcLayer->pF, pArcTopHeader)) + { + MMCPLError(CE_Failure, CPLE_NoWriteAccess, + "Error writing to file %s", pMMArcLayer->pszLayerName); + goto end_label; + } + hMiraMonLayer->OffsetCheck = hMiraMonLayer->nHeaderDiskSize; + + // AH Section + if (MMWriteAHArcSection(hMiraMonLayer, hMiraMonLayer->OffsetCheck)) + { + MMCPLError(CE_Failure, CPLE_NoWriteAccess, + "Error writing to file %s", pMMArcLayer->pszLayerName); + goto end_label; + } + + // AL Section + pMMArcLayer->FlushAL.SizeOfBlockToBeSaved = 0; + if (MMAppendBlockToBuffer(&pMMArcLayer->FlushAL)) + { + MMCPLError(CE_Failure, CPLE_NoWriteAccess, + "Error writing to file %s", pMMArcLayer->pszLayerName); + goto end_label; + } + if (MMMoveFromFileToFile(pMMArcLayer->pFAL, pMMArcLayer->pF, + &hMiraMonLayer->OffsetCheck)) + { + MMCPLError(CE_Failure, CPLE_NoWriteAccess, + "Error writing to file %s", pMMArcLayer->pszLayerName); + goto end_label; + } + fclose_and_nullify(&pMMArcLayer->pFAL); + + if (*pMMArcLayer->pszALName != '\0') + remove_function(pMMArcLayer->pszALName); + + // 3D Section + if (MMClose3DSectionLayer( + hMiraMonLayer, pArcTopHeader->nElemCount, pMMArcLayer->pF, + pMMArcLayer->pF3d, pMMArcLayer->psz3DLayerName, + &pMMArcLayer->pZSection, hMiraMonLayer->OffsetCheck)) + { + MMCPLError(CE_Failure, CPLE_NoWriteAccess, + "Error writing to file %s", pMMArcLayer->pszLayerName); + goto end_label; + } + } + + ret_code = 0; +end_label: + fclose_and_nullify(&pMMArcLayer->pF); + + fclose_and_nullify(&pMMArcLayer->pFAL); + + if (MMCloseNodeLayer(hMiraMonLayer)) + ret_code = 1; + + return ret_code; +} + +static int MMClosePolygonLayer(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + int ret_code = 0; + struct MiraMonPolygonLayer *pMMPolygonLayer; + + if (!hMiraMonLayer) + return 1; + + pMMPolygonLayer = &hMiraMonLayer->MMPolygon; + + MMCloseArcLayer(hMiraMonLayer); + + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + hMiraMonLayer->nFinalElemCount = hMiraMonLayer->TopHeader.nElemCount; + hMiraMonLayer->TopHeader.bIs3d = hMiraMonLayer->bIsReal3d; + + if (MMWriteHeader(pMMPolygonLayer->pF, &hMiraMonLayer->TopHeader)) + { + MMCPLError(CE_Failure, CPLE_NoWriteAccess, + "Error writing to file %s", + pMMPolygonLayer->pszLayerName); + goto end_label; + } + hMiraMonLayer->OffsetCheck = hMiraMonLayer->nHeaderDiskSize; + + // PS Section + pMMPolygonLayer->FlushPS.SizeOfBlockToBeSaved = 0; + if (MMAppendBlockToBuffer(&pMMPolygonLayer->FlushPS)) + { + MMCPLError(CE_Failure, CPLE_NoWriteAccess, + "Error writing to file %s", + pMMPolygonLayer->pszLayerName); + goto end_label; + } + if (MMMoveFromFileToFile(pMMPolygonLayer->pFPS, pMMPolygonLayer->pF, + &hMiraMonLayer->OffsetCheck)) + { + 
MMCPLError(CE_Failure, CPLE_NoWriteAccess, + "Error writing to file %s", + pMMPolygonLayer->pszLayerName); + goto end_label; + } + + fclose_and_nullify(&pMMPolygonLayer->pFPS); + if (*pMMPolygonLayer->pszPSName != '\0') + remove_function(pMMPolygonLayer->pszPSName); + + // AH Section + if (MMWritePHPolygonSection(hMiraMonLayer, hMiraMonLayer->OffsetCheck)) + { + MMCPLError(CE_Failure, CPLE_NoWriteAccess, + "Error writing to file %s", + pMMPolygonLayer->pszLayerName); + goto end_label; + } + + // PAL Section + pMMPolygonLayer->FlushPAL.SizeOfBlockToBeSaved = 0; + if (MMAppendBlockToBuffer(&pMMPolygonLayer->FlushPAL)) + { + MMCPLError(CE_Failure, CPLE_NoWriteAccess, + "Error writing to file %s", + pMMPolygonLayer->pszLayerName); + goto end_label; + } + if (MMMoveFromFileToFile(pMMPolygonLayer->pFPAL, pMMPolygonLayer->pF, + &hMiraMonLayer->OffsetCheck)) + { + MMCPLError(CE_Failure, CPLE_NoWriteAccess, + "Error writing to file %s", + pMMPolygonLayer->pszLayerName); + goto end_label; + } + fclose_and_nullify(&pMMPolygonLayer->pFPAL); + + if (*pMMPolygonLayer->pszPALName != '\0') + remove_function(pMMPolygonLayer->pszPALName); + } + + ret_code = 0; + +end_label: + fclose_and_nullify(&pMMPolygonLayer->pF); + + fclose_and_nullify(&pMMPolygonLayer->pFPAL); + + return ret_code; +} + +int MMCloseLayer(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + int ret_code = 0; + //CheckMMVectorLayerVersion(hMiraMonLayer, 1) + + if (!hMiraMonLayer) + return 0; + + if (hMiraMonLayer->bIsPoint) + { + ret_code = MMClosePointLayer(hMiraMonLayer); + } + else if (hMiraMonLayer->bIsArc && !hMiraMonLayer->bIsPolygon) + { + ret_code = MMCloseArcLayer(hMiraMonLayer); + } + else if (hMiraMonLayer->bIsPolygon) + { + ret_code = MMClosePolygonLayer(hMiraMonLayer); + } + else if (hMiraMonLayer->bIsDBF) + { + // If no geometry, remove all created files + if (hMiraMonLayer->pszSrcLayerName) + remove_function(hMiraMonLayer->pszSrcLayerName); + if (hMiraMonLayer->szLayerTitle) + remove_function(hMiraMonLayer->szLayerTitle); + } + + // MiraMon metadata files + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + if (MMWriteVectorMetadata(hMiraMonLayer)) + { + MMCPLError(CE_Failure, CPLE_NoWriteAccess, + "Some error writing in metadata file of the layer"); + ret_code = 1; + } + } + + // MiraMon database files + if (MMCloseMMBD_XP(hMiraMonLayer)) + { + MMCPLError(CE_Failure, CPLE_NoWriteAccess, + "Some error writing in DBF file of the layer"); + ret_code = 1; + } + return ret_code; +} + +/* -------------------------------------------------------------------- */ +/* Layer Functions: Destroying (allocated memory) */ +/* -------------------------------------------------------------------- */ +static void MMDestroyMMAdmDB(struct MMAdmDatabase *pMMAdmDB) +{ + if (pMMAdmDB->pRecList) + { + free_function(pMMAdmDB->pRecList); + pMMAdmDB->pRecList = nullptr; + } + + if (pMMAdmDB->szRecordOnCourse) + { + free_function(pMMAdmDB->szRecordOnCourse); + pMMAdmDB->szRecordOnCourse = nullptr; + pMMAdmDB->nNumRecordOnCourse = 0; + } +} + +static int MMDestroyPointLayer(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->MMPoint.pTL) + { + free_function(hMiraMonLayer->MMPoint.pTL); + hMiraMonLayer->MMPoint.pTL = nullptr; + } + + MMDestroyZSectionDescription(&hMiraMonLayer->MMPoint.pZSection); + MMDestroyMMAdmDB(&hMiraMonLayer->MMPoint.MMAdmDB); + + return 0; +} + +static int MMDestroyNodeLayer(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + struct MiraMonArcLayer *pMMArcLayer; + + if 
(!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->bIsPolygon) + pMMArcLayer = &hMiraMonLayer->MMPolygon.MMArc; + else + pMMArcLayer = &hMiraMonLayer->MMArc; + + if (pMMArcLayer->MMNode.pNL) + { + free_function(pMMArcLayer->MMNode.pNL); + pMMArcLayer->MMNode.pNL = nullptr; + } + + if (pMMArcLayer->MMNode.pNodeHeader) + { + free_function(pMMArcLayer->MMNode.pNodeHeader); + pMMArcLayer->MMNode.pNodeHeader = nullptr; + } + + MMDestroyMMAdmDB(&hMiraMonLayer->MMArc.MMNode.MMAdmDB); + return 0; +} + +static int MMDestroyArcLayer(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + struct MiraMonArcLayer *pMMArcLayer; + + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->bIsPolygon) + pMMArcLayer = &hMiraMonLayer->MMPolygon.MMArc; + else + pMMArcLayer = &hMiraMonLayer->MMArc; + + if (pMMArcLayer->pAL) + { + free_function(pMMArcLayer->pAL); + pMMArcLayer->pAL = nullptr; + } + if (pMMArcLayer->pArcHeader) + { + free_function(pMMArcLayer->pArcHeader); + pMMArcLayer->pArcHeader = nullptr; + } + + MMDestroyZSectionDescription(&pMMArcLayer->pZSection); + MMDestroyMMAdmDB(&pMMArcLayer->MMAdmDB); + + MMDestroyNodeLayer(hMiraMonLayer); + return 0; +} + +static int MMDestroyPolygonLayer(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + struct MiraMonPolygonLayer *pMMPolygonLayer; + + if (!hMiraMonLayer) + return 1; + + pMMPolygonLayer = &hMiraMonLayer->MMPolygon; + + MMDestroyArcLayer(hMiraMonLayer); + + if (pMMPolygonLayer->pPAL) + { + free_function(pMMPolygonLayer->pPAL); + pMMPolygonLayer->pPAL = nullptr; + } + + if (pMMPolygonLayer->pPS) + { + free_function(pMMPolygonLayer->pPS); + pMMPolygonLayer->pPS = nullptr; + } + + if (pMMPolygonLayer->pPolHeader) + { + free_function(pMMPolygonLayer->pPolHeader); + pMMPolygonLayer->pPolHeader = nullptr; + } + + MMDestroyMMAdmDB(&pMMPolygonLayer->MMAdmDB); + + return 0; +} + +int MMDestroyLayer(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + //CheckMMVectorLayerVersion(hMiraMonLayer, 1) + + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->bIsPoint) + MMDestroyPointLayer(hMiraMonLayer); + else if (hMiraMonLayer->bIsArc && !hMiraMonLayer->bIsPolygon) + MMDestroyArcLayer(hMiraMonLayer); + else if (hMiraMonLayer->bIsPolygon) + MMDestroyPolygonLayer(hMiraMonLayer); + + if (hMiraMonLayer->pszSrcLayerName) + { + free_function(hMiraMonLayer->pszSrcLayerName); + hMiraMonLayer->pszSrcLayerName = nullptr; + } + if (hMiraMonLayer->szLayerTitle) + { + free_function(hMiraMonLayer->szLayerTitle); + hMiraMonLayer->szLayerTitle = nullptr; + } + if (hMiraMonLayer->pSRS) + { + free_function(hMiraMonLayer->pSRS); + hMiraMonLayer->pSRS = nullptr; + } + + if (hMiraMonLayer->pMultRecordIndex) + { + free_function(hMiraMonLayer->pMultRecordIndex); + hMiraMonLayer->pMultRecordIndex = nullptr; + } + + if (hMiraMonLayer->ReadFeature.pNCoordRing) + { + free(hMiraMonLayer->ReadFeature.pNCoordRing); + hMiraMonLayer->ReadFeature.pNCoordRing = nullptr; + } + if (hMiraMonLayer->ReadFeature.pCoord) + { + free(hMiraMonLayer->ReadFeature.pCoord); + hMiraMonLayer->ReadFeature.pCoord = nullptr; + } + if (hMiraMonLayer->ReadFeature.pZCoord) + { + free(hMiraMonLayer->ReadFeature.pZCoord); + hMiraMonLayer->ReadFeature.pZCoord = nullptr; + } + if (hMiraMonLayer->ReadFeature.pRecords) + { + free(hMiraMonLayer->ReadFeature.pRecords); + hMiraMonLayer->ReadFeature.pRecords = nullptr; + } + if (hMiraMonLayer->ReadFeature.flag_VFG) + { + free(hMiraMonLayer->ReadFeature.flag_VFG); + hMiraMonLayer->ReadFeature.flag_VFG = nullptr; + } + + if (hMiraMonLayer->pArcs) + { + free_function(hMiraMonLayer->pArcs); 
+ hMiraMonLayer->pArcs = nullptr; + } + + if (hMiraMonLayer->szStringToOperate) + { + free_function(hMiraMonLayer->szStringToOperate); + hMiraMonLayer->szStringToOperate = nullptr; + hMiraMonLayer->nNumStringToOperate = 0; + } + + if (hMiraMonLayer->pLayerDB) + { + if (hMiraMonLayer->pLayerDB->pFields) + { + free_function(hMiraMonLayer->pLayerDB->pFields); + hMiraMonLayer->pLayerDB->pFields = nullptr; + } + free_function(hMiraMonLayer->pLayerDB); + hMiraMonLayer->pLayerDB = nullptr; + } + + // Destroys all database objects + MMDestroyMMDB(hMiraMonLayer); + + return 0; +} + +/* -------------------------------------------------------------------- */ +/* Flush Layer Functions */ +/* -------------------------------------------------------------------- */ + +// Initializes a MM_FLUSH_INFO structure, which is used for buffering +// data before writing it to a file. +int MMInitFlush(struct MM_FLUSH_INFO *pFlush, FILE_TYPE *pF, GUInt64 nBlockSize, + char **pBuffer, MM_FILE_OFFSET DiskOffsetWhereToFlush, + GInt32 nMyDiskSize) +{ + memset(pFlush, 0, sizeof(*pFlush)); + *pBuffer = nullptr; + + pFlush->nMyDiskSize = nMyDiskSize; + pFlush->pF = pF; + pFlush->nBlockSize = nBlockSize; + pFlush->nNumBytes = 0; + if (MMCheckSize_t(nBlockSize, 1)) + return 1; + + if (!nBlockSize) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Error in MiraMon " + "driver: MMInitFlush() with no bytes to process"); + return 1; + } + + if (nullptr == (*pBuffer = (char *)calloc_function((size_t)nBlockSize))) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMInitFlush())"); + return 1; + } + pFlush->OffsetWhereToFlush = DiskOffsetWhereToFlush; + pFlush->CurrentOffset = 0; + return 0; +} + +// Reads data from a file into a buffer. +int MMReadFlush(struct MM_FLUSH_INFO *pFlush) +{ + fseek_function(pFlush->pF, pFlush->OffsetWhereToFlush, SEEK_SET); + if (pFlush->nBlockSize != + (GUInt64)(fread_function(pFlush->pBlockWhereToSaveOrRead, 1, + (size_t)pFlush->nBlockSize, pFlush->pF))) + return 1; + return 0; +} + +// Flushes data from a buffer to a disk file. +static int MMFlushToDisk(struct MM_FLUSH_INFO *FlushInfo) +{ + if (!FlushInfo->nNumBytes) + return 0; + // Just flush to the disk at the correct place. + fseek_function(FlushInfo->pF, FlushInfo->OffsetWhereToFlush, SEEK_SET); + + if (FlushInfo->nNumBytes != + (GUInt64)fwrite_function(FlushInfo->pBlockWhereToSaveOrRead, 1, + (size_t)FlushInfo->nNumBytes, FlushInfo->pF)) + return 1; + FlushInfo->OffsetWhereToFlush += FlushInfo->nNumBytes; + FlushInfo->NTimesFlushed++; + FlushInfo->TotalSavedBytes += FlushInfo->nNumBytes; + FlushInfo->nNumBytes = 0; + + return 0; +} + +// Reads a block of data from a buffer in memory +int MMReadBlockFromBuffer(struct MM_FLUSH_INFO *FlushInfo) +{ + if (!FlushInfo->SizeOfBlockToBeSaved) + return 0; + + if (FlushInfo->pBlockToBeSaved) + { + memcpy(FlushInfo->pBlockToBeSaved, + (void *)((char *)FlushInfo->pBlockWhereToSaveOrRead + + FlushInfo->CurrentOffset), + FlushInfo->SizeOfBlockToBeSaved); + } + FlushInfo->CurrentOffset += FlushInfo->SizeOfBlockToBeSaved; + + return 0; +} + +// Appends a block of data to a buffer in memory, which is +// used for later flushing to disk. 
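+// Blocks are accumulated in the in-memory buffer and the buffer is flushed
+// to disk when a new block would overflow it. A null pBlockToBeSaved appends
+// zero bytes (small reserved/padding fields), and a call with
+// SizeOfBlockToBeSaved == 0 forces a final flush of whatever remains.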
+int MMAppendBlockToBuffer(struct MM_FLUSH_INFO *FlushInfo)
+{
+    if (FlushInfo->SizeOfBlockToBeSaved)
+    {
+        // If the whole block does not fit in the buffer,
+        // then the block is written directly to the disk
+        if (FlushInfo->nNumBytes == 0 &&
+            FlushInfo->SizeOfBlockToBeSaved >= FlushInfo->nBlockSize)
+        {
+            if (MMFlushToDisk(FlushInfo))
+                return 1;
+            return 0;
+        }
+
+        // Is there space left in FlushInfo->pBlockWhereToSaveOrRead?
+        if (FlushInfo->nNumBytes + FlushInfo->SizeOfBlockToBeSaved <=
+            FlushInfo->nBlockSize)
+        {
+            if (FlushInfo->pBlockToBeSaved)
+            {
+                memcpy((void *)((char *)FlushInfo->pBlockWhereToSaveOrRead +
+                                FlushInfo->nNumBytes),
+                       FlushInfo->pBlockToBeSaved,
+                       FlushInfo->SizeOfBlockToBeSaved);
+            }
+            else // Add zero characters
+            {
+                char zero_caracters[8] = {0, 0, 0, 0, 0, 0, 0, 0};
+                memcpy((char *)FlushInfo->pBlockWhereToSaveOrRead +
+                           FlushInfo->nNumBytes,
+                       zero_caracters, FlushInfo->SizeOfBlockToBeSaved);
+            }
+
+            FlushInfo->nNumBytes += FlushInfo->SizeOfBlockToBeSaved;
+        }
+        else
+        {
+            // Empty the buffer
+            if (MMFlushToDisk(FlushInfo))
+                return 1;
+            // Append the pending bytes
+            if (MMAppendBlockToBuffer(FlushInfo))
+                return 1;
+        }
+        return 0;
+    }
+    // Just flush to the disk.
+    return MMFlushToDisk(FlushInfo);
+}
+
+// Copy the contents of a temporary file to a final file.
+// Used everywhere when closing layers.
+int MMMoveFromFileToFile(FILE_TYPE *pSrcFile, FILE_TYPE *pDestFile,
+                         MM_FILE_OFFSET *nOffset)
+{
+    size_t bufferSize = 1024 * 1024; // 1 MB buffer
+    unsigned char *buffer;
+    size_t bytesRead, bytesWritten;
+
+    if (!pSrcFile || !pDestFile || !nOffset)
+        return 0;
+
+    buffer = (unsigned char *)calloc_function(bufferSize);
+
+    if (!buffer)
+        return 1;
+
+    fseek_function(pSrcFile, 0, SEEK_SET);
+    fseek_function(pDestFile, *nOffset, SEEK_SET);
+    while ((bytesRead = fread_function(buffer, sizeof(unsigned char),
+                                       bufferSize, pSrcFile)) > 0)
+    {
+        bytesWritten = fwrite_function(buffer, sizeof(unsigned char), bytesRead,
+                                       pDestFile);
+        if (bytesWritten != bytesRead)
+        {
+            free_function(buffer);
+            return 1;
+        }
+        if (nOffset)
+            (*nOffset) += bytesWritten;
+    }
+    free_function(buffer);
+    return 0;
+}
+
+/* -------------------------------------------------------------------- */
+/* Layer: Offsets and variable types management */
+/* -------------------------------------------------------------------- */
+
+// Alignment described in the format documents.
+static void MMGetOffsetAlignedTo8(MM_FILE_OFFSET *Offset)
+{
+    MM_FILE_OFFSET reajust;
+
+    if ((*Offset) % 8L)
+    {
+        reajust = 8 - ((*Offset) % 8L);
+        (*Offset) += reajust;
+    }
+}
+
+// Reading integers depending on the version being read.
+int MMReadGUInt64DependingOnVersion(struct MiraMonVectLayerInfo *hMiraMonLayer,
+                                    struct MM_FLUSH_INFO *FlushInfo,
+                                    GUInt64 *pnUI64)
+{
+    uint32_t nUL32;
+
+    if (!hMiraMonLayer)
+        return 1;
+
+    if (hMiraMonLayer->LayerVersion == MM_32BITS_VERSION)
+    {
+        FlushInfo->pBlockToBeSaved = (void *)&nUL32;
+        FlushInfo->SizeOfBlockToBeSaved = sizeof(nUL32);
+        if (MMReadBlockFromBuffer(FlushInfo))
+        {
+            FlushInfo->pBlockToBeSaved = nullptr;
+            return 1;
+        }
+        *pnUI64 = (GUInt64)nUL32;
+    }
+    else
+    {
+        FlushInfo->pBlockToBeSaved = (void *)pnUI64;
+        FlushInfo->SizeOfBlockToBeSaved = sizeof(*pnUI64);
+        if (MMReadBlockFromBuffer(FlushInfo))
+        {
+            FlushInfo->pBlockToBeSaved = nullptr;
+            return 1;
+        }
+    }
+    FlushInfo->pBlockToBeSaved = nullptr;
+    return 0;
+}
+
+// Reading offsets depending on the version being read.
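+// In 32-bit (V1.1) layers, file offsets are stored on disk as 32-bit unsigned
+// integers and widened in memory; in 64-bit (V2.0) layers they are read at
+// their full width.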
+int MMReadOffsetDependingOnVersion(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MM_FLUSH_INFO *FlushInfo, + MM_FILE_OFFSET *pnUI64) +{ + uint32_t nUL32; + + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->LayerVersion == MM_32BITS_VERSION) + { + FlushInfo->pBlockToBeSaved = (void *)&nUL32; + FlushInfo->SizeOfBlockToBeSaved = sizeof(nUL32); + if (MMReadBlockFromBuffer(FlushInfo)) + { + FlushInfo->pBlockToBeSaved = nullptr; + return 1; + } + *pnUI64 = (MM_FILE_OFFSET)nUL32; + } + else + { + FlushInfo->pBlockToBeSaved = (void *)pnUI64; + FlushInfo->SizeOfBlockToBeSaved = sizeof(*pnUI64); + if (MMReadBlockFromBuffer(FlushInfo)) + { + FlushInfo->pBlockToBeSaved = nullptr; + return 1; + } + } + FlushInfo->pBlockToBeSaved = nullptr; + return 0; +} + +// Appending integers depending on the version. +int MMAppendIntegerDependingOnVersion( + struct MiraMonVectLayerInfo *hMiraMonLayer, struct MM_FLUSH_INFO *FlushInfo, + uint32_t *nUL32, GUInt64 nUI64) +{ + int result; + + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->LayerVersion == MM_32BITS_VERSION) + { + *nUL32 = (uint32_t)nUI64; + FlushInfo->SizeOfBlockToBeSaved = sizeof(*nUL32); + hMiraMonLayer->OffsetCheck += FlushInfo->SizeOfBlockToBeSaved; + FlushInfo->pBlockToBeSaved = (void *)nUL32; + } + else + { + FlushInfo->SizeOfBlockToBeSaved = sizeof(nUI64); + hMiraMonLayer->OffsetCheck += FlushInfo->SizeOfBlockToBeSaved; + FlushInfo->pBlockToBeSaved = (void *)&nUI64; + } + result = MMAppendBlockToBuffer(FlushInfo); + FlushInfo->pBlockToBeSaved = nullptr; + return result; +} + +/* -------------------------------------------------------------------- */ +/* Layer: Reading and writing layer sections */ +/* This code follows the specifications of the following document: */ +/* https://www.miramon.cat/new_note/usa/notes/ \ */ +/* FormatFitxersTopologicsMiraMon.pdf */ +/* -------------------------------------------------------------------- */ +int MMReadAHArcSection(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + MM_INTERNAL_FID iElem, nElem; + struct MM_FLUSH_INFO FlushTMP; + char *pBuffer = nullptr; + MM_FILE_OFFSET nBlockSize; + struct MiraMonArcLayer *pMMArcLayer; + MM_N_VERTICES_TYPE nElementCount; + + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->bIsPolygon) + { + pMMArcLayer = &hMiraMonLayer->MMPolygon.MMArc; + nElem = hMiraMonLayer->MMPolygon.TopArcHeader.nElemCount; + } + else + { + pMMArcLayer = &hMiraMonLayer->MMArc; + nElem = hMiraMonLayer->TopHeader.nElemCount; + } + + if (MMCheckSize_t(nElem, pMMArcLayer->nSizeArcHeader)) + { + return 1; + } + + nBlockSize = nElem * (pMMArcLayer->nSizeArcHeader); + + if (MMInitFlush(&FlushTMP, pMMArcLayer->pF, nBlockSize, &pBuffer, + hMiraMonLayer->nHeaderDiskSize, 0)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + FlushTMP.pBlockWhereToSaveOrRead = (void *)pBuffer; + if (MMReadFlush(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + for (iElem = 0; iElem < nElem; iElem++) + { + // Bounding box + FlushTMP.pBlockToBeSaved = + (void *)&(pMMArcLayer->pArcHeader[iElem].dfBB.dfMinX); + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMArcLayer->pArcHeader[iElem].dfBB.dfMinX); + if (MMReadBlockFromBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + FlushTMP.pBlockToBeSaved = + (void *)&pMMArcLayer->pArcHeader[iElem].dfBB.dfMaxX; + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMArcLayer->pArcHeader[iElem].dfBB.dfMaxX); + if (MMReadBlockFromBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + 
FlushTMP.pBlockToBeSaved = + (void *)&pMMArcLayer->pArcHeader[iElem].dfBB.dfMinY; + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMArcLayer->pArcHeader[iElem].dfBB.dfMinY); + if (MMReadBlockFromBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + FlushTMP.pBlockToBeSaved = + (void *)&pMMArcLayer->pArcHeader[iElem].dfBB.dfMaxY; + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMArcLayer->pArcHeader[iElem].dfBB.dfMaxY); + if (MMReadBlockFromBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // Element count: number of vertices of the arc + nElementCount = pMMArcLayer->pArcHeader[iElem].nElemCount; + if (MMReadGUInt64DependingOnVersion(hMiraMonLayer, &FlushTMP, + &nElementCount)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + pMMArcLayer->pArcHeader[iElem].nElemCount = nElementCount; + + // Offset: offset of the first vertice of the arc + if (MMReadOffsetDependingOnVersion( + hMiraMonLayer, &FlushTMP, + &pMMArcLayer->pArcHeader[iElem].nOffset)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + // First node: first node of the arc + if (MMReadGUInt64DependingOnVersion( + hMiraMonLayer, &FlushTMP, + &pMMArcLayer->pArcHeader[iElem].nFirstIdNode)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + // Last node: first node of the arc + if (MMReadGUInt64DependingOnVersion( + hMiraMonLayer, &FlushTMP, + &pMMArcLayer->pArcHeader[iElem].nLastIdNode)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + // Length of the arc + FlushTMP.pBlockToBeSaved = + (void *)&pMMArcLayer->pArcHeader[iElem].dfLength; + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMArcLayer->pArcHeader[iElem].dfLength); + if (MMReadBlockFromBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + } + + if (pBuffer) + free_function(pBuffer); + return 0; +} + +int MMWriteAHArcSection(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_FILE_OFFSET DiskOffset) +{ + MM_INTERNAL_FID iElem; + struct MM_FLUSH_INFO FlushTMP; + char *pBuffer = nullptr; + uint32_t nUL32; + MM_FILE_OFFSET nOffsetDiff; + struct MiraMonArcLayer *pMMArcLayer; + + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->bIsPolygon) + pMMArcLayer = &hMiraMonLayer->MMPolygon.MMArc; + else + pMMArcLayer = &hMiraMonLayer->MMArc; + + nOffsetDiff = + hMiraMonLayer->nHeaderDiskSize + + hMiraMonLayer->nFinalElemCount * (pMMArcLayer->nSizeArcHeader); + + if (MMInitFlush(&FlushTMP, pMMArcLayer->pF, MM_1MB, &pBuffer, DiskOffset, + 0)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + FlushTMP.pBlockWhereToSaveOrRead = (void *)pBuffer; + for (iElem = 0; iElem < hMiraMonLayer->nFinalElemCount; iElem++) + { + // Bounding box + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMArcLayer->pArcHeader[iElem].dfBB.dfMinX); + FlushTMP.pBlockToBeSaved = + (void *)&pMMArcLayer->pArcHeader[iElem].dfBB.dfMinX; + hMiraMonLayer->OffsetCheck += FlushTMP.SizeOfBlockToBeSaved; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + FlushTMP.pBlockToBeSaved = + (void *)&pMMArcLayer->pArcHeader[iElem].dfBB.dfMaxX; + hMiraMonLayer->OffsetCheck += FlushTMP.SizeOfBlockToBeSaved; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + FlushTMP.pBlockToBeSaved = + (void *)&pMMArcLayer->pArcHeader[iElem].dfBB.dfMinY; + hMiraMonLayer->OffsetCheck += FlushTMP.SizeOfBlockToBeSaved; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + 
FlushTMP.pBlockToBeSaved = + (void *)&pMMArcLayer->pArcHeader[iElem].dfBB.dfMaxY; + hMiraMonLayer->OffsetCheck += FlushTMP.SizeOfBlockToBeSaved; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // Element count: number of vertices of the arc + if (MMAppendIntegerDependingOnVersion( + hMiraMonLayer, &FlushTMP, &nUL32, + pMMArcLayer->pArcHeader[iElem].nElemCount)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // Offset: offset of the first vertice of the arc + if (MMAppendIntegerDependingOnVersion( + hMiraMonLayer, &FlushTMP, &nUL32, + pMMArcLayer->pArcHeader[iElem].nOffset + nOffsetDiff)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + // First node: first node of the arc + if (MMAppendIntegerDependingOnVersion( + hMiraMonLayer, &FlushTMP, &nUL32, + pMMArcLayer->pArcHeader[iElem].nFirstIdNode)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + // Last node: first node of the arc + if (MMAppendIntegerDependingOnVersion( + hMiraMonLayer, &FlushTMP, &nUL32, + pMMArcLayer->pArcHeader[iElem].nLastIdNode)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + // Length of the arc + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMArcLayer->pArcHeader[iElem].dfLength); + hMiraMonLayer->OffsetCheck += FlushTMP.SizeOfBlockToBeSaved; + FlushTMP.pBlockToBeSaved = + (void *)&pMMArcLayer->pArcHeader[iElem].dfLength; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + } + FlushTMP.SizeOfBlockToBeSaved = 0; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + if (pBuffer) + free_function(pBuffer); + return 0; +} + +#ifdef JUST_IN_CASE_WE_NEED_IT_SOMEDAY +static int MMReadNHNodeSection(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + MM_INTERNAL_FID iElem, nElem; + struct MM_FLUSH_INFO FlushTMP; + char *pBuffer = nullptr; + MM_FILE_OFFSET nBlockSize; + struct MiraMonArcLayer *pMMArcLayer; + + if (hMiraMonLayer->bIsPolygon) + pMMArcLayer = &hMiraMonLayer->MMPolygon.MMArc; + else + pMMArcLayer = &hMiraMonLayer->MMArc; + + nElem = pMMArcLayer->TopNodeHeader.nElemCount; + + nBlockSize = nElem * pMMArcLayer->MMNode.nSizeNodeHeader; + + if (MMInitFlush(&FlushTMP, pMMArcLayer->MMNode.pF, nBlockSize, &pBuffer, + hMiraMonLayer->nHeaderDiskSize, 0)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + FlushTMP.pBlockWhereToSaveOrRead = (void *)pBuffer; + if (MMReadFlush(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + for (iElem = 0; iElem < nElem; iElem++) + { + // Arcs count + FlushTMP.pBlockToBeSaved = + (void *)&pMMArcLayer->MMNode.pNodeHeader[iElem].nArcsCount; + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMArcLayer->MMNode.pNodeHeader[iElem].nArcsCount); + if (MMReadBlockFromBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + // Node type + FlushTMP.pBlockToBeSaved = + (void *)&pMMArcLayer->MMNode.pNodeHeader[iElem].cNodeType; + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMArcLayer->MMNode.pNodeHeader[iElem].cNodeType); + if (MMReadBlockFromBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + FlushTMP.SizeOfBlockToBeSaved = 1; + FlushTMP.pBlockToBeSaved = (void *)nullptr; + if (MMReadBlockFromBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // Offset: offset of the first arc to the node + if (MMReadOffsetDependingOnVersion( + hMiraMonLayer, &FlushTMP, + 
&pMMArcLayer->MMNode.pNodeHeader[iElem].nOffset)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + } + + if (pBuffer) + free_function(pBuffer); + return 0; +} +#endif // JUST_IN_CASE_WE_NEED_IT_SOMEDAY + +int MMWriteNHNodeSection(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_FILE_OFFSET DiskOffset) +{ + MM_INTERNAL_FID iElem; + struct MM_FLUSH_INFO FlushTMP; + char *pBuffer = nullptr; + uint32_t nUL32; + MM_FILE_OFFSET nOffsetDiff; + struct MiraMonArcLayer *pMMArcLayer; + + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->bIsPolygon) + pMMArcLayer = &hMiraMonLayer->MMPolygon.MMArc; + else + pMMArcLayer = &hMiraMonLayer->MMArc; + + nOffsetDiff = hMiraMonLayer->nHeaderDiskSize + + (pMMArcLayer->TopNodeHeader.nElemCount * + pMMArcLayer->MMNode.nSizeNodeHeader); + + if (MMInitFlush(&FlushTMP, pMMArcLayer->MMNode.pF, MM_1MB, &pBuffer, + DiskOffset, 0)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + FlushTMP.pBlockWhereToSaveOrRead = (void *)pBuffer; + for (iElem = 0; iElem < pMMArcLayer->TopNodeHeader.nElemCount; iElem++) + { + // Arcs count + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMArcLayer->MMNode.pNodeHeader[iElem].nArcsCount); + hMiraMonLayer->OffsetCheck += FlushTMP.SizeOfBlockToBeSaved; + FlushTMP.pBlockToBeSaved = + (void *)&pMMArcLayer->MMNode.pNodeHeader[iElem].nArcsCount; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + // Node type + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMArcLayer->MMNode.pNodeHeader[iElem].cNodeType); + hMiraMonLayer->OffsetCheck += FlushTMP.SizeOfBlockToBeSaved; + FlushTMP.pBlockToBeSaved = + (void *)&pMMArcLayer->MMNode.pNodeHeader[iElem].cNodeType; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + FlushTMP.SizeOfBlockToBeSaved = 1; + hMiraMonLayer->OffsetCheck += FlushTMP.SizeOfBlockToBeSaved; + FlushTMP.pBlockToBeSaved = (void *)nullptr; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // Offset: offset of the first arc to the node + if (MMAppendIntegerDependingOnVersion( + hMiraMonLayer, &FlushTMP, &nUL32, + pMMArcLayer->MMNode.pNodeHeader[iElem].nOffset + nOffsetDiff)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + } + FlushTMP.SizeOfBlockToBeSaved = 0; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + if (pBuffer) + free_function(pBuffer); + return 0; +} + +int MMReadPHPolygonSection(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + MM_INTERNAL_FID iElem; + struct MM_FLUSH_INFO FlushTMP; + char *pBuffer = nullptr; + MM_FILE_OFFSET nBlockSize; + struct MiraMonPolygonLayer *pMMPolygonLayer; + + if (!hMiraMonLayer) + return 1; + + pMMPolygonLayer = &hMiraMonLayer->MMPolygon; + + if (MMCheckSize_t(hMiraMonLayer->TopHeader.nElemCount, + pMMPolygonLayer->nPHElementSize) || + MMCheckSize_t(hMiraMonLayer->MMPolygon.TopArcHeader.nElemCount, + hMiraMonLayer->MMPolygon.nPSElementSize)) + { + return 1; + } + nBlockSize = + hMiraMonLayer->TopHeader.nElemCount * (pMMPolygonLayer->nPHElementSize); + + if (MMInitFlush(&FlushTMP, pMMPolygonLayer->pF, nBlockSize, &pBuffer, + hMiraMonLayer->nHeaderDiskSize + + (hMiraMonLayer->MMPolygon.TopArcHeader.nElemCount * + hMiraMonLayer->MMPolygon.nPSElementSize), + 0)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + FlushTMP.pBlockWhereToSaveOrRead = (void *)pBuffer; + if (MMReadFlush(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + 
return 1; + } + + for (iElem = 0; iElem < hMiraMonLayer->TopHeader.nElemCount; iElem++) + { + // Bounding box + FlushTMP.pBlockToBeSaved = + (void *)&(pMMPolygonLayer->pPolHeader[iElem].dfBB.dfMinX); + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMPolygonLayer->pPolHeader[iElem].dfBB.dfMinX); + if (MMReadBlockFromBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + FlushTMP.pBlockToBeSaved = + (void *)&pMMPolygonLayer->pPolHeader[iElem].dfBB.dfMaxX; + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMPolygonLayer->pPolHeader[iElem].dfBB.dfMaxX); + if (MMReadBlockFromBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + FlushTMP.pBlockToBeSaved = + (void *)&pMMPolygonLayer->pPolHeader[iElem].dfBB.dfMinY; + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMPolygonLayer->pPolHeader[iElem].dfBB.dfMinY); + if (MMReadBlockFromBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + FlushTMP.pBlockToBeSaved = + (void *)&pMMPolygonLayer->pPolHeader[iElem].dfBB.dfMaxY; + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMPolygonLayer->pPolHeader[iElem].dfBB.dfMaxY); + if (MMReadBlockFromBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // Arcs count: number of arcs of the polygon + if (MMReadGUInt64DependingOnVersion( + hMiraMonLayer, &FlushTMP, + &pMMPolygonLayer->pPolHeader[iElem].nArcsCount)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // External arcs count: number of external arcs of the polygon + if (MMReadGUInt64DependingOnVersion( + hMiraMonLayer, &FlushTMP, + &pMMPolygonLayer->pPolHeader[iElem].nExternalRingsCount)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // Rings count: number of rings of the polygon + if (MMReadGUInt64DependingOnVersion( + hMiraMonLayer, &FlushTMP, + &pMMPolygonLayer->pPolHeader[iElem].nRingsCount)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // Offset: offset of the first vertex of the arc + if (MMReadOffsetDependingOnVersion( + hMiraMonLayer, &FlushTMP, + &pMMPolygonLayer->pPolHeader[iElem].nOffset)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // Perimeter of the arc + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMPolygonLayer->pPolHeader[iElem].dfPerimeter); + FlushTMP.pBlockToBeSaved = + (void *)&pMMPolygonLayer->pPolHeader[iElem].dfPerimeter; + if (MMReadBlockFromBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // Area of the arc + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMPolygonLayer->pPolHeader[iElem].dfArea); + FlushTMP.pBlockToBeSaved = + (void *)&pMMPolygonLayer->pPolHeader[iElem].dfArea; + if (MMReadBlockFromBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + } + + if (pBuffer) + free_function(pBuffer); + return 0; +} + +int MMWritePHPolygonSection(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_FILE_OFFSET DiskOffset) +{ + MM_INTERNAL_FID iElem; + struct MM_FLUSH_INFO FlushTMP; + char *pBuffer = nullptr; + uint32_t nUL32; + MM_FILE_OFFSET nOffsetDiff; + struct MiraMonPolygonLayer *pMMPolygonLayer; + + if (!hMiraMonLayer) + return 1; + + pMMPolygonLayer = &hMiraMonLayer->MMPolygon; + + if (!pMMPolygonLayer->pF) + return 0; + + if (!hMiraMonLayer->nFinalElemCount) + return 0; + + nOffsetDiff = DiskOffset + hMiraMonLayer->TopHeader.nElemCount * + (pMMPolygonLayer->nPHElementSize); + + if (MMInitFlush(&FlushTMP, pMMPolygonLayer->pF, MM_1MB, &pBuffer, + DiskOffset, 0)) + { + if (pBuffer) + free_function(pBuffer); + 
return 1; + } + + FlushTMP.pBlockWhereToSaveOrRead = (void *)pBuffer; + for (iElem = 0; iElem < hMiraMonLayer->nFinalElemCount; iElem++) + { + // Bounding box + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMPolygonLayer->pPolHeader[iElem].dfBB.dfMinX); + FlushTMP.pBlockToBeSaved = + (void *)&pMMPolygonLayer->pPolHeader[iElem].dfBB.dfMinX; + hMiraMonLayer->OffsetCheck += FlushTMP.SizeOfBlockToBeSaved; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + FlushTMP.pBlockToBeSaved = + (void *)&pMMPolygonLayer->pPolHeader[iElem].dfBB.dfMaxX; + hMiraMonLayer->OffsetCheck += FlushTMP.SizeOfBlockToBeSaved; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + FlushTMP.pBlockToBeSaved = + (void *)&pMMPolygonLayer->pPolHeader[iElem].dfBB.dfMinY; + hMiraMonLayer->OffsetCheck += FlushTMP.SizeOfBlockToBeSaved; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + FlushTMP.pBlockToBeSaved = + (void *)&pMMPolygonLayer->pPolHeader[iElem].dfBB.dfMaxY; + hMiraMonLayer->OffsetCheck += FlushTMP.SizeOfBlockToBeSaved; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // Arcs count: number of the arcs of the polygon + if (MMAppendIntegerDependingOnVersion( + hMiraMonLayer, &FlushTMP, &nUL32, + pMMPolygonLayer->pPolHeader[iElem].nArcsCount)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // External arcs count: number of external arcs of the polygon + if (MMAppendIntegerDependingOnVersion( + hMiraMonLayer, &FlushTMP, &nUL32, + pMMPolygonLayer->pPolHeader[iElem].nExternalRingsCount)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // Rings count: number of rings of the polygon + if (MMAppendIntegerDependingOnVersion( + hMiraMonLayer, &FlushTMP, &nUL32, + pMMPolygonLayer->pPolHeader[iElem].nRingsCount)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // Offset: offset of the first vertex of the arc + if (MMAppendIntegerDependingOnVersion( + hMiraMonLayer, &FlushTMP, &nUL32, + pMMPolygonLayer->pPolHeader[iElem].nOffset + nOffsetDiff)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // Perimeter of the arc + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMPolygonLayer->pPolHeader[iElem].dfPerimeter); + hMiraMonLayer->OffsetCheck += FlushTMP.SizeOfBlockToBeSaved; + FlushTMP.pBlockToBeSaved = + (void *)&pMMPolygonLayer->pPolHeader[iElem].dfPerimeter; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + // Area of the arc + FlushTMP.SizeOfBlockToBeSaved = + sizeof(pMMPolygonLayer->pPolHeader[iElem].dfArea); + hMiraMonLayer->OffsetCheck += FlushTMP.SizeOfBlockToBeSaved; + FlushTMP.pBlockToBeSaved = + (void *)&pMMPolygonLayer->pPolHeader[iElem].dfArea; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + } + FlushTMP.SizeOfBlockToBeSaved = 0; + if (MMAppendBlockToBuffer(&FlushTMP)) + { + if (pBuffer) + free_function(pBuffer); + return 1; + } + + if (pBuffer) + free_function(pBuffer); + return 0; +} + +/* -------------------------------------------------------------------- */ +/* Feature Functions */ +/* -------------------------------------------------------------------- */ +int MMInitFeature(struct MiraMonFeature *hMMFeature) +{ + memset(hMMFeature, 0, sizeof(*hMMFeature)); + + hMMFeature->nMaxMRecords = MM_INIT_NUMBER_OF_RECORDS; + if 
(MMCheckSize_t(hMMFeature->nMaxMRecords, + sizeof(*(hMMFeature->pRecords)))) + return 1; + + if (!hMMFeature->nMaxMRecords) + return 0; // No elements nothing to do. + + if ((hMMFeature->pRecords = + calloc_function((size_t)hMMFeature->nMaxMRecords * + sizeof(*(hMMFeature->pRecords)))) == nullptr) + return 1; + + hMMFeature->pRecords[0].nMaxField = MM_INIT_NUMBER_OF_FIELDS; + hMMFeature->pRecords[0].nNumField = 0; + if (MMCheckSize_t(hMMFeature->pRecords[0].nMaxField, + sizeof(*(hMMFeature->pRecords[0].pField)))) + return 1; + if (nullptr == (hMMFeature->pRecords[0].pField = calloc_function( + (size_t)hMMFeature->pRecords[0].nMaxField * + sizeof(*(hMMFeature->pRecords[0].pField))))) + return 1; + + return 0; +} + +// Conserves all allocated memory but resets the information +void MMResetFeatureGeometry(struct MiraMonFeature *hMMFeature) +{ + if (hMMFeature->pNCoordRing) + { + memset(hMMFeature->pNCoordRing, 0, + (size_t)hMMFeature->nMaxpNCoordRing * + sizeof(*(hMMFeature->pNCoordRing))); + } + if (hMMFeature->pCoord) + { + memset(hMMFeature->pCoord, 0, + (size_t)hMMFeature->nMaxpCoord * sizeof(*(hMMFeature->pCoord))); + } + hMMFeature->nICoord = 0; + if (hMMFeature->pZCoord) + { + memset(hMMFeature->pZCoord, 0, + (size_t)hMMFeature->nMaxpZCoord * + sizeof(*(hMMFeature->pZCoord))); + } + hMMFeature->nNRings = 0; + hMMFeature->nIRing = 0; + + if (hMMFeature->flag_VFG) + { + memset(hMMFeature->flag_VFG, 0, + (size_t)hMMFeature->nMaxVFG * sizeof(*(hMMFeature->flag_VFG))); + } +} + +// Preserves all allocated memory but initializes it to zero. +void MMResetFeatureRecord(struct MiraMonFeature *hMMFeature) +{ + MM_EXT_DBF_N_MULTIPLE_RECORDS nIRecord; + MM_EXT_DBF_N_FIELDS nIField; + + if (!hMMFeature->pRecords) + return; + + for (nIRecord = 0; nIRecord < hMMFeature->nMaxMRecords; nIRecord++) + { + if (!hMMFeature->pRecords[nIRecord].pField) + continue; + for (nIField = 0; nIField < hMMFeature->pRecords[nIRecord].nMaxField; + nIField++) + { + if (hMMFeature->pRecords[nIRecord].pField[nIField].pDinValue) + *(hMMFeature->pRecords[nIRecord].pField[nIField].pDinValue) = + '\0'; + hMMFeature->pRecords[nIRecord].pField[nIField].bIsValid = 0; + } + } +} + +// Destroys all allocated memory +void MMDestroyFeature(struct MiraMonFeature *hMMFeature) +{ + if (hMMFeature->pCoord) + { + free_function(hMMFeature->pCoord); + hMMFeature->pCoord = nullptr; + } + if (hMMFeature->pZCoord) + { + free_function(hMMFeature->pZCoord); + hMMFeature->pZCoord = nullptr; + } + if (hMMFeature->pNCoordRing) + { + free_function(hMMFeature->pNCoordRing); + hMMFeature->pNCoordRing = nullptr; + } + + if (hMMFeature->flag_VFG) + { + free_function(hMMFeature->flag_VFG); + hMMFeature->flag_VFG = nullptr; + } + + if (hMMFeature->pRecords) + { + MM_EXT_DBF_N_MULTIPLE_RECORDS nIRecord; + MM_EXT_DBF_N_FIELDS nIField; + + for (nIRecord = 0; nIRecord < hMMFeature->nMaxMRecords; nIRecord++) + { + if (!hMMFeature->pRecords[nIRecord].pField) + continue; + for (nIField = 0; + nIField < hMMFeature->pRecords[nIRecord].nMaxField; nIField++) + { + if (hMMFeature->pRecords[nIRecord].pField[nIField].pDinValue) + free_function(hMMFeature->pRecords[nIRecord] + .pField[nIField] + .pDinValue); + } + free_function(hMMFeature->pRecords[nIRecord].pField); + } + free_function(hMMFeature->pRecords); + hMMFeature->pRecords = nullptr; + } + + hMMFeature->nNRings = 0; + hMMFeature->nNumMRecords = 0; + hMMFeature->nMaxMRecords = 0; +} + +// Creates a MiraMon polygon, multipolygon, or linestring (arc) feature. 
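
MMInitFeature(), MMResetFeatureGeometry()/MMResetFeatureRecord() and MMDestroyFeature() above define the lifecycle of the reusable MiraMonFeature scratch structure used by the feature creators that follow: allocate once, reset between features while keeping the allocations, release everything when the layer is closed. A hedged sketch of the expected call order (the filling and writing steps are assumed to happen elsewhere):

    struct MiraMonFeature oFeature;

    if (MMInitFeature(&oFeature))      /* allocate initial records/fields */
        return 1;

    /* ... fill coordinates and records, then write the feature ... */

    MMResetFeatureGeometry(&oFeature); /* keep allocations, clear contents */
    MMResetFeatureRecord(&oFeature);

    /* ... fill and write the next feature ... */

    MMDestroyFeature(&oFeature);       /* release everything on layer close */
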
+static int MMCreateFeaturePolOrArc(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MiraMonFeature *hMMFeature) +{ + double *pZ = nullptr; + struct MM_POINT_2D *pCoord, *pCoordReal; + MM_POLYGON_RINGS_COUNT nIPart; + MM_N_VERTICES_TYPE nIVertice; + double dtempx, dtempy; + MM_POLYGON_RINGS_COUNT nExternalRingsCount; + struct MM_PH *pCurrentPolHeader = nullptr; + struct MM_AH *pCurrentArcHeader; + // To access how many points have been stored in the last stringline + struct MM_AH *pLastArcHeader = nullptr; + struct MM_NH *pCurrentNodeHeader, *pCurrentNodeHeaderPlus1 = nullptr; + uint32_t UnsignedLongNumber; + struct MiraMonArcLayer *pMMArc; + struct MiraMonNodeLayer *pMMNode; + struct MM_TH *pArcTopHeader; + struct MM_TH *pNodeTopHeader; + char VFG = 0; + MM_FILE_OFFSET nOffsetTmp; + struct MM_ZD *pZDesc = nullptr; + struct MM_FLUSH_INFO *pFlushAL, *pFlushNL, *pFlushZL, *pFlushPS, *pFlushPAL; + MM_N_VERTICES_TYPE nPolVertices = 0; + MM_BOOLEAN bReverseArc; + int prevCoord = -1; + + if (!hMiraMonLayer) + return MM_FATAL_ERROR_WRITING_FEATURES; + + if (!hMMFeature) + return MM_FATAL_ERROR_WRITING_FEATURES; + + // Setting pointer to 3D structure (if exists). + if (hMiraMonLayer->TopHeader.bIs3d) + pZ = hMMFeature->pZCoord; + + // Setting pointers to arc/node structures. + if (hMiraMonLayer->bIsPolygon) + { + pMMArc = &hMiraMonLayer->MMPolygon.MMArc; + pArcTopHeader = &hMiraMonLayer->MMPolygon.TopArcHeader; + + pMMNode = &hMiraMonLayer->MMPolygon.MMArc.MMNode; + pNodeTopHeader = &hMiraMonLayer->MMPolygon.MMArc.TopNodeHeader; + } + else + { + pMMArc = &hMiraMonLayer->MMArc; + pArcTopHeader = &hMiraMonLayer->TopHeader; + + pMMNode = &hMiraMonLayer->MMArc.MMNode; + pNodeTopHeader = &hMiraMonLayer->MMArc.TopNodeHeader; + } + + // Setting pointers to polygon structures + if (hMiraMonLayer->bIsPolygon) + { + if (MMResizePolHeaderPointer(&hMiraMonLayer->MMPolygon.pPolHeader, + &hMiraMonLayer->MMPolygon.nMaxPolHeader, + hMiraMonLayer->TopHeader.nElemCount, + MM_INCR_NUMBER_OF_POLYGONS, 0)) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMResizePolHeaderPointer())"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + pCurrentPolHeader = hMiraMonLayer->MMPolygon.pPolHeader + + hMiraMonLayer->TopHeader.nElemCount; + MMInitBoundingBox(&pCurrentPolHeader->dfBB); + + pCurrentPolHeader->dfPerimeter = 0; + pCurrentPolHeader->dfArea = 0L; + } + + // Setting flushes to all sections described in + // format specifications document. + pFlushAL = &pMMArc->FlushAL; + pFlushNL = &pMMNode->FlushNL; + pFlushZL = &pMMArc->pZSection.FlushZL; + pFlushPS = &hMiraMonLayer->MMPolygon.FlushPS; + pFlushPAL = &hMiraMonLayer->MMPolygon.FlushPAL; + + pFlushNL->pBlockWhereToSaveOrRead = (void *)pMMNode->pNL; + pFlushAL->pBlockWhereToSaveOrRead = (void *)pMMArc->pAL; + if (hMiraMonLayer->TopHeader.bIs3d) + pFlushZL->pBlockWhereToSaveOrRead = (void *)pMMArc->pZSection.pZL; + if (hMiraMonLayer->bIsPolygon) + { + pFlushPS->pBlockWhereToSaveOrRead = + (void *)hMiraMonLayer->MMPolygon.pPS; + pFlushPAL->pBlockWhereToSaveOrRead = + (void *)hMiraMonLayer->MMPolygon.pPAL; + } + + // Creation of the MiraMon extended database + if (!hMiraMonLayer->bIsPolygon) + { + if (hMiraMonLayer->TopHeader.nElemCount == 0) + { + MMCPLDebug("MiraMon", "Creating MiraMon database"); + if (MMCreateMMDB(hMiraMonLayer)) + return MM_FATAL_ERROR_WRITING_FEATURES; + MMCPLDebug("MiraMon", "MiraMon database created. 
" + "Creating features..."); + } + } + else + { // Universal polygon has been created + if (hMiraMonLayer->TopHeader.nElemCount == 1) + { + MMCPLDebug("MiraMon", "Creating MiraMon database"); + if (MMCreateMMDB(hMiraMonLayer)) + return MM_FATAL_ERROR_WRITING_FEATURES; + MMCPLDebug("MiraMon", "MiraMon database created. " + "Creating features..."); + + // Universal polygon have a record with ID_GRAFIC=0 and blancs + if (MMAddPolygonRecordToMMDB(hMiraMonLayer, nullptr, 0, 0, nullptr)) + return MM_FATAL_ERROR_WRITING_FEATURES; + } + } + + // Checking if its possible continue writing the file due + // to version limitations. + if (hMiraMonLayer->LayerVersion == MM_32BITS_VERSION) + { + MM_FILE_OFFSET nNodeOffset, nArcOffset; + MM_INTERNAL_FID nArcElemCount, nNodeElemCount; + nNodeOffset = pFlushNL->TotalSavedBytes + pFlushNL->nNumBytes; + nArcOffset = pMMArc->nOffsetArc; + + nArcElemCount = pArcTopHeader->nElemCount; + nNodeElemCount = pNodeTopHeader->nElemCount; + for (nIPart = 0; nIPart < hMMFeature->nNRings; nIPart++, + nArcElemCount++, + nNodeElemCount += (hMiraMonLayer->bIsPolygon ? 1 : 2)) + { + // There is space for the element that is going to be written? + // Polygon or arc + if (MMCheckVersionForFID(hMiraMonLayer, + hMiraMonLayer->TopHeader.nElemCount)) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "Error in MMCheckVersionForFID() (1)"); + return MM_STOP_WRITING_FEATURES; + } + + // Arc if there is no polygon + if (MMCheckVersionForFID(hMiraMonLayer, nArcElemCount)) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "Error in MMCheckVersionForFID() (2)"); + return MM_STOP_WRITING_FEATURES; + } + + // Nodes + if (MMCheckVersionForFID(hMiraMonLayer, nNodeElemCount)) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "Error in MMCheckVersionForFID() (3)"); + return MM_STOP_WRITING_FEATURES; + } + + // There is space for the last node(s) that is(are) going to be written? 
+ if (!hMiraMonLayer->bIsPolygon) + { + if (MMCheckVersionForFID(hMiraMonLayer, nNodeElemCount + 1)) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "Error in MMCheckVersionForFID() (4)"); + return MM_STOP_WRITING_FEATURES; + } + } + + // Checking offsets + // AL: check the last point + if (MMCheckVersionOffset(hMiraMonLayer, nArcOffset)) + { + MMCPLDebug("MiraMon", "Error in MMCheckVersionOffset() (0)"); + return MM_STOP_WRITING_FEATURES; + } + // Setting next offset + nArcOffset += + (hMMFeature->pNCoordRing[nIPart]) * pMMArc->nALElementSize; + + // NL: check the last node + if (hMiraMonLayer->bIsPolygon) + nNodeOffset += (hMMFeature->nNRings) * MM_SIZE_OF_NL_32BITS; + else + nNodeOffset += (2 * hMMFeature->nNRings) * MM_SIZE_OF_NL_32BITS; + + if (MMCheckVersionOffset(hMiraMonLayer, nNodeOffset)) + { + MMCPLDebug("MiraMon", "Error in MMCheckVersionOffset() (1)"); + return MM_STOP_WRITING_FEATURES; + } + // Setting next offset + nNodeOffset += MM_SIZE_OF_NL_32BITS; + + if (!hMiraMonLayer->bIsPolygon) + { + if (MMCheckVersionOffset(hMiraMonLayer, nNodeOffset)) + { + MMCPLDebug("MiraMon", + "Error in MMCheckVersionOffset() (2)"); + return MM_STOP_WRITING_FEATURES; + } + // Setting next offset + nNodeOffset += MM_SIZE_OF_NL_32BITS; + } + + // Where 3D part is going to start + if (hMiraMonLayer->TopHeader.bIs3d) + { + nArcOffset += + hMMFeature->pNCoordRing[nIPart] * pMMArc->nALElementSize; + if (MMCheckVersionFor3DOffset( + hMiraMonLayer, nArcOffset, + hMiraMonLayer->TopHeader.nElemCount + + hMMFeature->nNRings)) + { + MMCPLDebug("MiraMon", + "Error in MMCheckVersionFor3DOffset()"); + return MM_STOP_WRITING_FEATURES; + } + } + } + } + + // Going through parts of the feature. + nExternalRingsCount = 0; + pCoord = hMMFeature->pCoord; + + if (!pCoord) + return MM_FATAL_ERROR_WRITING_FEATURES; + + // Doing real job + for (nIPart = 0; nIPart < hMMFeature->nNRings; nIPart++, + pArcTopHeader->nElemCount++, + pNodeTopHeader->nElemCount += (hMiraMonLayer->bIsPolygon ? 1 : 2)) + { + // Resize structures if necessary + if (MMResizeArcHeaderPointer( + &pMMArc->pArcHeader, &pMMArc->nMaxArcHeader, + pArcTopHeader->nElemCount + 1, MM_INCR_NUMBER_OF_ARCS, 0)) + { + MMCPLDebug("MiraMon", "Error in MMResizeArcHeaderPointer()"); + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMCreateFeaturePolOrArc())"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + if (MMResizeNodeHeaderPointer( + &pMMNode->pNodeHeader, &pMMNode->nMaxNodeHeader, + hMiraMonLayer->bIsPolygon ? 
pNodeTopHeader->nElemCount + 1 + : pNodeTopHeader->nElemCount + 2, + MM_INCR_NUMBER_OF_NODES, 0)) + { + MMCPLDebug("MiraMon", "Error in MMResizeNodeHeaderPointer()"); + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMCreateFeaturePolOrArc())"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + if (hMiraMonLayer->TopHeader.bIs3d) + { + if (MMResizeZSectionDescrPointer( + &pMMArc->pZSection.pZDescription, + &pMMArc->pZSection.nMaxZDescription, pMMArc->nMaxArcHeader, + MM_INCR_NUMBER_OF_ARCS, 0)) + { + MMCPLDebug("MiraMon", + "Error in MMResizeZSectionDescrPointer()"); + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMCreateFeaturePolOrArc())"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + pZDesc = pMMArc->pZSection.pZDescription; + } + + // Setting pointers to current headers + pCurrentArcHeader = pMMArc->pArcHeader + pArcTopHeader->nElemCount; + MMInitBoundingBox(&pCurrentArcHeader->dfBB); + + pCurrentNodeHeader = pMMNode->pNodeHeader + pNodeTopHeader->nElemCount; + if (!hMiraMonLayer->bIsPolygon) + pCurrentNodeHeaderPlus1 = pCurrentNodeHeader + 1; + + // Initializing feature information (section AH/PH) + pCurrentArcHeader->nElemCount = hMMFeature->pNCoordRing[nIPart]; + pCurrentArcHeader->dfLength = 0.0; + pCurrentArcHeader->nOffset = + pFlushAL->TotalSavedBytes + pFlushAL->nNumBytes; + + // Dumping vertices and calculating stuff that + // MiraMon needs (longitude/perimeter, area) + bReverseArc = FALSE; + if (hMiraMonLayer->bIsPolygon) + { + VFG = hMMFeature->flag_VFG[nIPart]; + bReverseArc = (VFG & MM_ROTATE_ARC) ? TRUE : FALSE; + } + + if (bReverseArc) + { + prevCoord = 1; // to find previous coordinate + pCoordReal = pCoord + pCurrentArcHeader->nElemCount - 1; + } + else + { + prevCoord = -1; // to find previous coordinate + pCoordReal = pCoord; + } + + for (nIVertice = 0; nIVertice < pCurrentArcHeader->nElemCount; + nIVertice++, (bReverseArc) ? 
pCoordReal-- : pCoordReal++) + { + // Writing the arc in the normal way + pFlushAL->SizeOfBlockToBeSaved = sizeof(pCoordReal->dfX); + pFlushAL->pBlockToBeSaved = (void *)&(pCoord + nIVertice)->dfX; + if (MMAppendBlockToBuffer(pFlushAL)) + { + MMCPLDebug("MiraMon", "Error in MMAppendBlockToBuffer() (1)"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + pFlushAL->pBlockToBeSaved = (void *)&(pCoord + nIVertice)->dfY; + if (MMAppendBlockToBuffer(pFlushAL)) + { + MMCPLDebug("MiraMon", "Error in MMAppendBlockToBuffer() (2)"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + // Calculating stuff using the inverse coordinates if it's needed + MMUpdateBoundingBoxXY(&pCurrentArcHeader->dfBB, pCoordReal); + if (nIVertice == 0 || + nIVertice == pCurrentArcHeader->nElemCount - 1) + MMUpdateBoundingBoxXY(&pNodeTopHeader->hBB, pCoordReal); + if (nIVertice > 0) + { + dtempx = pCoordReal->dfX - (pCoordReal + prevCoord)->dfX; + dtempy = pCoordReal->dfY - (pCoordReal + prevCoord)->dfY; + pCurrentArcHeader->dfLength += + sqrt(dtempx * dtempx + dtempy * dtempy); + if (hMiraMonLayer->bIsPolygon && pCurrentPolHeader) + { + pCurrentPolHeader->dfArea += + (pCoordReal->dfX * (pCoordReal + prevCoord)->dfY - + (pCoordReal + prevCoord)->dfX * pCoordReal->dfY); + } + } + } + if (bReverseArc) + pCoord = pCoordReal + pCurrentArcHeader->nElemCount + 1; + else + pCoord += pCurrentArcHeader->nElemCount; + + nPolVertices += pCurrentArcHeader->nElemCount; + + // Updating bounding boxes + MMUpdateBoundingBox(&pArcTopHeader->hBB, &pCurrentArcHeader->dfBB); + if (hMiraMonLayer->bIsPolygon) + MMUpdateBoundingBox(&hMiraMonLayer->TopHeader.hBB, + &pCurrentArcHeader->dfBB); + + pMMArc->nOffsetArc += + (pCurrentArcHeader->nElemCount) * pMMArc->nALElementSize; + + pCurrentArcHeader->nFirstIdNode = (2 * pArcTopHeader->nElemCount); + if (hMiraMonLayer->bIsPolygon) + { + pCurrentArcHeader->nFirstIdNode = pArcTopHeader->nElemCount; + pCurrentArcHeader->nLastIdNode = pArcTopHeader->nElemCount; + } + else + { + pCurrentArcHeader->nFirstIdNode = (2 * pArcTopHeader->nElemCount); + pCurrentArcHeader->nLastIdNode = + (2 * pArcTopHeader->nElemCount + 1); + } + if (MMAddArcRecordToMMDB(hMiraMonLayer, hMMFeature, + pArcTopHeader->nElemCount, pCurrentArcHeader)) + { + MMCPLDebug("MiraMon", "Error in MMAddArcRecordToMMDB()"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + // Node Stuff: writing NL section + pCurrentNodeHeader->nArcsCount = 1; + if (hMiraMonLayer->bIsPolygon) + pCurrentNodeHeader->cNodeType = MM_RING_NODE; + else + pCurrentNodeHeader->cNodeType = MM_FINAL_NODE; + + pCurrentNodeHeader->nOffset = + pFlushNL->TotalSavedBytes + pFlushNL->nNumBytes; + if (MMAppendIntegerDependingOnVersion(hMiraMonLayer, pFlushNL, + &UnsignedLongNumber, + pArcTopHeader->nElemCount)) + { + MMCPLDebug("MiraMon", + "Error in MMAppendIntegerDependingOnVersion()"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + // 8bytes alignment + nOffsetTmp = pFlushNL->TotalSavedBytes + pFlushNL->nNumBytes; + MMGetOffsetAlignedTo8(&nOffsetTmp); + if (nOffsetTmp != pFlushNL->TotalSavedBytes + pFlushNL->nNumBytes) + { + pFlushNL->SizeOfBlockToBeSaved = + (size_t)(nOffsetTmp - + (pFlushNL->TotalSavedBytes + pFlushNL->nNumBytes)); + pFlushNL->pBlockToBeSaved = (void *)nullptr; + if (MMAppendBlockToBuffer(pFlushNL)) + { + MMCPLDebug("MiraMon", "Error in MMAppendBlockToBuffer() (3)"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + } + if (MMAddNodeRecordToMMDB(hMiraMonLayer, pNodeTopHeader->nElemCount, + pCurrentNodeHeader)) + { + MMCPLDebug("MiraMon", "Error in 
MMAddNodeRecordToMMDB()"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + if (!hMiraMonLayer->bIsPolygon) + { + pCurrentNodeHeaderPlus1->nArcsCount = 1; + if (hMiraMonLayer->bIsPolygon) + pCurrentNodeHeaderPlus1->cNodeType = MM_RING_NODE; + else + pCurrentNodeHeaderPlus1->cNodeType = MM_FINAL_NODE; + + pCurrentNodeHeaderPlus1->nOffset = + pFlushNL->TotalSavedBytes + pFlushNL->nNumBytes; + + if (MMAppendIntegerDependingOnVersion(hMiraMonLayer, pFlushNL, + &UnsignedLongNumber, + pArcTopHeader->nElemCount)) + { + MMCPLDebug("MiraMon", + "Error in MMAppendIntegerDependingOnVersion()"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + // 8bytes alignment + nOffsetTmp = pFlushNL->TotalSavedBytes + pFlushNL->nNumBytes; + MMGetOffsetAlignedTo8(&nOffsetTmp); + if (nOffsetTmp != pFlushNL->TotalSavedBytes + pFlushNL->nNumBytes) + { + pFlushNL->SizeOfBlockToBeSaved = + (size_t)(nOffsetTmp - + (pFlushNL->TotalSavedBytes + pFlushNL->nNumBytes)); + pFlushNL->pBlockToBeSaved = (void *)nullptr; + if (MMAppendBlockToBuffer(pFlushNL)) + { + MMCPLDebug("MiraMon", "Error in MMAppendBlockToBuffer()"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + } + if (MMAddNodeRecordToMMDB(hMiraMonLayer, + pNodeTopHeader->nElemCount + 1, + pCurrentNodeHeaderPlus1)) + { + MMCPLDebug("MiraMon", "Error in MMAddNodeRecordToMMDB()"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + } + + // 3D stuff + if (hMiraMonLayer->TopHeader.bIs3d && pZDesc) + { + pZDesc[pArcTopHeader->nElemCount].dfBBminz = + STATISTICAL_UNDEF_VALUE; + pZDesc[pArcTopHeader->nElemCount].dfBBmaxz = + -STATISTICAL_UNDEF_VALUE; + for (nIVertice = 0; nIVertice < pCurrentArcHeader->nElemCount; + nIVertice++, pZ++) + { + pFlushZL->SizeOfBlockToBeSaved = sizeof(*pZ); + pFlushZL->pBlockToBeSaved = (void *)pZ; + if (MMAppendBlockToBuffer(pFlushZL)) + { + MMCPLDebug("MiraMon", "Error in MMAppendBlockToBuffer()"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + if (pZDesc[pArcTopHeader->nElemCount].dfBBminz > *pZ) + pZDesc[pArcTopHeader->nElemCount].dfBBminz = *pZ; + if (pZDesc[pArcTopHeader->nElemCount].dfBBmaxz < *pZ) + pZDesc[pArcTopHeader->nElemCount].dfBBmaxz = *pZ; + } + pZDesc[pArcTopHeader->nElemCount].nZCount = 1; + if (pArcTopHeader->nElemCount == 0) + pZDesc[pArcTopHeader->nElemCount].nOffsetZ = 0; + else + { + pLastArcHeader = + pMMArc->pArcHeader + pArcTopHeader->nElemCount - 1; + pZDesc[pArcTopHeader->nElemCount].nOffsetZ = + pZDesc[pArcTopHeader->nElemCount - 1].nOffsetZ + + sizeof(*pZ) * (pLastArcHeader->nElemCount); + } + } + + // Exclusive polygon stuff + if (hMiraMonLayer->bIsPolygon && pCurrentPolHeader) + { + // PS SECTION + if (MMAppendIntegerDependingOnVersion(hMiraMonLayer, pFlushPS, + &UnsignedLongNumber, 0)) + { + MMCPLDebug("MiraMon", + "Error in MMAppendIntegerDependingOnVersion()"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + if (MMAppendIntegerDependingOnVersion( + hMiraMonLayer, pFlushPS, &UnsignedLongNumber, + hMiraMonLayer->TopHeader.nElemCount)) + { + MMCPLDebug("MiraMon", + "Error in MMAppendIntegerDependingOnVersion()"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + // PAL SECTION + // Vertices of rings defining + // holes in polygons are in a counterclockwise direction. + // Holes are at the end of all external rings that contain the holes!! 
+ if (VFG & MM_EXTERIOR_ARC_SIDE) + nExternalRingsCount++; + + pCurrentPolHeader->nArcsCount++; + //(MM_POLYGON_ARCS_COUNT)hMMFeature->nNRings; + if (VFG & MM_EXTERIOR_ARC_SIDE) + pCurrentPolHeader + ->nExternalRingsCount++; //= nExternalRingsCount; + + if (VFG & MM_END_ARC_IN_RING) + pCurrentPolHeader->nRingsCount++; //= hMMFeature->nNRings; + if (nIPart == 0) + { + pCurrentPolHeader->nOffset = + pFlushPAL->TotalSavedBytes + pFlushPAL->nNumBytes; + } + + if (nIPart == hMMFeature->nNRings - 1) + pCurrentPolHeader->dfArea /= 2; + + pFlushPAL->SizeOfBlockToBeSaved = 1; + pFlushPAL->pBlockToBeSaved = (void *)&VFG; + if (MMAppendBlockToBuffer(pFlushPAL)) + { + MMCPLDebug("MiraMon", "Error in MMAppendBlockToBuffer()"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + if (MMAppendIntegerDependingOnVersion(hMiraMonLayer, pFlushPAL, + &UnsignedLongNumber, + pArcTopHeader->nElemCount)) + { + MMCPLDebug("MiraMon", + "Error in MMAppendIntegerDependingOnVersion()"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + // 8bytes alignment + if (nIPart == hMMFeature->nNRings - 1) + { + nOffsetTmp = pFlushPAL->TotalSavedBytes + pFlushPAL->nNumBytes; + MMGetOffsetAlignedTo8(&nOffsetTmp); + + if (nOffsetTmp != + pFlushPAL->TotalSavedBytes + pFlushPAL->nNumBytes) + { + pFlushPAL->SizeOfBlockToBeSaved = + (size_t)(nOffsetTmp - (pFlushPAL->TotalSavedBytes + + pFlushPAL->nNumBytes)); + pFlushPAL->pBlockToBeSaved = (void *)nullptr; + if (MMAppendBlockToBuffer(pFlushPAL)) + { + MMCPLDebug("MiraMon", + "Error in MMAppendBlockToBuffer()"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + } + } + + MMUpdateBoundingBox(&pCurrentPolHeader->dfBB, + &pCurrentArcHeader->dfBB); + pCurrentPolHeader->dfPerimeter += pCurrentArcHeader->dfLength; + } + } + + // Updating element count and if the polygon is multipart. + // MiraMon does not accept multipoints or multilines, only multipolygons. + if (hMiraMonLayer->bIsPolygon) + { + if (MMAddPolygonRecordToMMDB(hMiraMonLayer, hMMFeature, + hMiraMonLayer->TopHeader.nElemCount, + nPolVertices, pCurrentPolHeader)) + { + MMCPLDebug("MiraMon", "Error in MMAddPolygonRecordToMMDB()"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + hMiraMonLayer->TopHeader.nElemCount++; + + if (nExternalRingsCount > 1) + hMiraMonLayer->TopHeader.bIsMultipolygon = TRUE; + } + + return MM_CONTINUE_WRITING_FEATURES; +} // End of de MMCreateFeaturePolOrArc() + +// Creates a MiraMon DBF record when not associated with a geometric feature. +static int MMCreateRecordDBF(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MiraMonFeature *hMMFeature) +{ + int result; + + if (!hMiraMonLayer) + return MM_FATAL_ERROR_WRITING_FEATURES; + + if (hMiraMonLayer->TopHeader.nElemCount == 0) + { + if (MMCreateMMDB(hMiraMonLayer)) + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + result = MMAddDBFRecordToMMDB(hMiraMonLayer, hMMFeature); + if (result == MM_FATAL_ERROR_WRITING_FEATURES || + result == MM_STOP_WRITING_FEATURES) + return result; + + // Everything OK. + return MM_CONTINUE_WRITING_FEATURES; +} // End of de MMCreateRecordDBF() + +// Creates a MiraMon point feature. 
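
MMCreateFeaturePolOrArc() and MMCreateRecordDBF() above, as well as MMCreateFeaturePoint() below, all report progress through the same three status codes, which the caller is expected to turn into either a hard failure or a request to stop sending features. A hedged sketch of how a caller might dispatch on them (the surrounding error handling is assumed, not part of the patch):

    int nStatus = MMCreateRecordDBF(hMiraMonLayer, hMMFeature);

    if (nStatus == MM_FATAL_ERROR_WRITING_FEATURES)
        return 1;   /* unrecoverable error: abort the write                */
    if (nStatus == MM_STOP_WRITING_FEATURES)
        return 1;   /* version / 2 GB limit reached: stop adding features  */
    /* MM_CONTINUE_WRITING_FEATURES: keep sending features. */
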
+static int MMCreateFeaturePoint(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MiraMonFeature *hMMFeature) +{ + double *pZ = nullptr; + struct MM_POINT_2D *pCoord; + MM_POLYGON_RINGS_COUNT nIPart; + MM_N_VERTICES_TYPE nIVertice, nCoord; + struct MM_ZD *pZDescription = nullptr; + MM_INTERNAL_FID nElemCount; + int result; + + if (!hMiraMonLayer) + return MM_FATAL_ERROR_WRITING_FEATURES; + + if (!hMMFeature) + return MM_STOP_WRITING_FEATURES; + + if (hMiraMonLayer->TopHeader.bIs3d) + pZ = hMMFeature->pZCoord; + + nElemCount = hMiraMonLayer->TopHeader.nElemCount; + for (nIPart = 0, pCoord = hMMFeature->pCoord; nIPart < hMMFeature->nNRings; + nIPart++, nElemCount++) + { + nCoord = hMMFeature->pNCoordRing[nIPart]; + + // Checking if its possible continue writing the file due + // to version limitations. + if (MMCheckVersionForFID(hMiraMonLayer, + hMiraMonLayer->TopHeader.nElemCount + nCoord)) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "Error in MMCheckVersionForFID() (5)"); + return MM_STOP_WRITING_FEATURES; + } + + if (hMiraMonLayer->TopHeader.bIs3d) + { + if (nElemCount == 0) + { + if (MMCheckVersionFor3DOffset(hMiraMonLayer, 0, nElemCount + 1)) + return MM_STOP_WRITING_FEATURES; + } + else + { + pZDescription = hMiraMonLayer->MMPoint.pZSection.pZDescription; + if (!pZDescription) + { + MMCPLError(CE_Failure, CPLE_ObjectNull, + "Error: pZDescription should not be nullptr"); + return MM_STOP_WRITING_FEATURES; + } + if (MMCheckVersionFor3DOffset( + hMiraMonLayer, + pZDescription[nElemCount - 1].nOffsetZ + sizeof(*pZ), + nElemCount + 1)) + return MM_STOP_WRITING_FEATURES; + } + } + + // Doing real job + // Memory issues + if (hMiraMonLayer->TopHeader.bIs3d && pZ) + { + if (MMResizeZSectionDescrPointer( + &hMiraMonLayer->MMPoint.pZSection.pZDescription, + &hMiraMonLayer->MMPoint.pZSection.nMaxZDescription, + nElemCount, MM_INCR_NUMBER_OF_POINTS, 0)) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMCreateFeaturePoint())"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + pZDescription = hMiraMonLayer->MMPoint.pZSection.pZDescription; + if (!pZDescription) + { + MMCPLError(CE_Failure, CPLE_ObjectNull, + "Error: pZDescription should not be nullptr"); + return MM_STOP_WRITING_FEATURES; + } + + pZDescription[nElemCount].dfBBminz = *pZ; + pZDescription[nElemCount].dfBBmaxz = *pZ; + pZDescription[nElemCount].nZCount = 1; + if (nElemCount == 0) + pZDescription[nElemCount].nOffsetZ = 0; + else + pZDescription[nElemCount].nOffsetZ = + pZDescription[nElemCount - 1].nOffsetZ + sizeof(*pZ); + } + + // Flush settings + hMiraMonLayer->MMPoint.FlushTL.pBlockWhereToSaveOrRead = + (void *)hMiraMonLayer->MMPoint.pTL; + if (hMiraMonLayer->TopHeader.bIs3d) + hMiraMonLayer->MMPoint.pZSection.FlushZL.pBlockWhereToSaveOrRead = + (void *)hMiraMonLayer->MMPoint.pZSection.pZL; + + // Dump point or points (MiraMon does not have multiple points) + for (nIVertice = 0; nIVertice < nCoord; nIVertice++, pCoord++) + { + // Updating the bounding box of the layer + MMUpdateBoundingBoxXY(&hMiraMonLayer->TopHeader.hBB, pCoord); + + // Adding the point at the memory block + hMiraMonLayer->MMPoint.FlushTL.SizeOfBlockToBeSaved = + sizeof(pCoord->dfX); + hMiraMonLayer->MMPoint.FlushTL.pBlockToBeSaved = + (void *)&pCoord->dfX; + if (MMAppendBlockToBuffer(&hMiraMonLayer->MMPoint.FlushTL)) + return MM_FATAL_ERROR_WRITING_FEATURES; + hMiraMonLayer->MMPoint.FlushTL.pBlockToBeSaved = + (void *)&pCoord->dfY; + if (MMAppendBlockToBuffer(&hMiraMonLayer->MMPoint.FlushTL)) + return 
MM_FATAL_ERROR_WRITING_FEATURES; + + // Adding the 3D part, if exists, at the memory block + if (hMiraMonLayer->TopHeader.bIs3d && pZ) + { + hMiraMonLayer->MMPoint.pZSection.FlushZL.SizeOfBlockToBeSaved = + sizeof(*pZ); + hMiraMonLayer->MMPoint.pZSection.FlushZL.pBlockToBeSaved = + (void *)pZ; + if (MMAppendBlockToBuffer( + &hMiraMonLayer->MMPoint.pZSection.FlushZL)) + return MM_FATAL_ERROR_WRITING_FEATURES; + + if (!pZDescription) + { + MMCPLError(CE_Failure, CPLE_ObjectNull, + "Error: pZDescription should not be nullptr"); + return MM_STOP_WRITING_FEATURES; + } + + if (pZDescription[nElemCount].dfBBminz > *pZ) + pZDescription[nElemCount].dfBBminz = *pZ; + if (pZDescription[nElemCount].dfBBmaxz < *pZ) + pZDescription[nElemCount].dfBBmaxz = *pZ; + + if (hMiraMonLayer->MMPoint.pZSection.ZHeader.dfBBminz > *pZ) + hMiraMonLayer->MMPoint.pZSection.ZHeader.dfBBminz = *pZ; + if (hMiraMonLayer->MMPoint.pZSection.ZHeader.dfBBmaxz < *pZ) + hMiraMonLayer->MMPoint.pZSection.ZHeader.dfBBmaxz = *pZ; + + pZ++; + } + } + + if (hMiraMonLayer->TopHeader.nElemCount == 0) + { + if (MMCreateMMDB(hMiraMonLayer)) + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + result = MMAddPointRecordToMMDB(hMiraMonLayer, hMMFeature, nElemCount); + if (result == MM_FATAL_ERROR_WRITING_FEATURES || + result == MM_STOP_WRITING_FEATURES) + return result; + } + // Updating nElemCount at the header of the layer + hMiraMonLayer->TopHeader.nElemCount = nElemCount; + + // Everything OK. + return MM_CONTINUE_WRITING_FEATURES; +} // End of de MMCreateFeaturePoint() + +// Checks whether a given Feature ID (FID) exceeds the maximum allowed +// index for 2 GB vectors in a specific MiraMon layer. +int MMCheckVersionForFID(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_INTERNAL_FID FID) +{ + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->LayerVersion != MM_32BITS_VERSION) + return 0; + + if (FID >= MAXIMUM_OBJECT_INDEX_IN_2GB_VECTORS) + return 1; + return 0; +} + +// Checks whether a given offset exceeds the maximum allowed +// index for 2 GB vectors in a specific MiraMon layer. +int MMCheckVersionOffset(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_FILE_OFFSET OffsetToCheck) +{ + if (!hMiraMonLayer) + return 1; + + // Checking if the final version is 1.1 or 2.0 + if (hMiraMonLayer->LayerVersion != MM_32BITS_VERSION) + return 0; + + // User decided that if necessary, output version can be 2.0 + if (OffsetToCheck < MAXIMUM_OFFSET_IN_2GB_VECTORS) + return 0; + + return 1; +} + +// Checks whether a given offset in 3D section exceeds the maximum allowed +// index for 2 GB vectors in a specific MiraMon layer. +int MMCheckVersionFor3DOffset(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_FILE_OFFSET nOffset, + MM_INTERNAL_FID nElemCount) +{ + MM_FILE_OFFSET LastOffset; + + if (!hMiraMonLayer) + return 1; + + // Checking if the final version is 1.1 or 2.0 + if (hMiraMonLayer->LayerVersion != MM_32BITS_VERSION) + return 0; + + // User decided that if necessary, output version can be 2.0 + LastOffset = nOffset + MM_HEADER_SIZE_32_BITS + nElemCount * MM_SIZE_OF_TL; + + LastOffset += MM_SIZE_OF_ZH; + LastOffset += nElemCount * MM_SIZE_OF_ZD_32_BITS; + + if (LastOffset < MAXIMUM_OFFSET_IN_2GB_VECTORS) + return 0; + + return 1; +} + +// Adds a feature in a MiraMon layer. 
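
MMCheckVersionForFID(), MMCheckVersionOffset() and MMCheckVersionFor3DOffset() above only reject values when the layer is written in the 32-bit (V1.1) format, whose object indices and file offsets must stay below the 2 GB limits. A small hedged sketch of guarding a prospective write offset before committing a feature; pFlushAL and the surrounding return convention are assumed from the arc-writing context:

    MM_FILE_OFFSET nNextOffset =
        pFlushAL->TotalSavedBytes + pFlushAL->nNumBytes;

    if (MMCheckVersionOffset(hMiraMonLayer, nNextOffset))
    {
        /* The V1.1 layout cannot address this offset: stop writing. */
        return MM_STOP_WRITING_FEATURES;
    }
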
+int MMAddFeature(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MiraMonFeature *hMiraMonFeature) +{ + int re; + MM_INTERNAL_FID previousFID = 0; + + if (!hMiraMonLayer) + return MM_FATAL_ERROR_WRITING_FEATURES; + + if (!hMiraMonLayer->bIsBeenInit) + { + if (MMInitLayerByType(hMiraMonLayer)) + { + MMCPLDebug("MiraMon", "Error in MMInitLayerByType()"); + return MM_FATAL_ERROR_WRITING_FEATURES; + } + hMiraMonLayer->bIsBeenInit = 1; + } + + if (hMiraMonFeature) + previousFID = hMiraMonLayer->TopHeader.nElemCount; + + if (hMiraMonLayer->bIsPoint) + { + re = LOG_ACTION(MMCreateFeaturePoint(hMiraMonLayer, hMiraMonFeature)); + if (hMiraMonFeature) + { + hMiraMonFeature->nReadFeatures = + hMiraMonLayer->TopHeader.nElemCount - previousFID; + } + return re; + } + if (hMiraMonLayer->bIsArc || hMiraMonLayer->bIsPolygon) + { + re = + LOG_ACTION(MMCreateFeaturePolOrArc(hMiraMonLayer, hMiraMonFeature)); + if (hMiraMonFeature) + { + hMiraMonFeature->nReadFeatures = + hMiraMonLayer->TopHeader.nElemCount - previousFID; + } + return re; + } + if (hMiraMonLayer->bIsDBF) + { + // Adding a record to DBF file + re = LOG_ACTION(MMCreateRecordDBF(hMiraMonLayer, hMiraMonFeature)); + if (hMiraMonFeature) + { + hMiraMonFeature->nReadFeatures = + hMiraMonLayer->TopHeader.nElemCount - previousFID; + } + return re; + } + + return MM_CONTINUE_WRITING_FEATURES; +} + +/* -------------------------------------------------------------------- */ +/* Tools used by MiraMon. */ +/* -------------------------------------------------------------------- */ + +void MMInitBoundingBox(struct MMBoundingBox *dfBB) +{ + if (!dfBB) + return; + dfBB->dfMinX = STATISTICAL_UNDEF_VALUE; + dfBB->dfMaxX = -STATISTICAL_UNDEF_VALUE; + dfBB->dfMinY = STATISTICAL_UNDEF_VALUE; + dfBB->dfMaxY = -STATISTICAL_UNDEF_VALUE; +} + +void MMUpdateBoundingBox(struct MMBoundingBox *dfBBToBeAct, + struct MMBoundingBox *dfBBWithData) +{ + if (!dfBBToBeAct) + return; + + if (dfBBToBeAct->dfMinX > dfBBWithData->dfMinX) + dfBBToBeAct->dfMinX = dfBBWithData->dfMinX; + + if (dfBBToBeAct->dfMinY > dfBBWithData->dfMinY) + dfBBToBeAct->dfMinY = dfBBWithData->dfMinY; + + if (dfBBToBeAct->dfMaxX < dfBBWithData->dfMaxX) + dfBBToBeAct->dfMaxX = dfBBWithData->dfMaxX; + + if (dfBBToBeAct->dfMaxY < dfBBWithData->dfMaxY) + dfBBToBeAct->dfMaxY = dfBBWithData->dfMaxY; +} + +void MMUpdateBoundingBoxXY(struct MMBoundingBox *dfBB, + struct MM_POINT_2D *pCoord) +{ + if (!pCoord) + return; + + if (pCoord->dfX < dfBB->dfMinX) + dfBB->dfMinX = pCoord->dfX; + + if (pCoord->dfY < dfBB->dfMinY) + dfBB->dfMinY = pCoord->dfY; + + if (pCoord->dfX > dfBB->dfMaxX) + dfBB->dfMaxX = pCoord->dfX; + + if (pCoord->dfY > dfBB->dfMaxY) + dfBB->dfMaxY = pCoord->dfY; +} + +/* -------------------------------------------------------------------- */ +/* Resize structures for reuse */ +/* -------------------------------------------------------------------- */ +int MMResizeMiraMonFieldValue(struct MiraMonFieldValue **pFieldValue, + MM_EXT_DBF_N_MULTIPLE_RECORDS *nMax, + MM_EXT_DBF_N_MULTIPLE_RECORDS nNum, + MM_EXT_DBF_N_MULTIPLE_RECORDS nIncr, + MM_EXT_DBF_N_MULTIPLE_RECORDS nProposedMax) +{ + MM_EXT_DBF_N_MULTIPLE_RECORDS nPrevMax; + MM_EXT_DBF_N_MULTIPLE_RECORDS nNewMax; + void *pTmp; + + if (nNum < *nMax) + return 0; + + nPrevMax = *nMax; + nNewMax = max_function(nNum + nIncr, nProposedMax); + if (MMCheckSize_t(nNewMax, sizeof(**pFieldValue))) + { + return 1; + } + if ((pTmp = realloc_function( + *pFieldValue, (size_t)nNewMax * sizeof(**pFieldValue))) == nullptr) + { + MMCPLError(CE_Failure, 
CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMResizeMiraMonFieldValue())"); + return 1; + } + *nMax = nNewMax; + *pFieldValue = pTmp; + + memset((*pFieldValue) + nPrevMax, 0, + (size_t)(*nMax - nPrevMax) * sizeof(**pFieldValue)); + return 0; +} + +int MMResizeMiraMonPolygonArcs(struct MM_PAL_MEM **pFID, + MM_POLYGON_ARCS_COUNT *nMax, + MM_POLYGON_ARCS_COUNT nNum, + MM_POLYGON_ARCS_COUNT nIncr, + MM_POLYGON_ARCS_COUNT nProposedMax) +{ + MM_POLYGON_ARCS_COUNT nPrevMax; + MM_POLYGON_ARCS_COUNT nNewMax; + void *pTmp; + + if (nNum < *nMax) + return 0; + + nPrevMax = *nMax; + nNewMax = max_function(nNum + nIncr, nProposedMax); + if (MMCheckSize_t(nNewMax, sizeof(**pFID))) + { + return 1; + } + if (nNewMax == 0 && *pFID) + return 0; + if ((pTmp = realloc_function(*pFID, (size_t)nNewMax * sizeof(**pFID))) == + nullptr) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMResizeMiraMonPolygonArcs())"); + return 1; + } + *nMax = nNewMax; + *pFID = pTmp; + + memset((*pFID) + nPrevMax, 0, (size_t)(*nMax - nPrevMax) * sizeof(**pFID)); + return 0; +} + +int MMResizeMiraMonRecord(struct MiraMonRecord **pMiraMonRecord, + MM_EXT_DBF_N_MULTIPLE_RECORDS *nMax, + MM_EXT_DBF_N_MULTIPLE_RECORDS nNum, + MM_EXT_DBF_N_MULTIPLE_RECORDS nIncr, + MM_EXT_DBF_N_MULTIPLE_RECORDS nProposedMax) +{ + MM_EXT_DBF_N_MULTIPLE_RECORDS nPrevMax; + MM_EXT_DBF_N_MULTIPLE_RECORDS nNewMax; + void *pTmp; + + if (nNum < *nMax) + return 0; + + nPrevMax = *nMax; + nNewMax = max_function(nNum + nIncr, nProposedMax); + if (MMCheckSize_t(nNewMax, sizeof(**pMiraMonRecord))) + { + return 1; + } + if (nNewMax == 0 && *pMiraMonRecord) + return 0; + if ((pTmp = realloc_function(*pMiraMonRecord, + (size_t)nNewMax * sizeof(**pMiraMonRecord))) == + nullptr) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMResizeMiraMonRecord())"); + return 1; + } + *nMax = nNewMax; + *pMiraMonRecord = pTmp; + + memset((*pMiraMonRecord) + nPrevMax, 0, + (size_t)(*nMax - nPrevMax) * sizeof(**pMiraMonRecord)); + return 0; +} + +int MMResizeZSectionDescrPointer(struct MM_ZD **pZDescription, GUInt64 *nMax, + GUInt64 nNum, GUInt64 nIncr, + GUInt64 nProposedMax) +{ + GUInt64 nNewMax, nPrevMax; + void *pTmp; + + if (nNum < *nMax) + return 0; + + nPrevMax = *nMax; + nNewMax = max_function(nNum + nIncr, nProposedMax); + if (MMCheckSize_t(nNewMax, sizeof(**pZDescription))) + { + return 1; + } + if (nNewMax == 0 && *pZDescription) + return 0; + if ((pTmp = realloc_function(*pZDescription, + (size_t)nNewMax * sizeof(**pZDescription))) == + nullptr) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMResizeZSectionDescrPointer())"); + return 1; + } + *nMax = nNewMax; + *pZDescription = pTmp; + + memset((*pZDescription) + nPrevMax, 0, + (size_t)(*nMax - nPrevMax) * sizeof(**pZDescription)); + return 0; +} + +int MMResizeNodeHeaderPointer(struct MM_NH **pNodeHeader, GUInt64 *nMax, + GUInt64 nNum, GUInt64 nIncr, GUInt64 nProposedMax) +{ + GUInt64 nNewMax, nPrevMax; + void *pTmp; + + if (nNum < *nMax) + return 0; + + nPrevMax = *nMax; + nNewMax = max_function(nNum + nIncr, nProposedMax); + if (MMCheckSize_t(nNewMax, sizeof(**pNodeHeader))) + { + return 1; + } + if (nNewMax == 0 && *pNodeHeader) + return 0; + if ((pTmp = realloc_function( + *pNodeHeader, (size_t)nNewMax * sizeof(**pNodeHeader))) == nullptr) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMResizeNodeHeaderPointer())"); + return 1; + } + *nMax = nNewMax; 
+ *pNodeHeader = pTmp; + + memset((*pNodeHeader) + nPrevMax, 0, + (size_t)(*nMax - nPrevMax) * sizeof(**pNodeHeader)); + return 0; +} + +int MMResizeArcHeaderPointer(struct MM_AH **pArcHeader, GUInt64 *nMax, + GUInt64 nNum, GUInt64 nIncr, GUInt64 nProposedMax) +{ + GUInt64 nNewMax, nPrevMax; + void *pTmp; + + if (nNum < *nMax) + return 0; + + nPrevMax = *nMax; + nNewMax = max_function(nNum + nIncr, nProposedMax); + if (MMCheckSize_t(nNewMax, sizeof(**pArcHeader))) + { + return 1; + } + if (nNewMax == 0 && *pArcHeader) + return 0; + if ((pTmp = realloc_function( + *pArcHeader, (size_t)nNewMax * sizeof(**pArcHeader))) == nullptr) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMResizeArcHeaderPointer())"); + return 1; + } + *nMax = nNewMax; + *pArcHeader = pTmp; + + memset((*pArcHeader) + nPrevMax, 0, + (size_t)(*nMax - nPrevMax) * sizeof(**pArcHeader)); + return 0; +} + +int MMResizePolHeaderPointer(struct MM_PH **pPolHeader, GUInt64 *nMax, + GUInt64 nNum, GUInt64 nIncr, GUInt64 nProposedMax) +{ + GUInt64 nNewMax, nPrevMax; + void *pTmp; + + if (nNum < *nMax) + return 0; + + nPrevMax = *nMax; + nNewMax = max_function(nNum + nIncr, nProposedMax); + if (MMCheckSize_t(nNewMax, sizeof(**pPolHeader))) + { + return 1; + } + if (nNewMax == 0 && *pPolHeader) + return 0; + if ((pTmp = realloc_function( + *pPolHeader, (size_t)nNewMax * sizeof(**pPolHeader))) == nullptr) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMResizePolHeaderPointer())"); + return 1; + } + *nMax = nNewMax; + *pPolHeader = pTmp; + + memset((*pPolHeader) + nPrevMax, 0, + (size_t)(*nMax - nPrevMax) * sizeof(**pPolHeader)); + return 0; +} + +int MMResize_MM_N_VERTICES_TYPE_Pointer(MM_N_VERTICES_TYPE **pVrt, + MM_N_VERTICES_TYPE *nMax, + MM_N_VERTICES_TYPE nNum, + MM_N_VERTICES_TYPE nIncr, + MM_N_VERTICES_TYPE nProposedMax) +{ + MM_N_VERTICES_TYPE nNewMax, nPrevMax; + void *pTmp; + + if (nNum < *nMax) + return 0; + + nPrevMax = *nMax; + nNewMax = max_function(nNum + nIncr, nProposedMax); + if (MMCheckSize_t(nNewMax, sizeof(**pVrt))) + { + return 1; + } + if (nNewMax == 0 && *pVrt) + return 0; + if ((pTmp = realloc_function(*pVrt, (size_t)nNewMax * sizeof(**pVrt))) == + nullptr) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMResize_MM_N_VERTICES_TYPE_Pointer())"); + return 1; + } + *nMax = nNewMax; + *pVrt = pTmp; + + memset((*pVrt) + nPrevMax, 0, (size_t)(*nMax - nPrevMax) * sizeof(**pVrt)); + return 0; +} + +int MMResizeVFGPointer(char **pInt, MM_INTERNAL_FID *nMax, MM_INTERNAL_FID nNum, + MM_INTERNAL_FID nIncr, MM_INTERNAL_FID nProposedMax) +{ + MM_N_VERTICES_TYPE nNewMax, nPrevMax; + void *pTmp; + + if (nNum < *nMax) + return 0; + + nPrevMax = *nMax; + nNewMax = max_function(nNum + nIncr, nProposedMax); + if (MMCheckSize_t(nNewMax, sizeof(**pInt))) + { + return 1; + } + if (nNewMax == 0 && *pInt) + return 0; + if ((pTmp = realloc_function(*pInt, (size_t)nNewMax * sizeof(**pInt))) == + nullptr) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMResizeVFGPointer())"); + return 1; + } + *nMax = nNewMax; + *pInt = pTmp; + + memset((*pInt) + nPrevMax, 0, (size_t)(*nMax - nPrevMax) * sizeof(**pInt)); + return 0; +} + +int MMResizeMM_POINT2DPointer(struct MM_POINT_2D **pPoint2D, + MM_N_VERTICES_TYPE *nMax, MM_N_VERTICES_TYPE nNum, + MM_N_VERTICES_TYPE nIncr, + MM_N_VERTICES_TYPE nProposedMax) +{ + MM_N_VERTICES_TYPE nNewMax, nPrevMax; + void *pTmp; + + if (nNum < *nMax) + 
return 0; + + nPrevMax = *nMax; + nNewMax = max_function(nNum + nIncr, nProposedMax); + if (MMCheckSize_t(nNewMax, sizeof(**pPoint2D))) + { + return 1; + } + if (nNewMax == 0 && *pPoint2D) + return 0; + if ((pTmp = realloc_function(*pPoint2D, (size_t)nNewMax * + sizeof(**pPoint2D))) == nullptr) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMResizeMM_POINT2DPointer())"); + return 1; + } + *nMax = nNewMax; + *pPoint2D = pTmp; + + memset((*pPoint2D) + nPrevMax, 0, + (size_t)(*nMax - nPrevMax) * sizeof(**pPoint2D)); + return 0; +} + +int MMResizeDoublePointer(MM_COORD_TYPE **pDouble, MM_N_VERTICES_TYPE *nMax, + MM_N_VERTICES_TYPE nNum, MM_N_VERTICES_TYPE nIncr, + MM_N_VERTICES_TYPE nProposedMax) +{ + MM_N_VERTICES_TYPE nNewMax, nPrevMax; + void *pTmp; + + if (nNum < *nMax) + return 0; + + nPrevMax = *nMax; + nNewMax = max_function(nNum + nIncr, nProposedMax); + if (MMCheckSize_t(nNewMax, sizeof(**pDouble))) + { + return 1; + } + if (nNewMax == 0 && *pDouble) + return 0; + if ((pTmp = realloc_function(*pDouble, (size_t)nNewMax * + sizeof(**pDouble))) == nullptr) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMResizeDoublePointer())"); + return 1; + } + *nMax = nNewMax; + *pDouble = pTmp; + + memset((*pDouble) + nPrevMax, 0, + (size_t)(*nMax - nPrevMax) * sizeof(**pDouble)); + return 0; +} + +int MMResizeStringToOperateIfNeeded(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_EXT_DBF_N_FIELDS nNewSize) +{ + if (!hMiraMonLayer) + return 1; + + if (nNewSize >= hMiraMonLayer->nNumStringToOperate) + { + char *p; + if (MMCheckSize_t(nNewSize, 1)) + return 1; + p = (char *)calloc_function((size_t)nNewSize); + if (!p) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMResizeStringToOperateIfNeeded())"); + return 1; + } + free_function(hMiraMonLayer->szStringToOperate); + hMiraMonLayer->szStringToOperate = p; + hMiraMonLayer->nNumStringToOperate = nNewSize; + } + return 0; +} + +// Checks if a string is empty +int MMIsEmptyString(const char *string) +{ + const char *ptr = string; + + for (; *ptr; ptr++) + if (*ptr != ' ' && *ptr != '\t') + return 0; + + return 1; +} + +/* -------------------------------------------------------------------- */ +/* Metadata Functions */ +/* -------------------------------------------------------------------- */ + +// Returns the value of an INI file. 
Used to read MiraMon metadata +char *MMReturnValueFromSectionINIFile(const char *filename, const char *section, + const char *key) +{ + char *value = nullptr; +#ifndef GDAL_COMPILATION + char line[1024]; +#endif + const char *pszLine; + char *section_header = nullptr; + size_t key_len = 0; + + FILE_TYPE *file = fopen_function(filename, "rb"); + if (file == nullptr) + { + MMCPLError(CE_Failure, CPLE_OpenFailed, "Cannot open INI file %s.", + filename); + return nullptr; + } + + if (key) + key_len = strlen(key); + +#ifndef GDAL_COMPILATION + while (fgets(line, (int)sizeof(line), file)) + { + pszLine = line; +#else + while ((pszLine = CPLReadLine2L(file, 1024, nullptr)) != nullptr) + { +#endif + char *pszString = + CPLRecode_function(pszLine, CPL_ENC_ISO8859_1, CPL_ENC_UTF8); + + // Skip comments and empty lines + if (*pszString == ';' || *pszString == '#' || *pszString == '\n' || + *pszString == '\r') + { + free_function(pszString); + // Move to next line + continue; + } + + // Check for section header + if (*pszString == '[') + { + char *section_end = strchr(pszString, ']'); + if (section_end != nullptr) + { + *section_end = '\0'; // Terminate the string at ']' + if (section_header) + free_function(section_header); + section_header = + strdup_function(pszString + 1); // Skip the '[' + } + free_function(pszString); + continue; + } + + if (key) + { + // If the current line belongs to the desired section + if (section_header != nullptr && + strcmp(section_header, section) == 0) + { + // Check if the line contains the desired key + if (strncmp(pszString, key, key_len) == 0 && + pszString[key_len] == '=') + { + // Extract the value + char *value_start = pszString + key_len + 1; + char *value_end = strstr(value_start, "\r\n"); + if (value_end != nullptr) + { + *value_end = + '\0'; // Terminate the string at newline character if found + } + else + { + value_end = strstr(value_start, "\n"); + if (value_end != nullptr) + { + *value_end = + '\0'; // Terminate the string at newline character if found + } + else + { + value_end = strstr(value_start, "\r"); + if (value_end != nullptr) + { + *value_end = + '\0'; // Terminate the string at newline character if found + } + } + } + + value = strdup_function(value_start); + fclose_function(file); + free_function(section_header); // Free allocated memory + free_function(pszString); + return value; + } + } + } + else + { + value = section_header; // Freed out + fclose_function(file); + free_function(pszString); + return value; + } + free_function(pszString); + } + + if (section_header) + free_function(section_header); // Free allocated memory + fclose_function(file); + return value; +} + +// Retrieves EPSG codes from a CSV file based on provided geodetic identifiers. 
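+// As read by the parsing code below, the file is expected to begin with a
+// "PSIDGEODES;ID_GEODES;..." header line and to contain one semicolon-separated
+// record per SRS, with the EPSG identifier (e.g. "EPSG:25831") in the first
+// column and the MiraMon identifier in the second. This is a reading of the
+// code, not a formal format specification.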
+int MMReturnCodeFromMM_m_idofic(char *pMMSRS_or_pSRS, char *szResult, + MM_BYTE direction) +{ + char *aMMIDDBFFile = nullptr; //m_idofic.dbf + FILE_TYPE *pfMMSRS; + const char *pszLine; + size_t nLong; + char *id_geodes, *psidgeodes, *epsg; + + if (!pMMSRS_or_pSRS) + { + return 1; + } + +#ifdef GDAL_COMPILATION + aMMIDDBFFile = strdup_function(CPLFindFile("gdal", "MM_m_idofic.csv")); +#else + { + char temp_file[MM_CPL_PATH_BUF_SIZE]; + MuntaPath(DirectoriPrograma, strcpy(temp_file, "m_idofic.csv"), TRUE); + aMMIDDBFFile = strdup_function(temp_file); + } +#endif + + if (!aMMIDDBFFile) + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "Error opening data\\MM_m_idofic.csv.\n"); + return 1; + } + + // Opening the file with SRS information + if (nullptr == (pfMMSRS = fopen_function(aMMIDDBFFile, "r"))) + { + free_function(aMMIDDBFFile); + MMCPLError(CE_Failure, CPLE_OpenFailed, + "Error opening data\\MM_m_idofic.csv.\n"); + return 1; + } + free_function(aMMIDDBFFile); + + // Checking the header of the csv file + pszLine = CPLReadLine2L(pfMMSRS, 1024, nullptr); + if (!pszLine) + + { + fclose_function(pfMMSRS); + MMCPLError(CE_Failure, CPLE_NotSupported, + "Wrong format in data\\MM_m_idofic.csv.\n"); + return 1; + } + id_geodes = strstr(pszLine, "ID_GEODES"); + if (!id_geodes) + { + fclose_function(pfMMSRS); + MMCPLError(CE_Failure, CPLE_NotSupported, + "Wrong format in data\\MM_m_idofic.csv.\n"); + return 1; + } + id_geodes[strlen("ID_GEODES")] = '\0'; + psidgeodes = strstr(pszLine, "PSIDGEODES"); + if (!psidgeodes) + { + fclose_function(pfMMSRS); + MMCPLError(CE_Failure, CPLE_NotSupported, + "Wrong format in data\\MM_m_idofic.csv.\n"); + return 1; + } + psidgeodes[strlen("PSIDGEODES")] = '\0'; + + // Is PSIDGEODES in first place? + if (strncmp(pszLine, psidgeodes, strlen("PSIDGEODES"))) + { + fclose_function(pfMMSRS); + MMCPLError(CE_Failure, CPLE_NotSupported, + "Wrong format in data\\MM_m_idofic.csv.\n"); + return 1; + } + // Is ID_GEODES after PSIDGEODES? + if (strncmp(pszLine + strlen("PSIDGEODES") + 1, "ID_GEODES", + strlen("ID_GEODES"))) + { + fclose_function(pfMMSRS); + MMCPLError(CE_Failure, CPLE_NotSupported, + "Wrong format in data\\MM_m_idofic.csv.\n"); + return 1; + } + + // Looking for the information. 
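+    // Each data line is split at its first two ';' separators: the text before
+    // the first one is the PSIDGEODES (EPSG) column and the text up to the
+    // second one is the ID_GEODES (MiraMon) column; depending on 'direction'
+    // one of them is matched against pMMSRS_or_pSRS and the other is copied
+    // into szResult.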
+ while ((pszLine = CPLReadLine2L(pfMMSRS, 1024, nullptr)) != nullptr) + { + id_geodes = strstr(pszLine, ";"); + if (!id_geodes) + { + fclose_function(pfMMSRS); + MMCPLError(CE_Failure, CPLE_NotSupported, + "Wrong format in data\\MM_m_idofic.csv.\n"); + return 1; + } + + psidgeodes = strstr(id_geodes + 1, ";"); + if (!psidgeodes) + { + fclose_function(pfMMSRS); + MMCPLError(CE_Failure, CPLE_NotSupported, + "Wrong format in data\\MM_m_idofic.csv.\n"); + return 1; + } + + id_geodes[(ptrdiff_t)psidgeodes - (ptrdiff_t)id_geodes] = '\0'; + psidgeodes = strdup_function(pszLine); + psidgeodes[(ptrdiff_t)id_geodes - (ptrdiff_t)pszLine] = '\0'; + id_geodes++; + + if (direction == EPSG_FROM_MMSRS) + { + // I have pMMSRS and I want pSRS + if (strcmp(pMMSRS_or_pSRS, id_geodes)) + { + free_function(psidgeodes); + continue; + } + + epsg = strstr(psidgeodes, "EPSG:"); + nLong = strlen("EPSG:"); + if (epsg && !strncmp(epsg, psidgeodes, nLong)) + { + if (epsg[nLong] != '\0') + { + strcpy(szResult, epsg + nLong); + free_function(psidgeodes); + fclose_function(pfMMSRS); + return 0; // found + } + else + { + fclose_function(pfMMSRS); + *szResult = '\0'; + free_function(psidgeodes); + return 1; // not found + } + } + } + else + { + // I have pSRS and I want pMMSRS + epsg = strstr(psidgeodes, "EPSG:"); + nLong = strlen("EPSG:"); + if (epsg && !strncmp(epsg, psidgeodes, nLong)) + { + if (epsg[nLong] != '\0') + { + if (!strcmp(pMMSRS_or_pSRS, epsg + nLong)) + { + strcpy(szResult, id_geodes); + fclose_function(pfMMSRS); + free_function(psidgeodes); + return 0; // found + } + } + } + } + free_function(psidgeodes); + } + + fclose_function(pfMMSRS); + return 1; // not found +} + +#define LineReturn "\r\n" + +// Generates an idientifier that REL 4 MiraMon metadata needs. +static void MMGenerateFileIdentifierFromMetadataFileName(char *pMMFN, + char *aFileIdentifier) +{ + char aCharRand[8]; + static const char aCharset[] = + "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"; + int i, len_charset; + + memset(aFileIdentifier, '\0', MM_MAX_LEN_LAYER_IDENTIFIER); + + aCharRand[0] = '_'; + len_charset = (int)strlen(aCharset); + for (i = 1; i < 7; i++) + { + // coverity[dont_call] + aCharRand[i] = aCharset[rand() % (len_charset - 1)]; + } + aCharRand[7] = '\0'; + CPLStrlcpy(aFileIdentifier, pMMFN, MM_MAX_LEN_LAYER_IDENTIFIER - 7); + strcat(aFileIdentifier, aCharRand); + return; +} + +// Converts a string from UTF-8 to ANSI to be written in a REL 4 file +static void +MMWrite_ANSI_MetadataKeyDescriptor(struct MiraMonVectorMetaData *hMMMD, + FILE_TYPE *pF, const char *pszEng, + const char *pszCat, const char *pszEsp) +{ + char *pszString = nullptr; + + switch (hMMMD->nMMLanguage) + { + case MM_CAT_LANGUAGE: + pszString = + CPLRecode_function(pszCat, CPL_ENC_UTF8, CPL_ENC_ISO8859_1); + break; + case MM_SPA_LANGUAGE: + pszString = + CPLRecode_function(pszEsp, CPL_ENC_UTF8, CPL_ENC_ISO8859_1); + break; + default: + case MM_ENG_LANGUAGE: + pszString = + CPLRecode_function(pszEng, CPL_ENC_UTF8, CPL_ENC_ISO8859_1); + break; + } + if (pszString) + { + fprintf_function(pF, "%s", KEY_descriptor); + fprintf_function(pF, "="); + fprintf_function(pF, "%s", pszString); + fprintf_function(pF, "%s", LineReturn); + CPLFree_function(pszString); + } +} + +/* + Writes a MiraMon REL 4 metadata file. 
Next sections are included: + VERSION, METADADES, IDENTIFICATION, EXTENT, OVERVIEW, + TAULA_PRINCIPAL and GEOMETRIA_I_TOPOLOGIA + + Please, consult the meaning of all them at: + https://www.miramon.cat/help/eng/GeMPlus/ClausREL.htm +*/ +static int MMWriteMetadataFile(struct MiraMonVectorMetaData *hMMMD) +{ + char aMessage[MM_MESSAGE_LENGTH], + aFileIdentifier[MM_MAX_LEN_LAYER_IDENTIFIER], aMMIDSRS[MM_MAX_ID_SNY]; + MM_EXT_DBF_N_FIELDS nIField; + FILE_TYPE *pF; + time_t currentTime; + char aTimeString[200]; + + if (!hMMMD->aLayerName) + return 0; + + if (nullptr == (pF = fopen_function(hMMMD->aLayerName, "wb"))) + { + MMCPLError(CE_Failure, CPLE_OpenFailed, "The file %s must exist.", + hMMMD->aLayerName); + return 1; + } + + // Writing MiraMon version section + fprintf_function(pF, "[%s]" LineReturn, SECTION_VERSIO); + + fprintf_function(pF, "%s=%u" LineReturn, KEY_Vers, (unsigned)MM_VERS); + fprintf_function(pF, "%s=%u" LineReturn, KEY_SubVers, (unsigned)MM_SUBVERS); + + fprintf_function(pF, "%s=%u" LineReturn, KEY_VersMetaDades, + (unsigned)MM_VERS_METADADES); + fprintf_function(pF, "%s=%u" LineReturn, KEY_SubVersMetaDades, + (unsigned)MM_SUBVERS_METADADES); + + // Writing METADADES section + fprintf_function(pF, "\r\n[%s]" LineReturn, SECTION_METADADES); + CPLStrlcpy(aMessage, hMMMD->aLayerName, sizeof(aMessage)); + MMGenerateFileIdentifierFromMetadataFileName(aMessage, aFileIdentifier); + fprintf_function(pF, "%s=%s" LineReturn, KEY_FileIdentifier, + aFileIdentifier); + fprintf_function(pF, "%s=%s" LineReturn, KEY_language, KEY_Value_eng); + fprintf_function(pF, "%s=%s" LineReturn, KEY_MDIdiom, KEY_Value_eng); + fprintf_function(pF, "%s=%s" LineReturn, KEY_characterSet, + KEY_Value_characterSet); + + // Writing IDENTIFICATION section + fprintf_function(pF, LineReturn "[%s]" LineReturn, SECTION_IDENTIFICATION); + fprintf_function(pF, "%s=%s" LineReturn, KEY_code, aFileIdentifier); + fprintf_function(pF, "%s=" LineReturn, KEY_codeSpace); + if (hMMMD->szLayerTitle && !MMIsEmptyString(hMMMD->szLayerTitle)) + { + if (hMMMD->ePlainLT == MM_LayerType_Point) + fprintf_function(pF, "%s=%s (pnt)" LineReturn, KEY_DatasetTitle, + hMMMD->szLayerTitle); + if (hMMMD->ePlainLT == MM_LayerType_Arc) + fprintf_function(pF, "%s=%s (arc)" LineReturn, KEY_DatasetTitle, + hMMMD->szLayerTitle); + if (hMMMD->ePlainLT == MM_LayerType_Pol) + fprintf_function(pF, "%s=%s (pol)" LineReturn, KEY_DatasetTitle, + hMMMD->szLayerTitle); + } + fprintf_function(pF, "%s=%s" LineReturn, KEY_language, KEY_Value_eng); + + if (hMMMD->ePlainLT != MM_LayerType_Node) + { + if (hMMMD->pSRS && hMMMD->ePlainLT != MM_LayerType_Pol) + { + fprintf_function(pF, LineReturn "[%s:%s]" LineReturn, + SECTION_SPATIAL_REFERENCE_SYSTEM, + SECTION_HORIZONTAL); + if (!ReturnMMIDSRSFromEPSGCodeSRS(hMMMD->pSRS, aMMIDSRS) && + !MMIsEmptyString(aMMIDSRS)) + fprintf_function(pF, "%s=%s" LineReturn, + KEY_HorizontalSystemIdentifier, aMMIDSRS); + else + { + MMCPLWarning(CE_Warning, CPLE_NotSupported, + "The MiraMon driver cannot assign any HRS."); + // Horizontal Reference System + fprintf_function(pF, "%s=plane" LineReturn, + KEY_HorizontalSystemIdentifier); + fprintf_function(pF, "%s=local" LineReturn, + KEY_HorizontalSystemDefinition); + if (hMMMD->pXUnit) + fprintf_function(pF, "%s=%s" LineReturn, KEY_unitats, + hMMMD->pXUnit); + if (hMMMD->pYUnit) + { + if (!hMMMD->pXUnit || + strcasecmp(hMMMD->pXUnit, hMMMD->pYUnit)) + fprintf_function(pF, "%s=%s" LineReturn, KEY_unitatsY, + hMMMD->pYUnit); + } + } + } + else + { + fprintf_function(pF, "%s=plane" 
LineReturn, + KEY_HorizontalSystemIdentifier); + fprintf_function(pF, "%s=local" LineReturn, + KEY_HorizontalSystemDefinition); + if (hMMMD->pXUnit) + { + fprintf_function(pF, "%s=%s" LineReturn, KEY_unitats, + hMMMD->pXUnit); + if (hMMMD->pYUnit) + { + if (!hMMMD->pXUnit || + strcasecmp(hMMMD->pXUnit, hMMMD->pYUnit)) + fprintf_function(pF, "%s=%s" LineReturn, KEY_unitatsY, + hMMMD->pYUnit); + } + } + } + } + + // Writing OVERVIEW:ASPECTES_TECNICS in polygon metadata file. + // ArcSource=fitx_pol.arc + if (hMMMD->ePlainLT == MM_LayerType_Pol) + { + fprintf_function(pF, LineReturn "[%s]" LineReturn, + SECTION_OVVW_ASPECTES_TECNICS); + fprintf_function(pF, "%s=\"%s\"" LineReturn, KEY_ArcSource, + hMMMD->aArcFile); + } + + // Writing EXTENT section + fprintf_function(pF, LineReturn "[%s]" LineReturn, SECTION_EXTENT); + fprintf_function(pF, "%s=0" LineReturn, KEY_toler_env); + + if (hMMMD->hBB.dfMinX != MM_UNDEFINED_STATISTICAL_VALUE && + hMMMD->hBB.dfMaxX != -MM_UNDEFINED_STATISTICAL_VALUE && + hMMMD->hBB.dfMinY != MM_UNDEFINED_STATISTICAL_VALUE && + hMMMD->hBB.dfMaxY != -MM_UNDEFINED_STATISTICAL_VALUE) + { + fprintf_function(pF, "%s=%lf" LineReturn, KEY_MinX, hMMMD->hBB.dfMinX); + fprintf_function(pF, "%s=%lf" LineReturn, KEY_MaxX, hMMMD->hBB.dfMaxX); + fprintf_function(pF, "%s=%lf" LineReturn, KEY_MinY, hMMMD->hBB.dfMinY); + fprintf_function(pF, "%s=%lf" LineReturn, KEY_MaxY, hMMMD->hBB.dfMaxY); + } + + // Writing OVERVIEW section + fprintf_function(pF, LineReturn "[%s]" LineReturn, SECTION_OVERVIEW); + + currentTime = time(nullptr); + +#ifdef GDAL_COMPILATION + { + struct tm ltime; + VSILocalTime(¤tTime, <ime); + snprintf(aTimeString, sizeof(aTimeString), + "%04d%02d%02d %02d%02d%02d%02d+00:00", ltime.tm_year + 1900, + ltime.tm_mon + 1, ltime.tm_mday, ltime.tm_hour, ltime.tm_min, + ltime.tm_sec, 0); + fprintf_function(pF, "%s=%s" LineReturn, KEY_CreationDate, aTimeString); + fprintf_function(pF, LineReturn); + } +#else + { + struct tm *pLocalTime; + pLocalTime = localtime(¤tTime); + snprintf(aTimeString, sizeof(aTimeString), + "%04d%02d%02d %02d%02d%02d%02d+00:00", + pLocalTime->tm_year + 1900, pLocalTime->tm_mon + 1, + pLocalTime->tm_mday, pLocalTime->tm_hour, pLocalTime->tm_min, + pLocalTime->tm_sec, 0); + fprintf_function(pF, "%s=%s" LineReturn, KEY_CreationDate, aTimeString); + fprintf_function(pF, LineReturn); + } +#endif + + // Writing TAULA_PRINCIPAL section + fprintf_function(pF, "[%s]" LineReturn, SECTION_TAULA_PRINCIPAL); + fprintf_function(pF, "IdGrafic=%s" LineReturn, szMMNomCampIdGraficDefecte); + fprintf_function(pF, "TipusRelacio=RELACIO_1_1_DICC" LineReturn); + + fprintf_function(pF, LineReturn); + fprintf_function(pF, "[%s:%s]" LineReturn, SECTION_TAULA_PRINCIPAL, + szMMNomCampIdGraficDefecte); + fprintf_function(pF, "visible=1" LineReturn); + fprintf_function(pF, "MostrarUnitats=0" LineReturn); + + MMWrite_ANSI_MetadataKeyDescriptor( + hMMMD, pF, szInternalGraphicIdentifierEng, + szInternalGraphicIdentifierCat, szInternalGraphicIdentifierSpa); + + if (hMMMD->ePlainLT == MM_LayerType_Arc) + { + fprintf_function(pF, LineReturn); + fprintf_function(pF, "[%s:%s]" LineReturn, SECTION_TAULA_PRINCIPAL, + szMMNomCampNVertexsDefecte); + fprintf_function(pF, "visible=0" LineReturn); + fprintf_function(pF, "simbolitzable=0" LineReturn); + fprintf_function(pF, "MostrarUnitats=0" LineReturn); + MMWrite_ANSI_MetadataKeyDescriptor(hMMMD, pF, szNumberOfVerticesEng, + szNumberOfVerticesCat, + szNumberOfVerticesSpa); + + fprintf_function(pF, LineReturn); + fprintf_function(pF, "[%s:%s]" 
LineReturn, SECTION_TAULA_PRINCIPAL, + szMMNomCampLongitudArcDefecte); + fprintf_function(pF, "simbolitzable=0" LineReturn); + fprintf_function(pF, "MostrarUnitats=0" LineReturn); + MMWrite_ANSI_MetadataKeyDescriptor( + hMMMD, pF, szLengthOfAarcEng, szLengthOfAarcCat, szLengthOfAarcSpa); + + fprintf_function(pF, LineReturn); + fprintf_function(pF, "[%s:%s]" LineReturn, SECTION_TAULA_PRINCIPAL, + szMMNomCampNodeIniDefecte); + fprintf_function(pF, "visible=0" LineReturn); + fprintf_function(pF, "simbolitzable=0" LineReturn); + fprintf_function(pF, "MostrarUnitats=0" LineReturn); + MMWrite_ANSI_MetadataKeyDescriptor(hMMMD, pF, szInitialNodeEng, + szInitialNodeCat, szInitialNodeSpa); + + fprintf_function(pF, LineReturn); + fprintf_function(pF, "[%s:%s]" LineReturn, SECTION_TAULA_PRINCIPAL, + szMMNomCampNodeFiDefecte); + fprintf_function(pF, "visible=0" LineReturn); + fprintf_function(pF, "simbolitzable=0" LineReturn); + fprintf_function(pF, "MostrarUnitats=0" LineReturn); + MMWrite_ANSI_MetadataKeyDescriptor(hMMMD, pF, szFinalNodeEng, + szFinalNodeCat, szFinalNodeSpa); + + fprintf_function(pF, LineReturn); + fprintf_function(pF, "[GEOMETRIA_I_TOPOLOGIA]" LineReturn); + fprintf_function(pF, "NomCampNVertexs=%s" LineReturn, + szMMNomCampNVertexsDefecte); + fprintf_function(pF, "NomCampLongitudArc=%s" LineReturn, + szMMNomCampLongitudArcDefecte); + fprintf_function(pF, "NomCampNodeIni=%s" LineReturn, + szMMNomCampNodeIniDefecte); + fprintf_function(pF, "NomCampNodeFi=%s" LineReturn, + szMMNomCampNodeFiDefecte); + } + else if (hMMMD->ePlainLT == MM_LayerType_Node) + { + fprintf_function(pF, LineReturn); + fprintf_function(pF, "[%s:%s]" LineReturn, SECTION_TAULA_PRINCIPAL, + szMMNomCampArcsANodeDefecte); + fprintf_function(pF, "visible=0" LineReturn); + fprintf_function(pF, "simbolitzable=0" LineReturn); + fprintf_function(pF, "MostrarUnitats=0" LineReturn); + MMWrite_ANSI_MetadataKeyDescriptor(hMMMD, pF, szNumberOfArcsToNodeEng, + szNumberOfArcsToNodeCat, + szNumberOfArcsToNodeSpa); + + fprintf_function(pF, LineReturn); + fprintf_function(pF, "[%s:%s]" LineReturn, SECTION_TAULA_PRINCIPAL, + szMMNomCampTipusNodeDefecte); + fprintf_function(pF, "visible=0" LineReturn); + fprintf_function(pF, "simbolitzable=0" LineReturn); + fprintf_function(pF, "MostrarUnitats=0" LineReturn); + MMWrite_ANSI_MetadataKeyDescriptor(hMMMD, pF, szNodeTypeEng, + szNodeTypeCat, szNodeTypeSpa); + } + else if (hMMMD->ePlainLT == MM_LayerType_Pol) + { + fprintf_function(pF, LineReturn); + fprintf_function(pF, "[%s:%s]" LineReturn, SECTION_TAULA_PRINCIPAL, + szMMNomCampNVertexsDefecte); + fprintf_function(pF, "visible=0" LineReturn); + fprintf_function(pF, "simbolitzable=0" LineReturn); + fprintf_function(pF, "MostrarUnitats=0" LineReturn); + MMWrite_ANSI_MetadataKeyDescriptor(hMMMD, pF, szNumberOfVerticesEng, + szNumberOfVerticesCat, + szNumberOfVerticesSpa); + + fprintf_function(pF, LineReturn); + fprintf_function(pF, "[%s:%s]" LineReturn, SECTION_TAULA_PRINCIPAL, + szMMNomCampPerimetreDefecte); + fprintf_function(pF, "simbolitzable=0" LineReturn); + fprintf_function(pF, "MostrarUnitats=0" LineReturn); + MMWrite_ANSI_MetadataKeyDescriptor( + hMMMD, pF, szPerimeterOfThePolygonEng, szPerimeterOfThePolygonCat, + szPerimeterOfThePolygonSpa); + + fprintf_function(pF, LineReturn); + fprintf_function(pF, "[%s:%s]" LineReturn, SECTION_TAULA_PRINCIPAL, + szMMNomCampAreaDefecte); + fprintf_function(pF, "simbolitzable=0" LineReturn); + fprintf_function(pF, "MostrarUnitats=0" LineReturn); + MMWrite_ANSI_MetadataKeyDescriptor(hMMMD, pF, 
szAreaOfThePolygonEng, + szAreaOfThePolygonCat, + szAreaOfThePolygonSpa); + + fprintf_function(pF, LineReturn); + fprintf_function(pF, "[%s:%s]" LineReturn, SECTION_TAULA_PRINCIPAL, + szMMNomCampNArcsDefecte); + fprintf_function(pF, "visible=0" LineReturn); + fprintf_function(pF, "simbolitzable=0" LineReturn); + fprintf_function(pF, "MostrarUnitats=0" LineReturn); + MMWrite_ANSI_MetadataKeyDescriptor( + hMMMD, pF, szNumberOfArcsEng, szNumberOfArcsCat, szNumberOfArcsSpa); + + fprintf_function(pF, LineReturn); + fprintf_function(pF, "[%s:%s]" LineReturn, SECTION_TAULA_PRINCIPAL, + szMMNomCampNPoligonsDefecte); + fprintf_function(pF, "visible=0" LineReturn); + fprintf_function(pF, "simbolitzable=0" LineReturn); + fprintf_function(pF, "MostrarUnitats=0" LineReturn); + MMWrite_ANSI_MetadataKeyDescriptor( + hMMMD, pF, szNumberOfElementaryPolygonsEng, + szNumberOfElementaryPolygonsCat, szNumberOfElementaryPolygonsSpa); + + fprintf_function(pF, LineReturn); + fprintf_function(pF, "[GEOMETRIA_I_TOPOLOGIA]" LineReturn); + fprintf_function(pF, "NomCampNVertexs=%s" LineReturn, + szMMNomCampNVertexsDefecte); + fprintf_function(pF, "NomCampPerimetre=%s" LineReturn, + szMMNomCampPerimetreDefecte); + fprintf_function(pF, "NomCampArea=%s" LineReturn, + szMMNomCampAreaDefecte); + fprintf_function(pF, "NomCampNArcs=%s" LineReturn, + szMMNomCampNArcsDefecte); + fprintf_function(pF, "NomCampNPoligons=%s" LineReturn, + szMMNomCampNPoligonsDefecte); + } + + if (hMMMD->pLayerDB && hMMMD->pLayerDB->nNFields > 0) + { + // For each field of the databes + for (nIField = 0; nIField < hMMMD->pLayerDB->nNFields; nIField++) + { + if (!MMIsEmptyString( + hMMMD->pLayerDB->pFields[nIField].pszFieldDescription) && + !MMIsEmptyString( + hMMMD->pLayerDB->pFields[nIField].pszFieldName)) + { + fprintf_function( + pF, LineReturn "[%s:%s]" LineReturn, + SECTION_TAULA_PRINCIPAL, + hMMMD->pLayerDB->pFields[nIField].pszFieldName); + + MMWrite_ANSI_MetadataKeyDescriptor( + hMMMD, pF, + hMMMD->pLayerDB->pFields[nIField].pszFieldDescription, + hMMMD->pLayerDB->pFields[nIField].pszFieldDescription, + hMMMD->pLayerDB->pFields[nIField].pszFieldDescription); + } + } + } + fclose_function(pF); + return 0; +} + +// Writes metadata files for MiraMon vector layers +static int MMWriteVectorMetadataFile(struct MiraMonVectLayerInfo *hMiraMonLayer, + int layerPlainType, int layerMainPlainType) +{ + struct MiraMonVectorMetaData hMMMD; + + if (!hMiraMonLayer) + return 1; + + // MiraMon writes a REL file of each .pnt, .arc, .nod or .pol + memset(&hMMMD, 0, sizeof(hMMMD)); + hMMMD.ePlainLT = layerPlainType; + hMMMD.pSRS = hMiraMonLayer->pSRS; + hMMMD.nMMLanguage = hMiraMonLayer->nMMLanguage; + + hMMMD.szLayerTitle = hMiraMonLayer->szLayerTitle; + if (layerPlainType == MM_LayerType_Point) + { + hMMMD.aLayerName = hMiraMonLayer->MMPoint.pszREL_LayerName; + if (MMIsEmptyString(hMMMD.aLayerName)) + return 0; // If no file, no error. Just continue. + memcpy(&hMMMD.hBB, &hMiraMonLayer->TopHeader.hBB, sizeof(hMMMD.hBB)); + hMMMD.pLayerDB = hMiraMonLayer->pLayerDB; + return MMWriteMetadataFile(&hMMMD); + } + else if (layerPlainType == MM_LayerType_Arc) + { + // Arcs and not polygons + if (layerMainPlainType == MM_LayerType_Arc) + { + hMMMD.aLayerName = hMiraMonLayer->MMArc.pszREL_LayerName; + if (MMIsEmptyString(hMMMD.aLayerName)) + return 0; // If no file, no error. Just continue. 
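+            // A stand-alone arc layer reuses the layer's global bounding box
+            // and its user attribute table.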
+ memcpy(&hMMMD.hBB, &hMiraMonLayer->TopHeader.hBB, + sizeof(hMMMD.hBB)); + hMMMD.pLayerDB = hMiraMonLayer->pLayerDB; + } + // Arcs and polygons + else + { + // Arc from polygon + hMMMD.aLayerName = hMiraMonLayer->MMPolygon.MMArc.pszREL_LayerName; + if (MMIsEmptyString(hMMMD.aLayerName)) + return 0; // If no file, no error. Just continue. + + memcpy(&hMMMD.hBB, &hMiraMonLayer->MMPolygon.TopArcHeader.hBB, + sizeof(hMMMD.hBB)); + hMMMD.pLayerDB = nullptr; + } + return MMWriteMetadataFile(&hMMMD); + } + else if (layerPlainType == MM_LayerType_Pol) + { + int nResult; + + hMMMD.aLayerName = hMiraMonLayer->MMPolygon.pszREL_LayerName; + + if (MMIsEmptyString(hMMMD.aLayerName)) + return 0; // If no file, no error. Just continue. + + memcpy(&hMMMD.hBB, &hMiraMonLayer->TopHeader.hBB, sizeof(hMMMD.hBB)); + hMMMD.pLayerDB = hMiraMonLayer->pLayerDB; + hMMMD.aArcFile = strdup_function( + get_filename_function(hMiraMonLayer->MMPolygon.MMArc.pszLayerName)); + nResult = MMWriteMetadataFile(&hMMMD); + free_function(hMMMD.aArcFile); + return nResult; + } + else if (layerPlainType == MM_LayerType_Node) + { + // Node from arc + if (layerMainPlainType == MM_LayerType_Arc) + { + hMMMD.aLayerName = hMiraMonLayer->MMArc.MMNode.pszREL_LayerName; + if (MMIsEmptyString(hMMMD.aLayerName)) + return 0; // If no file, no error. Just continue. + memcpy(&hMMMD.hBB, &hMiraMonLayer->MMArc.TopNodeHeader.hBB, + sizeof(hMMMD.hBB)); + } + else // Node from polygon + { + hMMMD.aLayerName = + hMiraMonLayer->MMPolygon.MMArc.MMNode.pszREL_LayerName; + if (MMIsEmptyString(hMMMD.aLayerName)) + return 0; // If no file, no error. Just continue. + memcpy(&hMMMD.hBB, + &hMiraMonLayer->MMPolygon.MMArc.TopNodeHeader.hBB, + sizeof(hMMMD.hBB)); + } + hMMMD.pLayerDB = nullptr; + return MMWriteMetadataFile(&hMMMD); + } + return 0; +} + +int MMWriteVectorMetadata(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->bIsPoint) + return MMWriteVectorMetadataFile(hMiraMonLayer, MM_LayerType_Point, + MM_LayerType_Point); + if (hMiraMonLayer->bIsArc && !hMiraMonLayer->bIsPolygon) + { + if (MMWriteVectorMetadataFile(hMiraMonLayer, MM_LayerType_Node, + MM_LayerType_Arc)) + return 1; + return MMWriteVectorMetadataFile(hMiraMonLayer, MM_LayerType_Arc, + MM_LayerType_Arc); + } + if (hMiraMonLayer->bIsPolygon) + { + if (MMWriteVectorMetadataFile(hMiraMonLayer, MM_LayerType_Node, + MM_LayerType_Pol)) + return 1; + if (MMWriteVectorMetadataFile(hMiraMonLayer, MM_LayerType_Arc, + MM_LayerType_Pol)) + return 1; + return MMWriteVectorMetadataFile(hMiraMonLayer, MM_LayerType_Pol, + MM_LayerType_Pol); + } + if (hMiraMonLayer->bIsDBF) + { + return MMWriteVectorMetadataFile(hMiraMonLayer, MM_LayerType_Unknown, + MM_LayerType_Unknown); + } + return 0; +} + +// Verifies the version of a MiraMon REL 4 file. +int MMCheck_REL_FILE(const char *szREL_file) +{ + char *pszLine; + FILE_TYPE *pF; + + // Does the REL file exist? + pF = fopen_function(szREL_file, "r"); + if (!pF) + { + MMCPLError(CE_Failure, CPLE_OpenFailed, "The file %s must exist.", + szREL_file); + return 1; + } + fclose_function(pF); + + // Does the REL file have VERSION? + pszLine = + MMReturnValueFromSectionINIFile(szREL_file, SECTION_VERSIO, nullptr); + if (!pszLine) + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "The file \"%s\" must be REL4. " + "You can use ConvREL.exe from MiraMon software " + "to convert this file to REL4.", + szREL_file); + return 1; + } + free_function(pszLine); + + // Does the REL file have the correct VERSION? + // Vers>=4? 
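+    // For reference, MMWriteMetadataFile() writes this section with the
+    // SECTION_VERSIO / KEY_* constants, e.g. (illustrative values only):
+    //   [VERSIO]
+    //   Vers=4
+    //   SubVers=3
+    //   VersMetaDades=5
+    //   SubVersMetaDades=0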
+ pszLine = + MMReturnValueFromSectionINIFile(szREL_file, SECTION_VERSIO, KEY_Vers); + if (pszLine) + { + if (*pszLine == '\0' || atoi(pszLine) < (int)MM_VERS) + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "The file \"%s\" must have %s>=%d.", szREL_file, + KEY_Vers, MM_VERS); + free_function(pszLine); + return 1; + } + free_function(pszLine); + } + else + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "The file \"%s\" must have %s>=%d.", szREL_file, KEY_Vers, + MM_VERS); + return 1; + } + + // SubVers>=3? + pszLine = MMReturnValueFromSectionINIFile(szREL_file, SECTION_VERSIO, + KEY_SubVers); + if (pszLine) + { + if (*pszLine == '\0' || atoi(pszLine) < (int)MM_SUBVERS) + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "The file \"%s\" must have %s>=%d.", szREL_file, + KEY_SubVers, MM_SUBVERS); + + free_function(pszLine); + return 1; + } + free_function(pszLine); + } + else + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "The file \"%s\" must have %s>=%d.", szREL_file, KEY_SubVers, + MM_SUBVERS); + return 1; + } + + // VersMetaDades>=5? + pszLine = MMReturnValueFromSectionINIFile(szREL_file, SECTION_VERSIO, + KEY_VersMetaDades); + if (pszLine) + { + if (*pszLine == '\0' || atoi(pszLine) < (int)MM_VERS_METADADES) + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "The file \"%s\" must have %s>=%d.", szREL_file, + KEY_VersMetaDades, MM_VERS_METADADES); + free_function(pszLine); + return 1; + } + free_function(pszLine); + } + else + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "The file \"%s\" must have %s>=%d.", szREL_file, + KEY_VersMetaDades, MM_VERS_METADADES); + return 1; + } + + // SubVersMetaDades>=0? + pszLine = MMReturnValueFromSectionINIFile(szREL_file, SECTION_VERSIO, + KEY_SubVersMetaDades); + if (pszLine) + { + if (*pszLine == '\0' || atoi(pszLine) < (int)MM_SUBVERS_METADADES) + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "The file \"%s\" must have %s>=%d.", szREL_file, + KEY_SubVersMetaDades, MM_SUBVERS_METADADES); + free_function(pszLine); + return 1; + } + free_function(pszLine); + } + else + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "The file \"%s\" must have %s>=%d.", szREL_file, + KEY_SubVersMetaDades, MM_SUBVERS_METADADES); + return 1; + } + return 0; +} + +/* -------------------------------------------------------------------- */ +/* MiraMon database functions */ +/* -------------------------------------------------------------------- */ + +// Initializes a MiraMon database associated with a vector layer: +// Sets the usual fields that MiraMon needs and after them, adds +// all fields of the input layer +static int MMInitMMDB(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MMAdmDatabase *pMMAdmDB) +{ + if (!hMiraMonLayer) + return 1; + + if (!pMMAdmDB) + return 1; + + if (MMIsEmptyString(pMMAdmDB->pszExtDBFLayerName)) + return 0; // No file, no error. 
Just continue + + strcpy(pMMAdmDB->pMMBDXP->ReadingMode, "wb+"); + if (FALSE == + MM_CreateDBFFile(pMMAdmDB->pMMBDXP, pMMAdmDB->pszExtDBFLayerName)) + return 1; + + // Opening the file + if (nullptr == (pMMAdmDB->pFExtDBF = + fopen_function(pMMAdmDB->pszExtDBFLayerName, + "r+b"))) //hMiraMonLayer->pszFlags))) + { + MMCPLError(CE_Failure, CPLE_OpenFailed, + "Error pMMAdmDB: Cannot open file %s.", + pMMAdmDB->pszExtDBFLayerName); + return 1; + } + fseek_function(pMMAdmDB->pFExtDBF, pMMAdmDB->pMMBDXP->FirstRecordOffset, + SEEK_SET); + + if (MMInitFlush(&pMMAdmDB->FlushRecList, pMMAdmDB->pFExtDBF, MM_1MB, + &pMMAdmDB->pRecList, pMMAdmDB->pMMBDXP->FirstRecordOffset, + 0)) + return 1; + + pMMAdmDB->nNumRecordOnCourse = + (GUInt64)pMMAdmDB->pMMBDXP->BytesPerRecord + 1; + if (MMCheckSize_t(pMMAdmDB->nNumRecordOnCourse, 1)) + return 1; + pMMAdmDB->szRecordOnCourse = + calloc_function((size_t)pMMAdmDB->nNumRecordOnCourse); + if (!pMMAdmDB->szRecordOnCourse) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMInitMMDB())"); + return 1; + } + return 0; +} + +// Creates a MiraMon database associated with a vector layer. +// It determines the number of fields and initializes the database header +// accordingly. Depending on the layer type (point, arc, polygon, or generic), +// it defines the fields and initializes the corresponding MiraMon database +// structures. +int MMCreateMMDB(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + struct MM_DATA_BASE_XP *pBD_XP = nullptr, *pBD_XP_Aux = nullptr; + struct MM_FIELD MMField; + size_t nIFieldLayer; + MM_EXT_DBF_N_FIELDS nIField = 0; + MM_EXT_DBF_N_FIELDS nNFields; + + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->bIsPoint) + { + if (hMiraMonLayer->pLayerDB) + nNFields = + MM_PRIVATE_POINT_DB_FIELDS + hMiraMonLayer->pLayerDB->nNFields; + else + nNFields = MM_PRIVATE_POINT_DB_FIELDS; + pBD_XP = hMiraMonLayer->MMPoint.MMAdmDB.pMMBDXP = + MM_CreateDBFHeader(nNFields, hMiraMonLayer->nCharSet); + + if (!pBD_XP) + return 1; + + if (0 == (nIField = (MM_EXT_DBF_N_FIELDS)MM_DefineFirstPointFieldsDB_XP( + pBD_XP))) + return 1; + } + else if (hMiraMonLayer->bIsArc && !hMiraMonLayer->bIsPolygon) + { + if (hMiraMonLayer->pLayerDB) + nNFields = + MM_PRIVATE_ARC_DB_FIELDS + hMiraMonLayer->pLayerDB->nNFields; + else + nNFields = MM_PRIVATE_ARC_DB_FIELDS; + + pBD_XP = hMiraMonLayer->MMArc.MMAdmDB.pMMBDXP = + MM_CreateDBFHeader(nNFields, hMiraMonLayer->nCharSet); + + if (!pBD_XP) + return 1; + + if (0 == (nIField = (MM_EXT_DBF_N_FIELDS)MM_DefineFirstArcFieldsDB_XP( + pBD_XP))) + return 1; + + pBD_XP_Aux = hMiraMonLayer->MMArc.MMNode.MMAdmDB.pMMBDXP = + MM_CreateDBFHeader(3, hMiraMonLayer->nCharSet); + + if (!pBD_XP_Aux) + return 1; + + if (0 == MM_DefineFirstNodeFieldsDB_XP(pBD_XP_Aux)) + return 1; + } + else if (hMiraMonLayer->bIsPolygon) + { + if (hMiraMonLayer->pLayerDB) + nNFields = MM_PRIVATE_POLYGON_DB_FIELDS + + hMiraMonLayer->pLayerDB->nNFields; + else + nNFields = MM_PRIVATE_POLYGON_DB_FIELDS; + + pBD_XP = hMiraMonLayer->MMPolygon.MMAdmDB.pMMBDXP = + MM_CreateDBFHeader(nNFields, hMiraMonLayer->nCharSet); + + if (!pBD_XP) + return 1; + + if (0 == + (nIField = + (MM_EXT_DBF_N_FIELDS)MM_DefineFirstPolygonFieldsDB_XP(pBD_XP))) + return 1; + + pBD_XP_Aux = hMiraMonLayer->MMPolygon.MMArc.MMAdmDB.pMMBDXP = + MM_CreateDBFHeader(5, hMiraMonLayer->nCharSet); + + if (!pBD_XP_Aux) + return 1; + + if (0 == MM_DefineFirstArcFieldsDB_XP(pBD_XP_Aux)) + return 1; + + pBD_XP_Aux = hMiraMonLayer->MMPolygon.MMArc.MMNode.MMAdmDB.pMMBDXP = + 
MM_CreateDBFHeader(3, hMiraMonLayer->nCharSet); + + if (!pBD_XP_Aux) + return 1; + + if (0 == MM_DefineFirstNodeFieldsDB_XP(pBD_XP_Aux)) + return 1; + } + else if (hMiraMonLayer->bIsDBF) + { + // Creating only a DBF + if (hMiraMonLayer->pLayerDB) + nNFields = hMiraMonLayer->pLayerDB->nNFields; + else + nNFields = 0; + + pBD_XP = hMiraMonLayer->MMAdmDBWriting.pMMBDXP = + MM_CreateDBFHeader(nNFields, hMiraMonLayer->nCharSet); + + if (!pBD_XP) + return 1; + } + else + return 0; + + // After private MiraMon fields, other fields are added. + // If names are no compatible, some changes are done. + if (hMiraMonLayer->pLayerDB) + { + for (nIFieldLayer = 0; nIField < nNFields; nIField++, nIFieldLayer++) + { + MM_InitializeField(&MMField); + CPLStrlcpy( + MMField.FieldName, + hMiraMonLayer->pLayerDB->pFields[nIFieldLayer].pszFieldName, + MM_MAX_LON_FIELD_NAME_DBF); + + CPLStrlcpy(MMField.FieldDescription[0], + hMiraMonLayer->pLayerDB->pFields[nIFieldLayer] + .pszFieldDescription, + MM_MAX_BYTES_FIELD_DESC); + + MMField.BytesPerField = + hMiraMonLayer->pLayerDB->pFields[nIFieldLayer].nFieldSize; + switch (hMiraMonLayer->pLayerDB->pFields[nIFieldLayer].eFieldType) + { + case MM_Numeric: + MMField.FieldType = 'N'; + if (hMiraMonLayer->pLayerDB->pFields[nIFieldLayer] + .bIs64BitInteger) + MMField.Is64 = 1; + if (MMField.BytesPerField == 0) + MMField.BytesPerField = MM_MAX_AMPLADA_CAMP_N_DBF; + break; + case MM_Character: + MMField.FieldType = 'C'; + if (MMField.BytesPerField == 0) + MMField.BytesPerField = MM_MAX_AMPLADA_CAMP_C_DBF; + break; + case MM_Data: + MMField.FieldType = 'D'; + if (MMField.BytesPerField == 0) + MMField.BytesPerField = MM_MAX_AMPLADA_CAMP_D_DBF; + break; + case MM_Logic: + MMField.FieldType = 'L'; + if (MMField.BytesPerField == 0) + MMField.BytesPerField = 1; + break; + default: + MMField.FieldType = 'C'; + if (MMField.BytesPerField == 0) + MMField.BytesPerField = MM_MAX_AMPLADA_CAMP_C_DBF; + }; + + MMField.DecimalsIfFloat = + (MM_BYTE)hMiraMonLayer->pLayerDB->pFields[nIFieldLayer] + .nNumberOfDecimals; + + MM_DuplicateFieldDBXP(pBD_XP->pField + nIField, &MMField); + MM_ModifyFieldNameAndDescriptorIfPresentBD_XP( + pBD_XP->pField + nIField, pBD_XP, FALSE, 0); + if (pBD_XP->pField[nIField].FieldType == 'F') + pBD_XP->pField[nIField].FieldType = 'N'; + } + } + + if (hMiraMonLayer->bIsPoint) + { + if (MMInitMMDB(hMiraMonLayer, &hMiraMonLayer->MMPoint.MMAdmDB)) + return 1; + } + else if (hMiraMonLayer->bIsArc && !hMiraMonLayer->bIsPolygon) + { + if (MMInitMMDB(hMiraMonLayer, &hMiraMonLayer->MMArc.MMAdmDB)) + return 1; + + if (MMInitMMDB(hMiraMonLayer, &hMiraMonLayer->MMArc.MMNode.MMAdmDB)) + return 1; + } + else if (hMiraMonLayer->bIsPolygon) + { + if (MMInitMMDB(hMiraMonLayer, &hMiraMonLayer->MMPolygon.MMAdmDB)) + return 1; + + if (MMInitMMDB(hMiraMonLayer, &hMiraMonLayer->MMPolygon.MMArc.MMAdmDB)) + return 1; + + if (MMInitMMDB(hMiraMonLayer, + &hMiraMonLayer->MMPolygon.MMArc.MMNode.MMAdmDB)) + return 1; + } + else if (hMiraMonLayer->bIsDBF) + { + if (MMInitMMDB(hMiraMonLayer, &hMiraMonLayer->MMAdmDBWriting)) + return 1; + } + return 0; +} + +// Checks and fits the width of a specific field in a MiraMon database +// associated with a vector layer. It examines the length of the provided +// value and resizes the field width, if necessary, to accommodate the new +// value. If the new width exceeds the current width of the field, +// it updates the database structure, including the field width and +// the size of the record. 
Additionally, it reallocates memory if needed +// for the record handling buffer. + +static int +MMTestAndFixValueToRecordDBXP(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MMAdmDatabase *pMMAdmDB, + MM_EXT_DBF_N_FIELDS nIField, char *szValue) +{ + struct MM_FIELD *camp; + MM_BYTES_PER_FIELD_TYPE_DBF nNewWidth; + + if (!hMiraMonLayer) + return 1; + + camp = pMMAdmDB->pMMBDXP->pField + nIField; + + if (!szValue) + return 0; + + nNewWidth = (MM_BYTES_PER_FIELD_TYPE_DBF)strlen(szValue); + if (MMResizeStringToOperateIfNeeded(hMiraMonLayer, nNewWidth + 1)) + return 1; + + if (nNewWidth > camp->BytesPerField) + { + if (MM_WriteNRecordsMMBD_XPFile(pMMAdmDB)) + return 1; + + // Flushing all to be flushed + pMMAdmDB->FlushRecList.SizeOfBlockToBeSaved = 0; + if (MMAppendBlockToBuffer(&pMMAdmDB->FlushRecList)) + return 1; + + pMMAdmDB->pMMBDXP->pfDataBase = pMMAdmDB->pFExtDBF; + + if (MM_ChangeDBFWidthField( + pMMAdmDB->pMMBDXP, nIField, nNewWidth, + pMMAdmDB->pMMBDXP->pField[nIField].DecimalsIfFloat, + (MM_BYTE)MM_NOU_N_DECIMALS_NO_APLICA)) + return 1; + + // The record on course also has to change its size. + if ((GUInt64)pMMAdmDB->pMMBDXP->BytesPerRecord + 1 >= + pMMAdmDB->nNumRecordOnCourse) + { + void *pTmp; + if (nullptr == (pTmp = realloc_function( + pMMAdmDB->szRecordOnCourse, + (size_t)pMMAdmDB->pMMBDXP->BytesPerRecord + 1))) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMTestAndFixValueToRecordDBXP())"); + return 1; + } + pMMAdmDB->szRecordOnCourse = pTmp; + } + + // File has changed its size, so it has to be updated + // at the Flush tool + fseek_function(pMMAdmDB->pFExtDBF, 0, SEEK_END); + pMMAdmDB->FlushRecList.OffsetWhereToFlush = + ftell_function(pMMAdmDB->pFExtDBF); + } + return 0; +} + +static int +MMWriteValueToszStringToOperate(struct MiraMonVectLayerInfo *hMiraMonLayer, + const struct MM_FIELD *camp, const void *valor, + MM_BOOLEAN is_64) +{ + if (!hMiraMonLayer) + return 1; + + if (!camp) + return 0; + + if (MMResizeStringToOperateIfNeeded(hMiraMonLayer, + camp->BytesPerField + 10)) + return 1; + + if (!valor) + *hMiraMonLayer->szStringToOperate = '\0'; + else + { + if (camp->FieldType == 'N') + { + if (!is_64) + { + snprintf(hMiraMonLayer->szStringToOperate, + (size_t)hMiraMonLayer->nNumStringToOperate, "%*.*f", + camp->BytesPerField, camp->DecimalsIfFloat, + *(const double *)valor); + } + else + { + snprintf(hMiraMonLayer->szStringToOperate, + (size_t)hMiraMonLayer->nNumStringToOperate, "%*lld", + camp->BytesPerField, *(const GInt64 *)valor); + } + } + else + { + snprintf(hMiraMonLayer->szStringToOperate, + (size_t)hMiraMonLayer->nNumStringToOperate, "%-*s", + camp->BytesPerField, (const char *)valor); + } + } + + return 0; +} + +int MMWriteValueToRecordDBXP(struct MiraMonVectLayerInfo *hMiraMonLayer, + char *registre, const struct MM_FIELD *camp, + const void *valor, MM_BOOLEAN is_64) +{ + if (!hMiraMonLayer) + return 1; + + if (!camp) + return 0; + + if (MMWriteValueToszStringToOperate(hMiraMonLayer, camp, valor, is_64)) + return 1; + + memcpy(registre + camp->AccumulatedBytes, hMiraMonLayer->szStringToOperate, + camp->BytesPerField); + return 0; +} + +static int MMAddFeatureRecordToMMDB(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MiraMonFeature *hMMFeature, + struct MMAdmDatabase *pMMAdmDB, + char *pszRecordOnCourse, + struct MM_FLUSH_INFO *pFlushRecList, + MM_EXT_DBF_N_RECORDS *nNumRecords, + MM_EXT_DBF_N_FIELDS nNumPrivateMMField) +{ + MM_EXT_DBF_N_MULTIPLE_RECORDS nIRecord; + MM_EXT_DBF_N_FIELDS nIField; + struct 
MM_DATA_BASE_XP *pBD_XP = nullptr; + + if (!hMiraMonLayer) + return 1; + + if (!hMMFeature) + return 1; + + pBD_XP = pMMAdmDB->pMMBDXP; + for (nIRecord = 0; nIRecord < hMMFeature->nNumMRecords; nIRecord++) + { + for (nIField = 0; nIField < hMMFeature->pRecords[nIRecord].nNumField; + nIField++) + { + // A field with no valid value is written as blank + if (!hMMFeature->pRecords[nIRecord].pField[nIField].bIsValid) + { + memset( + pszRecordOnCourse + + pBD_XP->pField[nIField + nNumPrivateMMField] + .AccumulatedBytes, + ' ', + pBD_XP->pField[nIField + nNumPrivateMMField].BytesPerField); + + continue; + } + if (pBD_XP->pField[nIField + nNumPrivateMMField].FieldType == 'C') + { + if (MMWriteValueToRecordDBXP(hMiraMonLayer, pszRecordOnCourse, + pBD_XP->pField + nIField + + nNumPrivateMMField, + hMMFeature->pRecords[nIRecord] + .pField[nIField] + .pDinValue, + FALSE)) + return 1; + } + else if (pBD_XP->pField[nIField + nNumPrivateMMField].FieldType == + 'N') + { + if (pBD_XP->pField[nIField + nNumPrivateMMField].Is64) + { + if (MMWriteValueToRecordDBXP( + hMiraMonLayer, pszRecordOnCourse, + pBD_XP->pField + nIField + nNumPrivateMMField, + &hMMFeature->pRecords[nIRecord] + .pField[nIField] + .iValue, + TRUE)) + return 1; + } + else + { + if (MMWriteValueToRecordDBXP( + hMiraMonLayer, pszRecordOnCourse, + pBD_XP->pField + nIField + nNumPrivateMMField, + &hMMFeature->pRecords[nIRecord] + .pField[nIField] + .dValue, + FALSE)) + return 1; + } + } + else if (pBD_XP->pField[nIField + nNumPrivateMMField].FieldType == + 'D') + { + if (MMWriteValueToRecordDBXP(hMiraMonLayer, pszRecordOnCourse, + pBD_XP->pField + nIField + + nNumPrivateMMField, + hMMFeature->pRecords[nIRecord] + .pField[nIField] + .pDinValue, + FALSE)) + return 1; + } + } + + if (MMAppendBlockToBuffer(pFlushRecList)) + return 1; + + (*nNumRecords)++; + } + return 0; +} + +// Adds feature records to a MiraMon database associated with a vector layer. +static int MMDetectAndFixDBFWidthChange( + struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MiraMonFeature *hMMFeature, struct MMAdmDatabase *pMMAdmDB, + MM_EXT_DBF_N_FIELDS nNumPrivateMMField, + MM_EXT_DBF_N_MULTIPLE_RECORDS nIRecord, MM_EXT_DBF_N_FIELDS nIField) +{ + if (!hMiraMonLayer) + return 1; + + if (!hMMFeature) + return 1; + + if (nIRecord >= hMMFeature->nNumMRecords) + return 1; + + if (nIField >= hMMFeature->pRecords[nIRecord].nNumField) + return 1; + + if (MMTestAndFixValueToRecordDBXP( + hMiraMonLayer, pMMAdmDB, nIField + nNumPrivateMMField, + hMMFeature->pRecords[nIRecord].pField[nIField].pDinValue)) + return 1; + + // We analyze next fields + if (nIField == hMMFeature->pRecords[nIRecord].nNumField - 1) + { + if (nIRecord + 1 < hMMFeature->nNumMRecords) + { + if (MMDetectAndFixDBFWidthChange(hMiraMonLayer, hMMFeature, + pMMAdmDB, nNumPrivateMMField, + nIRecord + 1, 0)) + return 1; + } + else + return 0; + } + else + { + if (nIField + 1 < hMMFeature->pRecords[nIRecord].nNumField) + { + if (MMDetectAndFixDBFWidthChange(hMiraMonLayer, hMMFeature, + pMMAdmDB, nNumPrivateMMField, + nIRecord, nIField + 1)) + return 1; + } + else + return 0; + } + return 0; +} // End of MMDetectAndFixDBFWidthChange() + +// Adds a DBF record to a MiraMon table associated with a vector layer. +// It sets up flush settings for writing to the table and initializes +// variables needed for the process. Then, it checks and fixes the width +// change if necessary. 
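+// It returns MM_CONTINUE_WRITING_FEATURES on success and
+// MM_FATAL_ERROR_WRITING_FEATURES on error, so an illustrative call from the
+// feature-writing loop would be:
+//   if (MMAddDBFRecordToMMDB(hMiraMonLayer, hMMFeature) !=
+//           MM_CONTINUE_WRITING_FEATURES)
+//       /* stop writing and report the error (hypothetical caller) */;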
+int MMAddDBFRecordToMMDB(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MiraMonFeature *hMMFeature) +{ + struct MM_DATA_BASE_XP *pBD_XP = nullptr; + MM_EXT_DBF_N_FIELDS nNumPrivateMMField = 0; + struct MM_FLUSH_INFO *pFlushRecList; + + if (!hMiraMonLayer) + return MM_FATAL_ERROR_WRITING_FEATURES; + + pBD_XP = hMiraMonLayer->MMAdmDBWriting.pMMBDXP; + + // Test length + if (hMMFeature && hMMFeature->nNumMRecords && + hMMFeature->pRecords[0].nNumField) + { + if (MMDetectAndFixDBFWidthChange(hMiraMonLayer, hMMFeature, + &hMiraMonLayer->MMAdmDBWriting, + nNumPrivateMMField, 0, 0)) + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + // Adding record to the MiraMon table (extended DBF) + // Flush settings + pFlushRecList = &hMiraMonLayer->MMAdmDBWriting.FlushRecList; + pFlushRecList->pBlockWhereToSaveOrRead = + (void *)hMiraMonLayer->MMAdmDBWriting.pRecList; + + pFlushRecList->pBlockToBeSaved = + (void *)hMiraMonLayer->MMAdmDBWriting.szRecordOnCourse; + pFlushRecList->SizeOfBlockToBeSaved = pBD_XP->BytesPerRecord; + + if (MMAddFeatureRecordToMMDB( + hMiraMonLayer, hMMFeature, &hMiraMonLayer->MMAdmDBWriting, + hMiraMonLayer->MMAdmDBWriting.szRecordOnCourse, pFlushRecList, + &hMiraMonLayer->MMAdmDBWriting.pMMBDXP->nRecords, + nNumPrivateMMField)) + return MM_FATAL_ERROR_WRITING_FEATURES; + + // In this case, the number of features is also updated + hMiraMonLayer->TopHeader.nElemCount = + hMiraMonLayer->MMAdmDBWriting.pMMBDXP->nRecords; + + return MM_CONTINUE_WRITING_FEATURES; +} + +// Adds a point record to a MiraMon table associated with a vector layer. +int MMAddPointRecordToMMDB(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MiraMonFeature *hMMFeature, + MM_INTERNAL_FID nElemCount) +{ + struct MM_DATA_BASE_XP *pBD_XP = nullptr; + MM_EXT_DBF_N_FIELDS nNumPrivateMMField = MM_PRIVATE_POINT_DB_FIELDS; + struct MM_FLUSH_INFO *pFlushRecList; + + if (!hMiraMonLayer) + return MM_FATAL_ERROR_WRITING_FEATURES; + + if (!hMMFeature) + return MM_FATAL_ERROR_WRITING_FEATURES; + + // In V1.1 only _UI32_MAX records number is allowed + if (MMCheckVersionForFID(hMiraMonLayer, + hMiraMonLayer->MMPoint.MMAdmDB.pMMBDXP->nRecords + + hMMFeature->nNumMRecords)) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "Error in MMCheckVersionForFID() (6)"); + return MM_STOP_WRITING_FEATURES; + } + + pBD_XP = hMiraMonLayer->MMPoint.MMAdmDB.pMMBDXP; + + // Test length + // Private fields + // ID_GRAFIC + if (MMWriteValueToszStringToOperate(hMiraMonLayer, pBD_XP->pField, + &nElemCount, TRUE)) + return MM_FATAL_ERROR_WRITING_FEATURES; + if (MMTestAndFixValueToRecordDBXP(hMiraMonLayer, + &hMiraMonLayer->MMPoint.MMAdmDB, 0, + hMiraMonLayer->szStringToOperate)) + return MM_FATAL_ERROR_WRITING_FEATURES; + + // GDAL fields + if (hMMFeature->nNumMRecords && hMMFeature->pRecords[0].nNumField) + { + if (MMDetectAndFixDBFWidthChange(hMiraMonLayer, hMMFeature, + &hMiraMonLayer->MMPoint.MMAdmDB, + nNumPrivateMMField, 0, 0)) + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + // Now length is sure, write + memset(hMiraMonLayer->MMPoint.MMAdmDB.szRecordOnCourse, 0, + pBD_XP->BytesPerRecord); + MMWriteValueToRecordDBXP(hMiraMonLayer, + hMiraMonLayer->MMPoint.MMAdmDB.szRecordOnCourse, + pBD_XP->pField, &nElemCount, TRUE); + + // Adding record to the MiraMon table (extended DBF) + // Flush settings + pFlushRecList = &hMiraMonLayer->MMPoint.MMAdmDB.FlushRecList; + pFlushRecList->pBlockWhereToSaveOrRead = + (void *)hMiraMonLayer->MMPoint.MMAdmDB.pRecList; + + pFlushRecList->pBlockToBeSaved = + (void 
*)hMiraMonLayer->MMPoint.MMAdmDB.szRecordOnCourse; + pFlushRecList->SizeOfBlockToBeSaved = pBD_XP->BytesPerRecord; + + if (MMAddFeatureRecordToMMDB( + hMiraMonLayer, hMMFeature, &hMiraMonLayer->MMPoint.MMAdmDB, + hMiraMonLayer->MMPoint.MMAdmDB.szRecordOnCourse, pFlushRecList, + &hMiraMonLayer->MMPoint.MMAdmDB.pMMBDXP->nRecords, + nNumPrivateMMField)) + return MM_FATAL_ERROR_WRITING_FEATURES; + return MM_CONTINUE_WRITING_FEATURES; +} + +// Adds a stringline record to a MiraMon table associated with a vector layer. +int MMAddArcRecordToMMDB(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MiraMonFeature *hMMFeature, + MM_INTERNAL_FID nElemCount, struct MM_AH *pArcHeader) +{ + struct MM_DATA_BASE_XP *pBD_XP = nullptr; + struct MiraMonArcLayer *pMMArcLayer; + MM_EXT_DBF_N_FIELDS nNumPrivateMMField = MM_PRIVATE_ARC_DB_FIELDS; + struct MM_FLUSH_INFO *pFlushRecList; + + if (!hMiraMonLayer) + return MM_FATAL_ERROR_WRITING_FEATURES; + + if (hMiraMonLayer->bIsPolygon) + pMMArcLayer = &hMiraMonLayer->MMPolygon.MMArc; + else + pMMArcLayer = &hMiraMonLayer->MMArc; + + // In V1.1 only _UI32_MAX records number is allowed + if (hMiraMonLayer->bIsPolygon) + { + if (MMCheckVersionForFID(hMiraMonLayer, + pMMArcLayer->MMAdmDB.pMMBDXP->nRecords + 1)) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "Error in MMCheckVersionForFID() (7)"); + return MM_STOP_WRITING_FEATURES; + } + } + else + { + if (MMCheckVersionForFID(hMiraMonLayer, + pMMArcLayer->MMAdmDB.pMMBDXP->nRecords + + hMMFeature->nNumMRecords)) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "Error in MMCheckVersionForFID() (8)"); + return MM_STOP_WRITING_FEATURES; + } + } + + pBD_XP = pMMArcLayer->MMAdmDB.pMMBDXP; + + // Test length + // Private fields + // ID_GRAFIC + if (MMWriteValueToszStringToOperate(hMiraMonLayer, pBD_XP->pField, + &nElemCount, TRUE)) + return MM_FATAL_ERROR_WRITING_FEATURES; + if (MMTestAndFixValueToRecordDBXP(hMiraMonLayer, &pMMArcLayer->MMAdmDB, 0, + hMiraMonLayer->szStringToOperate)) + return MM_FATAL_ERROR_WRITING_FEATURES; + + // N_VERTEXS + if (MMWriteValueToszStringToOperate(hMiraMonLayer, pBD_XP->pField + 1, + &pArcHeader->nElemCount, TRUE)) + return MM_FATAL_ERROR_WRITING_FEATURES; + if (MMTestAndFixValueToRecordDBXP(hMiraMonLayer, &pMMArcLayer->MMAdmDB, 1, + hMiraMonLayer->szStringToOperate)) + return MM_FATAL_ERROR_WRITING_FEATURES; + + // LENGTH + if (MMWriteValueToszStringToOperate(hMiraMonLayer, pBD_XP->pField + 2, + &pArcHeader->dfLength, FALSE)) + return MM_FATAL_ERROR_WRITING_FEATURES; + if (MMTestAndFixValueToRecordDBXP(hMiraMonLayer, &pMMArcLayer->MMAdmDB, 2, + hMiraMonLayer->szStringToOperate)) + return MM_FATAL_ERROR_WRITING_FEATURES; + + // NODE_INI + if (MMWriteValueToszStringToOperate(hMiraMonLayer, pBD_XP->pField + 3, + &pArcHeader->nFirstIdNode, TRUE)) + return MM_FATAL_ERROR_WRITING_FEATURES; + if (MMTestAndFixValueToRecordDBXP(hMiraMonLayer, &pMMArcLayer->MMAdmDB, 3, + hMiraMonLayer->szStringToOperate)) + return MM_FATAL_ERROR_WRITING_FEATURES; + + // NODE_FI + if (MMWriteValueToszStringToOperate(hMiraMonLayer, pBD_XP->pField + 4, + &pArcHeader->nLastIdNode, TRUE)) + return MM_FATAL_ERROR_WRITING_FEATURES; + if (MMTestAndFixValueToRecordDBXP(hMiraMonLayer, &pMMArcLayer->MMAdmDB, 4, + hMiraMonLayer->szStringToOperate)) + return MM_FATAL_ERROR_WRITING_FEATURES; + + // GDAL fields + if (!hMiraMonLayer->bIsPolygon) + { + if (hMMFeature->nNumMRecords && hMMFeature->pRecords[0].nNumField) + { + if (MMDetectAndFixDBFWidthChange(hMiraMonLayer, hMMFeature, + &pMMArcLayer->MMAdmDB, + nNumPrivateMMField, 
0, 0)) + return MM_FATAL_ERROR_WRITING_FEATURES; + } + } + + // Now length is sure, write + memset(pMMArcLayer->MMAdmDB.szRecordOnCourse, 0, pBD_XP->BytesPerRecord); + MMWriteValueToRecordDBXP(hMiraMonLayer, + pMMArcLayer->MMAdmDB.szRecordOnCourse, + pBD_XP->pField, &nElemCount, TRUE); + + MMWriteValueToRecordDBXP(hMiraMonLayer, + pMMArcLayer->MMAdmDB.szRecordOnCourse, + pBD_XP->pField + 1, &pArcHeader->nElemCount, TRUE); + + MMWriteValueToRecordDBXP(hMiraMonLayer, + pMMArcLayer->MMAdmDB.szRecordOnCourse, + pBD_XP->pField + 2, &pArcHeader->dfLength, FALSE); + + MMWriteValueToRecordDBXP( + hMiraMonLayer, pMMArcLayer->MMAdmDB.szRecordOnCourse, + pBD_XP->pField + 3, &pArcHeader->nFirstIdNode, TRUE); + + MMWriteValueToRecordDBXP( + hMiraMonLayer, pMMArcLayer->MMAdmDB.szRecordOnCourse, + pBD_XP->pField + 4, &pArcHeader->nLastIdNode, TRUE); + + // Adding record to the MiraMon table (extended DBF) + // Flush settings + pFlushRecList = &pMMArcLayer->MMAdmDB.FlushRecList; + pFlushRecList->pBlockWhereToSaveOrRead = + (void *)pMMArcLayer->MMAdmDB.pRecList; + + pFlushRecList->SizeOfBlockToBeSaved = pBD_XP->BytesPerRecord; + pFlushRecList->pBlockToBeSaved = + (void *)pMMArcLayer->MMAdmDB.szRecordOnCourse; + + if (hMiraMonLayer->bIsPolygon) + { + if (MMAppendBlockToBuffer(pFlushRecList)) + return MM_FATAL_ERROR_WRITING_FEATURES; + pMMArcLayer->MMAdmDB.pMMBDXP->nRecords++; + return MM_CONTINUE_WRITING_FEATURES; + } + + pFlushRecList->SizeOfBlockToBeSaved = pBD_XP->BytesPerRecord; + if (MMAddFeatureRecordToMMDB( + hMiraMonLayer, hMMFeature, &pMMArcLayer->MMAdmDB, + pMMArcLayer->MMAdmDB.szRecordOnCourse, pFlushRecList, + &pMMArcLayer->MMAdmDB.pMMBDXP->nRecords, nNumPrivateMMField)) + return MM_FATAL_ERROR_WRITING_FEATURES; + return MM_CONTINUE_WRITING_FEATURES; +} + +// Adds a node record to a MiraMon table associated with a vector layer. 
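+// Only the private fields ID_GRAFIC, ARCS_A_NOD and TIPUS_NODE are written;
+// node records carry no user (GDAL) attributes, so the record is appended
+// directly to the flush buffer instead of going through
+// MMAddFeatureRecordToMMDB().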
+int MMAddNodeRecordToMMDB(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_INTERNAL_FID nElemCount, struct MM_NH *pNodeHeader) +{ + struct MM_DATA_BASE_XP *pBD_XP = nullptr; + struct MiraMonNodeLayer *pMMNodeLayer; + double nDoubleValue; + + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->bIsPolygon) + pMMNodeLayer = &hMiraMonLayer->MMPolygon.MMArc.MMNode; + else + pMMNodeLayer = &hMiraMonLayer->MMArc.MMNode; + + if (!pMMNodeLayer) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "Error in pMMNodeLayer() (1)"); + return MM_STOP_WRITING_FEATURES; + } + + // In V1.1 only _UI32_MAX records number is allowed + if (MMCheckVersionForFID(hMiraMonLayer, + pMMNodeLayer->MMAdmDB.pMMBDXP->nRecords + 1)) + { + MMCPLError(CE_Failure, CPLE_NotSupported, + "Error in MMCheckVersionForFID() (9)"); + return MM_STOP_WRITING_FEATURES; + } + + // Test length + // Private fields + // ID_GRAFIC + if (MMWriteValueToszStringToOperate(hMiraMonLayer, + pMMNodeLayer->MMAdmDB.pMMBDXP->pField, + &nElemCount, TRUE)) + return MM_FATAL_ERROR_WRITING_FEATURES; + if (MMTestAndFixValueToRecordDBXP(hMiraMonLayer, &pMMNodeLayer->MMAdmDB, 0, + hMiraMonLayer->szStringToOperate)) + return MM_FATAL_ERROR_WRITING_FEATURES; + + // ARCS_A_NOD + nDoubleValue = pNodeHeader->nArcsCount; + if (MMWriteValueToszStringToOperate( + hMiraMonLayer, pMMNodeLayer->MMAdmDB.pMMBDXP->pField + 1, + &nDoubleValue, FALSE)) + return MM_FATAL_ERROR_WRITING_FEATURES; + if (MMTestAndFixValueToRecordDBXP(hMiraMonLayer, &pMMNodeLayer->MMAdmDB, 1, + hMiraMonLayer->szStringToOperate)) + return MM_FATAL_ERROR_WRITING_FEATURES; + + // TIPUS_NODE + nDoubleValue = pNodeHeader->cNodeType; + if (MMWriteValueToszStringToOperate( + hMiraMonLayer, pMMNodeLayer->MMAdmDB.pMMBDXP->pField + 2, + &nDoubleValue, FALSE)) + return MM_FATAL_ERROR_WRITING_FEATURES; + if (MMTestAndFixValueToRecordDBXP(hMiraMonLayer, &pMMNodeLayer->MMAdmDB, 2, + hMiraMonLayer->szStringToOperate)) + return MM_FATAL_ERROR_WRITING_FEATURES; + + // Adding record to the MiraMon table (extended DBF) + // Flush settings + pMMNodeLayer->MMAdmDB.FlushRecList.pBlockWhereToSaveOrRead = + (void *)pMMNodeLayer->MMAdmDB.pRecList; + + pBD_XP = pMMNodeLayer->MMAdmDB.pMMBDXP; + + pMMNodeLayer->MMAdmDB.FlushRecList.SizeOfBlockToBeSaved = + pBD_XP->BytesPerRecord; + pMMNodeLayer->MMAdmDB.FlushRecList.pBlockToBeSaved = + (void *)pMMNodeLayer->MMAdmDB.szRecordOnCourse; + + memset(pMMNodeLayer->MMAdmDB.szRecordOnCourse, 0, pBD_XP->BytesPerRecord); + MMWriteValueToRecordDBXP(hMiraMonLayer, + pMMNodeLayer->MMAdmDB.szRecordOnCourse, + pBD_XP->pField, &nElemCount, TRUE); + + nDoubleValue = pNodeHeader->nArcsCount; + MMWriteValueToRecordDBXP(hMiraMonLayer, + pMMNodeLayer->MMAdmDB.szRecordOnCourse, + pBD_XP->pField + 1, &nDoubleValue, FALSE); + + nDoubleValue = pNodeHeader->cNodeType; + MMWriteValueToRecordDBXP(hMiraMonLayer, + pMMNodeLayer->MMAdmDB.szRecordOnCourse, + pBD_XP->pField + 2, &nDoubleValue, FALSE); + + if (MMAppendBlockToBuffer(&pMMNodeLayer->MMAdmDB.FlushRecList)) + return MM_FATAL_ERROR_WRITING_FEATURES; + pMMNodeLayer->MMAdmDB.pMMBDXP->nRecords++; + return MM_CONTINUE_WRITING_FEATURES; +} + +// Adds a polygon or multipolygon record to a MiraMon table +// associated with a vector layer. 
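+// It writes the private field ID_GRAFIC and, except for the universal
+// polygon, N_VERTEXS, PERIMETER, AREA, N_ARCS and N_POLIG, followed by the
+// GDAL fields of the feature.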
+int MMAddPolygonRecordToMMDB(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MiraMonFeature *hMMFeature, + MM_INTERNAL_FID nElemCount, + MM_N_VERTICES_TYPE nVerticesCount, + struct MM_PH *pPolHeader) +{ + struct MM_DATA_BASE_XP *pBD_XP = nullptr; + MM_EXT_DBF_N_FIELDS nNumPrivateMMField = MM_PRIVATE_POLYGON_DB_FIELDS; + struct MM_FLUSH_INFO *pFlushRecList; + + if (!hMiraMonLayer) + return MM_FATAL_ERROR_WRITING_FEATURES; + + // In V1.1 only _UI32_MAX records number is allowed + if (MMCheckVersionForFID( + hMiraMonLayer, hMiraMonLayer->MMPolygon.MMAdmDB.pMMBDXP->nRecords + + (hMMFeature ? hMMFeature->nNumMRecords : 0))) + return MM_STOP_WRITING_FEATURES; + + pBD_XP = hMiraMonLayer->MMPolygon.MMAdmDB.pMMBDXP; + + // Test length + // Private fields + // ID_GRAFIC + if (MMWriteValueToszStringToOperate(hMiraMonLayer, pBD_XP->pField, + &nElemCount, TRUE)) + return MM_FATAL_ERROR_WRITING_FEATURES; + if (MMTestAndFixValueToRecordDBXP(hMiraMonLayer, + &hMiraMonLayer->MMPolygon.MMAdmDB, 0, + hMiraMonLayer->szStringToOperate)) + return MM_FATAL_ERROR_WRITING_FEATURES; + + // The other fields are valid if pPolHeader exists (it is not + // the universal polygon) + if (pPolHeader) + { + // N_VERTEXS + if (MMWriteValueToszStringToOperate(hMiraMonLayer, pBD_XP->pField + 1, + &nVerticesCount, TRUE)) + return MM_FATAL_ERROR_WRITING_FEATURES; + if (MMTestAndFixValueToRecordDBXP(hMiraMonLayer, + &hMiraMonLayer->MMPolygon.MMAdmDB, 1, + hMiraMonLayer->szStringToOperate)) + return MM_FATAL_ERROR_WRITING_FEATURES; + + // PERIMETER + if (MMWriteValueToszStringToOperate(hMiraMonLayer, pBD_XP->pField + 2, + &pPolHeader->dfPerimeter, FALSE)) + return MM_FATAL_ERROR_WRITING_FEATURES; + if (MMTestAndFixValueToRecordDBXP(hMiraMonLayer, + &hMiraMonLayer->MMPolygon.MMAdmDB, 2, + hMiraMonLayer->szStringToOperate)) + return MM_FATAL_ERROR_WRITING_FEATURES; + + // AREA + if (MMWriteValueToszStringToOperate(hMiraMonLayer, pBD_XP->pField + 3, + &pPolHeader->dfArea, FALSE)) + return MM_FATAL_ERROR_WRITING_FEATURES; + if (MMTestAndFixValueToRecordDBXP(hMiraMonLayer, + &hMiraMonLayer->MMPolygon.MMAdmDB, 3, + hMiraMonLayer->szStringToOperate)) + return MM_FATAL_ERROR_WRITING_FEATURES; + + // N_ARCS + if (MMWriteValueToszStringToOperate(hMiraMonLayer, pBD_XP->pField + 4, + &pPolHeader->nArcsCount, TRUE)) + return MM_FATAL_ERROR_WRITING_FEATURES; + if (MMTestAndFixValueToRecordDBXP(hMiraMonLayer, + &hMiraMonLayer->MMPolygon.MMAdmDB, 4, + hMiraMonLayer->szStringToOperate)) + return MM_FATAL_ERROR_WRITING_FEATURES; + + // N_POLIG + if (MMWriteValueToszStringToOperate(hMiraMonLayer, pBD_XP->pField + 5, + &pPolHeader->nRingsCount, TRUE)) + return MM_FATAL_ERROR_WRITING_FEATURES; + if (MMTestAndFixValueToRecordDBXP(hMiraMonLayer, + &hMiraMonLayer->MMPolygon.MMAdmDB, 5, + hMiraMonLayer->szStringToOperate)) + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + // GDAL fields + if (hMMFeature && hMMFeature->nNumMRecords && + hMMFeature->pRecords[0].nNumField) + { + if (MMDetectAndFixDBFWidthChange(hMiraMonLayer, hMMFeature, + &hMiraMonLayer->MMPolygon.MMAdmDB, + nNumPrivateMMField, 0, 0)) + return MM_FATAL_ERROR_WRITING_FEATURES; + } + + // Adding record to the MiraMon table (extended DBF) + // Flush settings + pFlushRecList = &hMiraMonLayer->MMPolygon.MMAdmDB.FlushRecList; + pFlushRecList->pBlockWhereToSaveOrRead = + (void *)hMiraMonLayer->MMPolygon.MMAdmDB.pRecList; + + pFlushRecList->SizeOfBlockToBeSaved = pBD_XP->BytesPerRecord; + pFlushRecList->pBlockToBeSaved = + (void *)hMiraMonLayer->MMPolygon.MMAdmDB.szRecordOnCourse; + + // Now 
length is sure, write + memset(hMiraMonLayer->MMPolygon.MMAdmDB.szRecordOnCourse, ' ', + pBD_XP->BytesPerRecord); + if (MMWriteValueToRecordDBXP( + hMiraMonLayer, hMiraMonLayer->MMPolygon.MMAdmDB.szRecordOnCourse, + pBD_XP->pField, &nElemCount, TRUE)) + return MM_FATAL_ERROR_WRITING_FEATURES; + + if (!hMMFeature) + { + if (MMAppendBlockToBuffer(pFlushRecList)) + return MM_FATAL_ERROR_WRITING_FEATURES; + hMiraMonLayer->MMPolygon.MMAdmDB.pMMBDXP->nRecords++; + return MM_CONTINUE_WRITING_FEATURES; + } + + if (pPolHeader) + { + MMWriteValueToRecordDBXP( + hMiraMonLayer, hMiraMonLayer->MMPolygon.MMAdmDB.szRecordOnCourse, + pBD_XP->pField + 1, &nVerticesCount, TRUE); + + MMWriteValueToRecordDBXP( + hMiraMonLayer, hMiraMonLayer->MMPolygon.MMAdmDB.szRecordOnCourse, + pBD_XP->pField + 2, &pPolHeader->dfPerimeter, FALSE); + + MMWriteValueToRecordDBXP( + hMiraMonLayer, hMiraMonLayer->MMPolygon.MMAdmDB.szRecordOnCourse, + pBD_XP->pField + 3, &pPolHeader->dfArea, FALSE); + + MMWriteValueToRecordDBXP( + hMiraMonLayer, hMiraMonLayer->MMPolygon.MMAdmDB.szRecordOnCourse, + pBD_XP->pField + 4, &pPolHeader->nArcsCount, TRUE); + + MMWriteValueToRecordDBXP( + hMiraMonLayer, hMiraMonLayer->MMPolygon.MMAdmDB.szRecordOnCourse, + pBD_XP->pField + 5, &pPolHeader->nRingsCount, TRUE); + } + + pFlushRecList->SizeOfBlockToBeSaved = pBD_XP->BytesPerRecord; + if (MMAddFeatureRecordToMMDB( + hMiraMonLayer, hMMFeature, &hMiraMonLayer->MMPolygon.MMAdmDB, + hMiraMonLayer->MMPolygon.MMAdmDB.szRecordOnCourse, pFlushRecList, + &hMiraMonLayer->MMPolygon.MMAdmDB.pMMBDXP->nRecords, + nNumPrivateMMField)) + return MM_FATAL_ERROR_WRITING_FEATURES; + return MM_CONTINUE_WRITING_FEATURES; +} + +// Close the MiraMon database associated with a vector layer. +static int MMCloseMMBD_XPFile(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MMAdmDatabase *MMAdmDB) +{ + int ret_code = 1; + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) + { + if (!MMAdmDB->pFExtDBF) + { + // In case of 0 elements created we have to + // create an empty DBF + if (hMiraMonLayer->bIsPolygon) + { + if (hMiraMonLayer->TopHeader.nElemCount <= 1) + { + if (MMCreateMMDB(hMiraMonLayer)) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMCreateMMDB())"); + goto end_label; + } + } + } + else if (hMiraMonLayer->bIsPoint || hMiraMonLayer->bIsArc) + { + if (hMiraMonLayer->TopHeader.nElemCount == 0) + { + if (MMCreateMMDB(hMiraMonLayer)) + { + MMCPLError(CE_Failure, CPLE_OutOfMemory, + "Memory error in MiraMon " + "driver (MMCreateMMDB())"); + goto end_label; + } + } + } + } + + if (MM_WriteNRecordsMMBD_XPFile(MMAdmDB)) + goto end_label; + + // Flushing all to be flushed + MMAdmDB->FlushRecList.SizeOfBlockToBeSaved = 0; + if (MMAppendBlockToBuffer(&MMAdmDB->FlushRecList)) + goto end_label; + } + + ret_code = 0; +end_label: + // Closing database files + fclose_and_nullify(&MMAdmDB->pFExtDBF); + + return ret_code; +} + +int MMCloseMMBD_XP(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + int ret_code = 0; + if (!hMiraMonLayer) + return 1; + + if (hMiraMonLayer->pMMBDXP) + { + fclose_and_nullify(&hMiraMonLayer->pMMBDXP->pfDataBase); + } + + if (hMiraMonLayer->bIsPoint) + ret_code = + MMCloseMMBD_XPFile(hMiraMonLayer, &hMiraMonLayer->MMPoint.MMAdmDB); + else if (hMiraMonLayer->bIsArc && !hMiraMonLayer->bIsPolygon) + { + if (MMCloseMMBD_XPFile(hMiraMonLayer, &hMiraMonLayer->MMArc.MMAdmDB)) + ret_code = 1; + if (MMCloseMMBD_XPFile(hMiraMonLayer, + &hMiraMonLayer->MMArc.MMNode.MMAdmDB)) + ret_code = 1; 
+ } + else if (hMiraMonLayer->bIsPolygon) + { + if (MMCloseMMBD_XPFile(hMiraMonLayer, + &hMiraMonLayer->MMPolygon.MMAdmDB)) + ret_code = 1; + if (MMCloseMMBD_XPFile(hMiraMonLayer, + &hMiraMonLayer->MMPolygon.MMArc.MMAdmDB)) + ret_code = 1; + if (MMCloseMMBD_XPFile(hMiraMonLayer, + &hMiraMonLayer->MMPolygon.MMArc.MMNode.MMAdmDB)) + ret_code = 1; + } + else if (hMiraMonLayer->bIsDBF) + ret_code = + MMCloseMMBD_XPFile(hMiraMonLayer, &hMiraMonLayer->MMAdmDBWriting); + + return ret_code; +} + +// Destroys the memory used to create a MiraMon table associated +// with a vector layer. +static void MMDestroyMMDBFile(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MMAdmDatabase *pMMAdmDB) +{ + if (!hMiraMonLayer) + return; + + if (pMMAdmDB && pMMAdmDB->szRecordOnCourse) + { + free_function(pMMAdmDB->szRecordOnCourse); + pMMAdmDB->szRecordOnCourse = nullptr; + } + if (hMiraMonLayer->szStringToOperate) + { + free_function(hMiraMonLayer->szStringToOperate); + hMiraMonLayer->szStringToOperate = nullptr; + hMiraMonLayer->nNumStringToOperate = 0; + } + + if (pMMAdmDB && pMMAdmDB->pMMBDXP) + { + MM_ReleaseDBFHeader(pMMAdmDB->pMMBDXP); + hMiraMonLayer->pMMBDXP = pMMAdmDB->pMMBDXP = nullptr; + } + if (pMMAdmDB && pMMAdmDB->pRecList) + { + free_function(pMMAdmDB->pRecList); + pMMAdmDB->pRecList = nullptr; + } +} + +void MMDestroyMMDB(struct MiraMonVectLayerInfo *hMiraMonLayer) +{ + if (!hMiraMonLayer) + return; + + if (hMiraMonLayer->bIsPoint) + { + MMDestroyMMDBFile(hMiraMonLayer, &hMiraMonLayer->MMPoint.MMAdmDB); + return; + } + if (hMiraMonLayer->bIsArc && !hMiraMonLayer->bIsPolygon) + { + MMDestroyMMDBFile(hMiraMonLayer, &hMiraMonLayer->MMArc.MMAdmDB); + MMDestroyMMDBFile(hMiraMonLayer, &hMiraMonLayer->MMArc.MMNode.MMAdmDB); + return; + } + if (hMiraMonLayer->bIsPolygon) + { + MMDestroyMMDBFile(hMiraMonLayer, &hMiraMonLayer->MMPolygon.MMAdmDB); + MMDestroyMMDBFile(hMiraMonLayer, + &hMiraMonLayer->MMPolygon.MMArc.MMAdmDB); + MMDestroyMMDBFile(hMiraMonLayer, + &hMiraMonLayer->MMPolygon.MMArc.MMNode.MMAdmDB); + } + if (hMiraMonLayer->bIsDBF) + MMDestroyMMDBFile(hMiraMonLayer, &hMiraMonLayer->MMAdmDBWriting); +} +#ifdef GDAL_COMPILATION +CPL_C_END // Necessary for compiling in GDAL project +#endif diff --git a/ogr/ogrsf_frmts/miramon/mm_wrlayr.h b/ogr/ogrsf_frmts/miramon/mm_wrlayr.h new file mode 100644 index 000000000000..a1d7bbea6307 --- /dev/null +++ b/ogr/ogrsf_frmts/miramon/mm_wrlayr.h @@ -0,0 +1,222 @@ +#ifndef __MM_WRLAYR_H +#define __MM_WRLAYR_H + +/* -------------------------------------------------------------------- */ +/* Necessary functions to read/write a MiraMon Vector File */ +/* -------------------------------------------------------------------- */ + +#include "mm_gdal_driver_structs.h" +#ifndef GDAL_COMPILATION +#include "gdalmmf.h" // For PTR_MM_CPLRecode, ptr_MM_CPLRecode(), ... +#else +#include "ogr_api.h" // For OGRLayerH +CPL_C_START // Necessary for compiling in GDAL project +#endif + +#ifndef GDAL_COMPILATION +#include "memo.h" +#include "F64_str.h" //For FILE_64 +#include "FILE_64.h" // Per a fseek_64(),... 
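+// In stand-alone (non-GDAL) builds, these MiraMon headers provide the file,
+// path and string helpers (fopenAO_64(), CanviaExtensio(), removeAO(), ...)
+// that the *_function macros defined further down are mapped to.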
+#include "bd_xp.h" //For MAX_LON_CAMP_DBF +#include "deftoler.h" // For QUASI_IGUAL +//#include "LbTopStr.h" // For struct GEOMETRIC_I_TOPOLOGIC_POL +//#include "str_snyd.h" // For struct SNY_TRANSFORMADOR_GEODESIA +#include "nomsfitx.h" // Per a CanviaExtensio() +#include "fitxers.h" // Per a removeAO() +#include "cadenes.h" // Per a EsCadenaDeBlancs() +#define calloc_function(a) MM_calloc((a)) +#define realloc_function MM_realloc +#define free_function(a) MM_free((a)) +#define fopen_function(f, a) fopenAO_64((f), (a)) +#define fflush_function fflush_64 +#define fclose_function(f) fclose_64((f)) +#define ftell_function(f) ftell_64((f)) +#define fwrite_function(p, s, r, f) fwrite_64((p), (s), (r), (f)) +#define fread_function(p, s, r, f) fread_64((p), (s), (r), (f)) +#define fseek_function(f, s, g) fseek_64((f), (s), (g)) +#define TruncateFile_function(a, b) TruncaFitxer_64((a), (b)) +#define strdup_function(p) strdup((p)) +#define get_filename_function TreuAdreca +#define get_path_function DonaAdreca +#define fprintf_function fprintf_64 +#define max_function(a, b) max((a), (b)) +#define get_extension_function(a) extensio(a) +#define reset_extension(a, b) CanviaExtensio((a), (b)) +#define remove_function(a) removeAO((a)) +#define OGR_F_GetFieldAsString_function(a, b) \ + ptr_MM_OGR_F_GetFieldAsString((a), (b)) +#define OGR_F_Destroy_function(a) ptr_MM_OGR_F_Destroy((a)) +#define GDALClose_function(a) ptr_MM_GDALClose((a)) +#define OGR_Fld_GetNameRef_function(a) ptr_MM_OGR_Fld_GetNameRef((a)) +#define OGR_FD_GetFieldDefn_function(a, b) ptr_MM_OGR_FD_GetFieldDefn((a), (b)) +#define GDALOpenEx_function(a, b, c, d, e) \ + ptr_MM_GDALOpenEx((a), (b), (c), (d), (e)) +#define OGR_FD_GetFieldCount_function(a) ptr_MM_OGR_FD_GetFieldCount((a)) +#define OGR_L_GetLayerDefn_function(a) ptr_MM_OGR_L_GetLayerDefn((a)) +#define OGR_L_GetNextFeature_function(a) ptr_MM_OGR_L_GetNextFeature((a)) +#define OGR_L_ResetReading_function(a) ptr_MM_OGR_L_ResetReading((a)) +#define GDALDatasetGetLayer_function(a, b) ptr_MM_GDALDatasetGetLayer((a), (b)) +#define CPLRecode_function(a, b, c) ptr_MM_CPLRecode((a), (b), (c)) +#define CPLFree_function(a) ptr_MM_CPLFree((a)) +#define form_filename_function(a, b) MuntaPath((a), (b), TRUE) +#define MM_CPLGetBasename(a) TreuAdreca((a)) +#define MM_IsNANDouble(x) EsNANDouble((x)) +#define MM_IsDoubleInfinite(x) EsDoubleInfinit((x)) +#else +#define calloc_function(a) VSICalloc(1, (a)) +#define realloc_function VSIRealloc +#define free_function(a) VSIFree((a)) +#define fopen_function(f, a) VSIFOpenL((f), (a)) +#define fflush_function VSIFFlushL +#define fclose_function(f) VSIFCloseL((f)) +#define ftell_function(f) VSIFTellL((f)) +#define fwrite_function(p, s, r, f) VSIFWriteL((p), (s), (r), (f)) +#define fread_function(p, s, r, f) VSIFReadL((p), (s), (r), (f)) +#define fseek_function(f, s, g) VSIFSeekL((f), (s), (g)) +#define TruncateFile_function(a, b) VSIFTruncateL((a), (b)) +#define strdup_function(p) CPLStrdup((p)) +#define get_filename_function CPLGetFilename +#define get_path_function CPLGetPath +#define fprintf_function VSIFPrintfL +#define max_function(a, b) MAX((a), (b)) +#define get_extension_function(a) CPLGetExtension((a)) +#define reset_extension(a, b) CPLResetExtension((a), (b)) +#define remove_function(a) VSIUnlink((a)) +#define OGR_F_GetFieldAsString_function(a, b) OGR_F_GetFieldAsString((a), (b)) +#define OGR_F_Destroy_function(a) OGR_F_Destroy((a)) +#define GDALClose_function(a) GDALClose((a)) +#define OGR_Fld_GetNameRef_function(a) OGR_Fld_GetNameRef((a)) 
+#define OGR_FD_GetFieldDefn_function(a, b) OGR_FD_GetFieldDefn((a), (b)) +#define GDALOpenEx_function(a, b, c, d, e) GDALOpenEx((a), (b), (c), (d), (e)) +#define OGR_FD_GetFieldCount_function(a) OGR_FD_GetFieldCount((a)) +#define OGR_L_GetLayerDefn_function(a) OGR_L_GetLayerDefn((a)) +#define OGR_L_GetNextFeature_function(a) OGR_L_GetNextFeature((a)) +#define OGR_L_ResetReading_function(a) OGR_L_ResetReading((a)) +#define GDALDatasetGetLayer_function(a, b) GDALDatasetGetLayer((a), (b)) +#define CPLRecode_function(a, b, c) CPLRecode((a), (b), (c)) +#define CPLFree_function(a) CPLFree((a)) +#define form_filename_function(a, b) CPLFormFilename((a), (b), "") +#define MM_CPLGetBasename(a) CPLGetBasename((a)) +#define MM_IsNANDouble(x) CPLIsNan((x)) +#define MM_IsDoubleInfinite(x) CPLIsInf((x)) +#endif + +/* -------------------------------------------------------------------- */ +/* Functions */ +/* -------------------------------------------------------------------- */ +// MM-GDAL functions +#ifdef GDAL_COMPILATION +#define MMCPLError CPLError +#define MMCPLWarning CPLError +#define MMCPLDebug CPLDebugOnly +#else + void + MMCPLError(int code, const char *fmt, ...); +void MMCPLWarning(int code, const char *fmt, ...); +void MMCPLDebug(int code, const char *fmt, ...); +#endif + +// Layer functions +int MMInitLayer(struct MiraMonVectLayerInfo *hMiraMonLayer, + const char *pzFileName, int LayerVersion, char nMMRecode, + char nMMLanguage, struct MiraMonDataBase *pLayerDB, + MM_BOOLEAN ReadOrWrite, struct MiraMonVectMapInfo *MMMap); +int MMInitLayerByType(struct MiraMonVectLayerInfo *hMiraMonLayer); +int MMDestroyLayer(struct MiraMonVectLayerInfo *hMiraMonLayer); +int MMCloseLayer(struct MiraMonVectLayerInfo *hMiraMonLayer); +int MMReadHeader(FILE_TYPE *pF, struct MM_TH *pMMHeader); +void MMInitHeader(struct MM_TH *pMMHeader, int layerType, int nVersion); +int MMWriteEmptyHeader(FILE_TYPE *pF, int layerType, int nVersion); +int MMReadAHArcSection(struct MiraMonVectLayerInfo *hMiraMonLayer); +int MMReadPHPolygonSection(struct MiraMonVectLayerInfo *hMiraMonLayer); +int MMReadZDescriptionHeaders(struct MiraMonVectLayerInfo *hMiraMonLayer, + FILE_TYPE *pF, MM_INTERNAL_FID nElements, + struct MM_ZSection *pZSection); +int MMReadZSection(struct MiraMonVectLayerInfo *hMiraMonLayer, FILE_TYPE *pF, + struct MM_ZSection *pZSection); + +// Feature functions +int MMInitFeature(struct MiraMonFeature *MMFeature); +void MMResetFeatureGeometry(struct MiraMonFeature *MMFeature); +void MMResetFeatureRecord(struct MiraMonFeature *hMMFeature); +void MMDestroyFeature(struct MiraMonFeature *MMFeature); +int MMAddFeature(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MiraMonFeature *hMiraMonFeature); +int MMCheckSize_t(GUInt64 nCount, GUInt64 nSize); +int MMGetVectorVersion(struct MM_TH *pTopHeader); +int MMInitFlush(struct MM_FLUSH_INFO *pFlush, FILE_TYPE *pF, GUInt64 nBlockSize, + char **pBuffer, MM_FILE_OFFSET DiskOffsetWhereToFlush, + GInt32 nMyDiskSize); +int MMReadFlush(struct MM_FLUSH_INFO *pFlush); +int MMReadBlockFromBuffer(struct MM_FLUSH_INFO *FlushInfo); +int MMReadGUInt64DependingOnVersion(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MM_FLUSH_INFO *FlushInfo, + GUInt64 *pnUI64); +int MMReadOffsetDependingOnVersion(struct MiraMonVectLayerInfo *hMiraMonLayer, + struct MM_FLUSH_INFO *FlushInfo, + MM_FILE_OFFSET *nUI64); + +// Tool functions +char *MMReturnValueFromSectionINIFile(const char *filename, const char *section, + const char *key); + +// In order to be efficient we reserve space to reuse it 
every +// time a feature is added. +int MMResizeMiraMonFieldValue(struct MiraMonFieldValue **pFieldValue, + MM_EXT_DBF_N_FIELDS *nMax, + MM_EXT_DBF_N_FIELDS nNum, + MM_EXT_DBF_N_FIELDS nIncr, + MM_EXT_DBF_N_FIELDS nProposedMax); + +int MMResizeMiraMonPolygonArcs(struct MM_PAL_MEM **pFID, + MM_POLYGON_ARCS_COUNT *nMax, + MM_POLYGON_ARCS_COUNT nNum, + MM_POLYGON_ARCS_COUNT nIncr, + MM_POLYGON_ARCS_COUNT nProposedMax); + +int MMResizeMiraMonRecord(struct MiraMonRecord **pMiraMonRecord, + MM_EXT_DBF_N_MULTIPLE_RECORDS *nMax, + MM_EXT_DBF_N_MULTIPLE_RECORDS nNum, + MM_EXT_DBF_N_MULTIPLE_RECORDS nIncr, + MM_EXT_DBF_N_MULTIPLE_RECORDS nProposedMax); + +int MMResize_MM_N_VERTICES_TYPE_Pointer(MM_N_VERTICES_TYPE **pUI64, + MM_N_VERTICES_TYPE *nMax, + MM_N_VERTICES_TYPE nNum, + MM_N_VERTICES_TYPE nIncr, + MM_N_VERTICES_TYPE nProposedMax); + +int MMResizeVFGPointer(char **pInt, MM_INTERNAL_FID *nMax, MM_INTERNAL_FID nNum, + MM_INTERNAL_FID nIncr, MM_INTERNAL_FID nProposedMax); + +int MMResizeMM_POINT2DPointer(struct MM_POINT_2D **pPoint2D, + MM_N_VERTICES_TYPE *nMax, MM_N_VERTICES_TYPE nNum, + MM_N_VERTICES_TYPE nIncr, + MM_N_VERTICES_TYPE nProposedMax); + +int MMResizeDoublePointer(MM_COORD_TYPE **pDouble, MM_N_VERTICES_TYPE *nMax, + MM_N_VERTICES_TYPE nNum, MM_N_VERTICES_TYPE nIncr, + MM_N_VERTICES_TYPE nProposedMax); +int MMResizeStringToOperateIfNeeded(struct MiraMonVectLayerInfo *hMiraMonLayer, + MM_EXT_DBF_N_FIELDS nNewSize); +int MMIsEmptyString(const char *string); +int MMGetNFieldValue(const char *pszStringList, GUInt32 nIRecord, + char *pszPartOfRawValue, size_t nSizeOfRawValue); +// Metadata functions +int MMReturnCodeFromMM_m_idofic(char *pMMSRS_or_pSRS, char *result, + MM_BYTE direction); + +#define EPSG_FROM_MMSRS 0 +#define MMSRS_FROM_EPSG 1 +#define ReturnEPSGCodeSRSFromMMIDSRS(pMMSRS, szResult) \ + MMReturnCodeFromMM_m_idofic((pMMSRS), (szResult), EPSG_FROM_MMSRS) +#define ReturnMMIDSRSFromEPSGCodeSRS(pSRS, szResult) \ + MMReturnCodeFromMM_m_idofic((pSRS), (szResult), MMSRS_FROM_EPSG) + +int MMWriteVectorMetadata(struct MiraMonVectLayerInfo *hMiraMonLayer); +int MMCheck_REL_FILE(const char *szREL_file); + +#ifdef GDAL_COMPILATION +CPL_C_END // Necessary for compiling in GDAL project +#endif +#endif //__MM_WRLAYR_H diff --git a/ogr/ogrsf_frmts/miramon/ogrmiramon.h b/ogr/ogrsf_frmts/miramon/ogrmiramon.h new file mode 100644 index 000000000000..a57f6570db97 --- /dev/null +++ b/ogr/ogrsf_frmts/miramon/ogrmiramon.h @@ -0,0 +1,169 @@ +/****************************************************************************** + * $Id$ + * + * Project: OpenGIS Simple Features Reference Implementation + * Purpose: C++ classes for the MiraMon driver + * Author: Abel Pau + ****************************************************************************** + * Copyright (c) 2024, Xavier Pons + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + ****************************************************************************/ + +#ifndef OGRMIRAMON_H_INCLUDED +#define OGRMIRAMON_H_INCLUDED + +#include "ogrsf_frmts.h" +#include "ogr_api.h" +#include "cpl_string.h" +#include "mm_wrlayr.h" + +/************************************************************************/ +/* OGRMiraMonLayer */ +/************************************************************************/ + +class OGRMiraMonLayer final + : public OGRLayer, + public OGRGetNextFeatureThroughRaw<OGRMiraMonLayer> +{ + GDALDataset *m_poDS; + OGRSpatialReference *m_poSRS; + OGRFeatureDefn *m_poFeatureDefn; + + GUIntBig m_iNextFID; + + // Pointer to one of three possible MiraMon layers: points, + // arcs or polygons. Every time a feature is read this pointer + // points to the appropriate layer + struct MiraMonVectLayerInfo *phMiraMonLayer; + + // When writing a layer + struct MiraMonVectLayerInfo hMiraMonLayerPNT; // MiraMon points layer + struct MiraMonVectLayerInfo hMiraMonLayerARC; // MiraMon arcs layer + struct MiraMonVectLayerInfo hMiraMonLayerPOL; // MiraMon polygons layer + + // When reading a layer or the result of writing is only a DBF + struct MiraMonVectLayerInfo hMiraMonLayerReadOrNonGeom; + + struct MiraMonFeature hMMFeature; // Feature reading/writing + + bool m_bUpdate; + + VSILFILE *m_fp = nullptr; + + // Array of doubles used in the field features processing + double *padfValues; + GInt64 *pnInt64Values; + + OGRFeature *GetNextRawFeature(); + OGRFeature *GetFeature(GIntBig nFeatureId) override; + void GoToFieldOfMultipleRecord(MM_INTERNAL_FID iFID, + MM_EXT_DBF_N_RECORDS nIRecord, + MM_EXT_DBF_N_FIELDS nIField); + + OGRErr MMDumpVertices(OGRGeometryH hGeom, MM_BOOLEAN bExternalRing, + MM_BOOLEAN bUseVFG); + OGRErr MMProcessGeometry(OGRGeometryH poGeom, OGRFeature *poFeature, + MM_BOOLEAN bcalculateRecord); + OGRErr MMProcessMultiGeometry(OGRGeometryH hGeom, OGRFeature *poFeature); + OGRErr MMLoadGeometry(OGRGeometryH hGeom); + OGRErr MMWriteGeometry(); + GIntBig GetFeatureCount(int bForce) override; + + public: + bool bValidFile; + + OGRMiraMonLayer(GDALDataset *poDS, const char *pszFilename, VSILFILE *fp, + const OGRSpatialReference *poSRS, int bUpdate, + CSLConstList papszOpenOptions, + struct MiraMonVectMapInfo *MMMap); + virtual ~OGRMiraMonLayer(); + + void ResetReading() override; + DEFINE_GET_NEXT_FEATURE_THROUGH_RAW(OGRMiraMonLayer) + + OGRErr TranslateFieldsToMM(); + OGRErr TranslateFieldsValuesToMM(OGRFeature *poFeature); + OGRErr GetExtent(OGREnvelope *psExtent, int bForce) override; + + OGRFeatureDefn *GetLayerDefn() override; + + virtual OGRErr GetExtent(int iGeomField, OGREnvelope *psExtent, + int bForce) override + { + return OGRLayer::GetExtent(iGeomField, psExtent, bForce); + } + + OGRErr ICreateFeature(OGRFeature *poFeature) override; + + virtual OGRErr CreateField(const OGRFieldDefn *poField, + int bApproxOK = TRUE) override; + + int TestCapability(const char *) override; + void AddToFileList(CPLStringList &oFileList); + + GDALDataset *GetDataset() override + { + 
return m_poDS; + } +}; + +/************************************************************************/ +/* OGRMiraMonDataSource */ +/************************************************************************/ + +class OGRMiraMonDataSource final : public OGRDataSource +{ + OGRMiraMonLayer **papoLayers; + int nLayers; + char *pszRootName; + char *pszDSName; + bool bUpdate; + struct MiraMonVectMapInfo MMMap; + + public: + OGRMiraMonDataSource(); + ~OGRMiraMonDataSource(); + + int Open(const char *pszFilename, VSILFILE *fp, + const OGRSpatialReference *poSRS, int bUpdate, + CSLConstList papszOpenOptions); + int Create(const char *pszFilename, char **papszOptions); + + const char *GetName() override + { + return pszDSName; + } + + int GetLayerCount() override + { + return nLayers; + } + + OGRLayer *GetLayer(int) override; + char **GetFileList() override; + + OGRLayer *ICreateLayer(const char *pszLayerName, + const OGRGeomFieldDefn *poGeomFieldDefn, + CSLConstList papszOptions) override; + + int TestCapability(const char *) override; +}; + +#endif /* OGRMIRAMON_H_INCLUDED */ diff --git a/ogr/ogrsf_frmts/miramon/ogrmiramondatasource.cpp b/ogr/ogrsf_frmts/miramon/ogrmiramondatasource.cpp new file mode 100644 index 000000000000..92b2572c29dd --- /dev/null +++ b/ogr/ogrsf_frmts/miramon/ogrmiramondatasource.cpp @@ -0,0 +1,291 @@ +/****************************************************************************** + * + * Project: OpenGIS Simple Features Reference Implementation + * Purpose: Implements OGRMiraMonDataSource class. + * Author: Abel Pau + ****************************************************************************** + * Copyright (c) 2024, Xavier Pons + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. 
+ ****************************************************************************/ + +#include "ogrmiramon.h" + +/****************************************************************************/ +/* OGRMiraMonDataSource() */ +/****************************************************************************/ +OGRMiraMonDataSource::OGRMiraMonDataSource() + : papoLayers(nullptr), nLayers(0), pszRootName(nullptr), pszDSName(nullptr), + bUpdate(false) + +{ + MMMap.nNumberOfLayers = 0; + MMMap.fMMMap = nullptr; +} + +/****************************************************************************/ +/* ~OGRMiraMonDataSource() */ +/****************************************************************************/ + +OGRMiraMonDataSource::~OGRMiraMonDataSource() + +{ + for (int i = 0; i < nLayers; i++) + delete papoLayers[i]; + CPLFree(papoLayers); + CPLFree(pszDSName); + CPLFree(pszRootName); + + if (MMMap.fMMMap) + VSIFCloseL(MMMap.fMMMap); +} + +/****************************************************************************/ +/* Open() */ +/****************************************************************************/ + +int OGRMiraMonDataSource::Open(const char *pszFilename, VSILFILE *fp, + const OGRSpatialReference *poSRS, int bUpdateIn, + CSLConstList papszOpenOptionsUsr) + +{ + bUpdate = CPL_TO_BOOL(bUpdateIn); + + OGRMiraMonLayer *poLayer = new OGRMiraMonLayer( + this, pszFilename, fp, poSRS, bUpdate, papszOpenOptionsUsr, &MMMap); + if (!poLayer->bValidFile) + { + delete poLayer; + return FALSE; + } + papoLayers = static_cast<OGRMiraMonLayer **>(CPLRealloc( + papoLayers, + (size_t)(sizeof(OGRMiraMonLayer *) * ((size_t)nLayers + (size_t)1)))); + papoLayers[nLayers] = poLayer; + nLayers++; + + if (pszDSName) + { + const char *pszExtension = CPLGetExtension(pszDSName); + if (!EQUAL(pszExtension, "pol") && !EQUAL(pszExtension, "arc") && + !EQUAL(pszExtension, "pnt")) + { + CPLStrlcpy( + MMMap.pszMapName, + CPLFormFilename(pszDSName, CPLGetBasename(pszDSName), "mmm"), + sizeof(MMMap.pszMapName)); + if (!MMMap.nNumberOfLayers) + { + MMMap.fMMMap = VSIFOpenL(MMMap.pszMapName, "w+"); + if (!MMMap.fMMMap) + { + // It could be an error but it is not so important + // to stop the process. This map is an extra element + // to open all layers in one click, at least in MiraMon + // software. + *MMMap.pszMapName = '\0'; + } + else + { + VSIFPrintfL(MMMap.fMMMap, "[VERSIO]\n"); + VSIFPrintfL(MMMap.fMMMap, "Vers=2\n"); + VSIFPrintfL(MMMap.fMMMap, "SubVers=0\n"); + VSIFPrintfL(MMMap.fMMMap, "variant=b\n"); + VSIFPrintfL(MMMap.fMMMap, "\n"); + VSIFPrintfL(MMMap.fMMMap, "[DOCUMENT]\n"); + VSIFPrintfL(MMMap.fMMMap, "Titol= %s(map)\n", + CPLGetBasename(poLayer->GetName())); + VSIFPrintfL(MMMap.fMMMap, "\n"); + } + } + } + else + *MMMap.pszMapName = '\0'; + } + else + *MMMap.pszMapName = '\0'; + + if (pszDSName) + CPLFree(pszDSName); + pszDSName = CPLStrdup(pszFilename); + + return TRUE; +} + +/****************************************************************************/ +/* Create() */ +/* */ +/* Create a new datasource. This does not really do anything */ +/* currently but save the name. 
 */
+/****************************************************************************/
+
+int OGRMiraMonDataSource::Create(const char *pszDataSetName,
+                                 char ** /* papszOptions */)
+
+{
+    bUpdate = TRUE;
+    pszDSName = CPLStrdup(pszDataSetName);
+    pszRootName = CPLStrdup(pszDataSetName);
+
+    return TRUE;
+}
+
+/****************************************************************************/
+/*                             ICreateLayer()                              */
+/****************************************************************************/
+
+OGRLayer *
+OGRMiraMonDataSource::ICreateLayer(const char *pszLayerName,
+                                   const OGRGeomFieldDefn *poGeomFieldDefn,
+                                   CSLConstList papszOptions)
+{
+    CPLAssert(nullptr != pszLayerName);
+
+    const auto eType = poGeomFieldDefn ? poGeomFieldDefn->GetType() : wkbNone;
+    const auto poSRS =
+        poGeomFieldDefn ? poGeomFieldDefn->GetSpatialRef() : nullptr;
+
+    // Seed the random number generator used by
+    // MMGenerateFileIdentifierFromMetadataFileName() to generate a random
+    // file identifier.
+    srand((unsigned int)time(nullptr));
+
+    if (OGR_GT_HasM(eType))
+    {
+        CPLError(CE_Warning, CPLE_NotSupported,
+                 "Measures in this layer will be ignored.");
+    }
+
+    /* -------------------------------------------------------------------- */
+    /*    If the dataset has an extension, it is understood that the path   */
+    /*    of the file is where to write, and the layer name is the          */
+    /*    dataset name (without extension).                                 */
+    /* -------------------------------------------------------------------- */
+    const char *pszExtension = CPLGetExtension(pszRootName);
+    char *pszFullMMLayerName;
+    if (EQUAL(pszExtension, "pol") || EQUAL(pszExtension, "arc") ||
+        EQUAL(pszExtension, "pnt"))
+    {
+        char *pszMMLayerName;
+        pszMMLayerName = CPLStrdup(CPLResetExtension(pszRootName, ""));
+        pszMMLayerName[strlen(pszMMLayerName) - 1] = '\0';
+
+        pszFullMMLayerName = CPLStrdup((const char *)pszMMLayerName);
+
+        // Check that the destination folder exists
+        const char *szDestFolder = CPLGetDirname(pszFullMMLayerName);
+        if (!STARTS_WITH(szDestFolder, "/vsimem"))
+        {
+            VSIStatBufL sStat;
+            if (VSIStatL(szDestFolder, &sStat) != 0 ||
+                !VSI_ISDIR(sStat.st_mode))
+            {
+                CPLFree(pszMMLayerName);
+                CPLFree(pszFullMMLayerName);
+                CPLError(CE_Failure, CPLE_AppDefined,
+                         "The folder %s does not exist.", szDestFolder);
+                return nullptr;
+            }
+        }
+        CPLFree(pszMMLayerName);
+    }
+    else
+    {
+        const char *osPath;
+
+        osPath = pszRootName;
+        pszFullMMLayerName =
+            CPLStrdup(CPLFormFilename(pszRootName, pszLayerName, ""));
+
+        /* ---------------------------------------------------------------- */
+        /*    Create the folder if it does not exist yet                    */
+        /*    (only the last level of the path).                            */
+        /* ---------------------------------------------------------------- */
+        if (!STARTS_WITH(osPath, "/vsimem"))
+        {
+            VSIStatBufL sStat;
+            if (VSIStatL(osPath, &sStat) != 0 || !VSI_ISDIR(sStat.st_mode))
+            {
+                if (VSIMkdir(osPath, 0755) != 0)
+                {
+                    CPLFree(pszFullMMLayerName);
+                    CPLError(CE_Failure, CPLE_AppDefined,
+                             "Unable to create the folder %s.", pszRootName);
+                    return nullptr;
+                }
+            }
+        }
+    }
+
+    /* -------------------------------------------------------------------- */
+    /*      Return open layer handle.
*/ + /* -------------------------------------------------------------------- */ + if (Open(pszFullMMLayerName, nullptr, poSRS, TRUE, papszOptions)) + { + CPLFree(pszFullMMLayerName); + auto poLayer = papoLayers[nLayers - 1]; + return poLayer; + } + + CPLFree(pszFullMMLayerName); + return nullptr; +} + +/****************************************************************************/ +/* TestCapability() */ +/****************************************************************************/ + +int OGRMiraMonDataSource::TestCapability(const char *pszCap) + +{ + if (EQUAL(pszCap, ODsCCreateLayer)) + return bUpdate; + else if (EQUAL(pszCap, ODsCZGeometries)) + return TRUE; + + return FALSE; +} + +/****************************************************************************/ +/* GetLayer() */ +/****************************************************************************/ + +OGRLayer *OGRMiraMonDataSource::GetLayer(int iLayer) + +{ + if (iLayer < 0 || iLayer >= nLayers) + return nullptr; + + return papoLayers[iLayer]; +} + +/************************************************************************/ +/* GetFileList() */ +/************************************************************************/ + +char **OGRMiraMonDataSource::GetFileList() +{ + CPLStringList oFileList; + GetLayerCount(); + for (int i = 0; i < nLayers; i++) + { + OGRMiraMonLayer *poLayer = papoLayers[i]; + poLayer->AddToFileList(oFileList); + } + return oFileList.StealList(); +} diff --git a/ogr/ogrsf_frmts/miramon/ogrmiramondriver.cpp b/ogr/ogrsf_frmts/miramon/ogrmiramondriver.cpp new file mode 100644 index 000000000000..a4b96da1fe2a --- /dev/null +++ b/ogr/ogrsf_frmts/miramon/ogrmiramondriver.cpp @@ -0,0 +1,212 @@ +/****************************************************************************** + * + * Project: OpenGIS Simple Features Reference Implementation + * Purpose: Implements OGRMiraMonDriver class. + * Author: Abel Pau + ****************************************************************************** + * Copyright (c) 2024, Xavier Pons + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. 
+ ****************************************************************************/ + +#include "ogrmiramon.h" + +/****************************************************************************/ +/* OGRMMDriverIdentify() */ +/****************************************************************************/ + +static int OGRMiraMonDriverIdentify(GDALOpenInfo *poOpenInfo) + +{ + if (poOpenInfo->fpL == nullptr || poOpenInfo->nHeaderBytes < 7) + return FALSE; + else if (EQUAL(CPLGetExtension(poOpenInfo->pszFilename), "PNT") || + EQUAL(CPLGetExtension(poOpenInfo->pszFilename), "ARC") || + EQUAL(CPLGetExtension(poOpenInfo->pszFilename), "POL")) + { + // Format + if ((poOpenInfo->pabyHeader[0] == 'P' && + poOpenInfo->pabyHeader[1] == 'N' && + poOpenInfo->pabyHeader[2] == 'T') || + (poOpenInfo->pabyHeader[0] == 'A' && + poOpenInfo->pabyHeader[1] == 'R' && + poOpenInfo->pabyHeader[2] == 'C') || + (poOpenInfo->pabyHeader[0] == 'P' && + poOpenInfo->pabyHeader[1] == 'O' && + poOpenInfo->pabyHeader[2] == 'L')) + { + // Version 1.1 or 2.0 + if ((poOpenInfo->pabyHeader[3] == ' ' && + poOpenInfo->pabyHeader[4] == '1' && + poOpenInfo->pabyHeader[5] == '.' && + poOpenInfo->pabyHeader[6] == '1') || + (poOpenInfo->pabyHeader[3] == ' ' && + poOpenInfo->pabyHeader[4] == '2' && + poOpenInfo->pabyHeader[5] == '.' && + poOpenInfo->pabyHeader[6] == '0')) + { + return TRUE; + } + } + } + + return FALSE; +} + +/****************************************************************************/ +/* OGRMiraMonDriverOpen() */ +/****************************************************************************/ + +static GDALDataset *OGRMiraMonDriverOpen(GDALOpenInfo *poOpenInfo) + +{ + if (OGRMiraMonDriverIdentify(poOpenInfo) == FALSE) + return nullptr; + + OGRMiraMonDataSource *poDS = new OGRMiraMonDataSource(); + + if (poDS != nullptr && poOpenInfo->eAccess == GA_Update) + { + CPLError(CE_Failure, CPLE_OpenFailed, + "MiraMonVector driver does not support update."); + delete poDS; + poDS = nullptr; + } + else + { + if (!poDS->Open(poOpenInfo->pszFilename, nullptr, nullptr, + poOpenInfo->eAccess == GA_Update, + poOpenInfo->papszOpenOptions)) + { + delete poDS; + poDS = nullptr; + } + } + + return poDS; +} + +/****************************************************************************/ +/* OGRMiraMonDriverCreate() */ +/****************************************************************************/ + +static GDALDataset * +OGRMiraMonDriverCreate(const char *pszName, CPL_UNUSED int /*nBands*/, + CPL_UNUSED int /*nXSize*/, CPL_UNUSED int /*nYSize*/, + CPL_UNUSED GDALDataType /*eDT*/, char **papszOptions) +{ + OGRMiraMonDataSource *poDS = new OGRMiraMonDataSource(); + + if (poDS->Create(pszName, papszOptions)) + return poDS; + + delete poDS; + return nullptr; +} + +/****************************************************************************/ +/* RegisterOGRMM() */ +/****************************************************************************/ + +void RegisterOGRMiraMon() + +{ + if (GDALGetDriverByName("MiraMonVector") != nullptr) + return; + + GDALDriver *poDriver = new GDALDriver(); + poDriver->SetDescription("MiraMonVector"); + poDriver->SetMetadataItem(GDAL_DCAP_VECTOR, "YES"); + poDriver->SetMetadataItem(GDAL_DCAP_CREATE_LAYER, "YES"); + poDriver->SetMetadataItem(GDAL_DCAP_CREATE_FIELD, "YES"); + poDriver->SetMetadataItem(GDAL_DMD_LONGNAME, + "MiraMon Vectors (.pol, .arc, .pnt)"); + poDriver->SetMetadataItem(GDAL_DMD_EXTENSIONS, "pol arc pnt"); + poDriver->SetMetadataItem(GDAL_DMD_HELPTOPIC, + "drivers/vector/miramon.html"); + 
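+    // As a usage sketch (with a hypothetical "cities.pnt" file), the open
+    // options declared below can be passed through GDALOpenEx(), e.g.:
+    //   const char *const apszOO[] = {"Height=Highest", "OpenLanguage=ENG",
+    //                                 nullptr};
+    //   GDALDatasetH hDS = GDALOpenEx("cities.pnt", GDAL_OF_VECTOR, nullptr,
+    //                                 apszOO, nullptr);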
    poDriver->SetMetadataItem(GDAL_DCAP_VIRTUALIO, "YES");
+    poDriver->SetMetadataItem(GDAL_DCAP_Z_GEOMETRIES, "YES");
+
+    poDriver->SetMetadataItem(
+        GDAL_DMD_OPENOPTIONLIST,
+        "<OpenOptionList>"
+        "  <Option name='Height' scope='vector' type='string-select' "
+        "    description='Sets which of the possible heights is chosen: "
+        "the first, the highest or the lowest one.'>"
+        "    <Value>First</Value>"
+        "    <Value>Lowest</Value>"
+        "    <Value>Highest</Value>"
+        "  </Option>"
+        "  <Option name='MultiRecordIndex' scope='vector' type='string' "
+        "    description='Sets which of the possible records is chosen: "
+        "0, 1, 2,... or Last. Use JSON to get the records serialized "
+        "as a JSON array.'>"
+        "  </Option>"
+        "  <Option name='OpenLanguage' scope='vector' type='string-select' "
+        "    description='If the layer to be opened is multilingual "
+        "(the *.rel* file can hold descriptors in several languages), "
+        "this option sets the language to be read.'>"
+        "    <Value>ENG</Value>"
+        "    <Value>CAT</Value>"
+        "    <Value>SPA</Value>"
+        "  </Option>"
+        "</OpenOptionList>");
+
+    poDriver->SetMetadataItem(
+        GDAL_DS_LAYER_CREATIONOPTIONLIST,
+        "<LayerCreationOptionList>"
+        " <Option name='Version' type='string-select' description='Version of "
+        "the file. "
+        "V1.1 is limited to 32-bit FIDs and internal offsets. "
+        "V2.0 is the 64-bit version, with practically no limit on FIDs or "
+        "internal offsets.' "
+        "default='last_version'>"
+        "<Value>V1.1</Value>"
+        "<Value>V2.0</Value>"
+        "<Value>last_version</Value>"
+        "</Option>"
+        " <Option name='DBFEncoding' type='string-select' "
+        "description='Encoding of the .dbf files. "
+        "MiraMon can write *.dbf* files in these two charsets.' "
+        "default='ANSI'>"
+        "<Value>UTF8</Value>"
+        "<Value>ANSI</Value>"
+        "</Option>"
+        " <Option name='CreationLanguage' scope='vector' type='string-select' "
+        " description='Sets the language used to write the descriptors "
+        "in the metadata (*.rel*) file of the created layer.'>"
+        " <Value>ENG</Value>"
+        " <Value>CAT</Value>"
+        " <Value>SPA</Value>"
+        " </Option>"
+        "</LayerCreationOptionList>");
+
+    poDriver->SetMetadataItem(
+        GDAL_DMD_CREATIONFIELDDATATYPES,
+        "Integer Integer64 Real String Date Time "
+        "Binary IntegerList Integer64List RealList StringList");
+    poDriver->pfnOpen = OGRMiraMonDriverOpen;
+    poDriver->pfnIdentify = OGRMiraMonDriverIdentify;
+    poDriver->pfnCreate = OGRMiraMonDriverCreate;
+
+    GetGDALDriverManager()->RegisterDriver(poDriver);
+}
diff --git a/ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp b/ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp
new file mode 100644
index 000000000000..ad04304d48c5
--- /dev/null
+++ b/ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp
@@ -0,0 +1,2761 @@
+/******************************************************************************
+ *
+ * Project:  OpenGIS Simple Features Reference Implementation
+ * Purpose:  Implements OGRMiraMonLayer class.
+ * Author: Abel Pau + ****************************************************************************** + * Copyright (c) 2024, Xavier Pons + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + ****************************************************************************/ +#include "ogrmiramon.h" + +#include "mm_gdal_functions.h" // For MMCreateExtendedDBFIndex() +#include "mm_rdlayr.h" // For MMInitLayerToRead() +#include <algorithm> // For std::clamp() +#include <string> // For std::string +#include <algorithm> // For std::max + +/****************************************************************************/ +/* OGRMiraMonLayer() */ +/****************************************************************************/ +OGRMiraMonLayer::OGRMiraMonLayer(GDALDataset *poDS, const char *pszFilename, + VSILFILE *fp, const OGRSpatialReference *poSRS, + int bUpdateIn, CSLConstList papszOpenOptions, + struct MiraMonVectMapInfo *MMMap) + : m_poDS(poDS), m_poSRS(nullptr), m_poFeatureDefn(nullptr), m_iNextFID(0), + phMiraMonLayer(nullptr), hMiraMonLayerPNT(), hMiraMonLayerARC(), + hMiraMonLayerPOL(), hMiraMonLayerReadOrNonGeom(), hMMFeature(), + m_bUpdate(CPL_TO_BOOL(bUpdateIn)), + m_fp(fp ? fp : VSIFOpenL(pszFilename, (bUpdateIn ? 
"r+" : "r"))), + padfValues(nullptr), pnInt64Values(nullptr), bValidFile(false) +{ + + CPLDebugOnly("MiraMon", "Creating/Opening MiraMon layer..."); + /* -------------------------------------------------------------------- */ + /* Create the feature definition */ + /* -------------------------------------------------------------------- */ + m_poFeatureDefn = new OGRFeatureDefn(CPLGetBasename(pszFilename)); + SetDescription(m_poFeatureDefn->GetName()); + m_poFeatureDefn->Reference(); + + if (m_bUpdate) + { + /* ---------------------------------------------------------------- */ + /* Establish the version to use */ + /* ---------------------------------------------------------------- */ + const char *pszVersion = CSLFetchNameValue(papszOpenOptions, "Version"); + int nMMVersion; + + if (pszVersion) + { + if (EQUAL(pszVersion, "V1.1")) + nMMVersion = MM_32BITS_VERSION; + else if (EQUAL(pszVersion, "V2.0") || + EQUAL(pszVersion, "last_version")) + nMMVersion = MM_64BITS_VERSION; + else + nMMVersion = MM_32BITS_VERSION; // Default + } + else + nMMVersion = MM_32BITS_VERSION; // Default + + /* ---------------------------------------------------------------- */ + /* Establish the charset of the .dbf files */ + /* ---------------------------------------------------------------- */ + const char *pszdbfEncoding = + CSLFetchNameValue(papszOpenOptions, "DBFEncoding"); + char nMMRecode; + + if (pszdbfEncoding) + { + if (EQUAL(pszdbfEncoding, "UTF8")) + nMMRecode = MM_RECODE_UTF8; + else //if (EQUAL(pszdbfEncoding, "ANSI")) + nMMRecode = MM_RECODE_ANSI; + } + else + nMMRecode = MM_RECODE_ANSI; // Default + + /* ----------------------------------------------------------------- */ + /* Establish the descriptors language when */ + /* creating .rel files */ + /* ----------------------------------------------------------------- */ + const char *pszLanguage = + CSLFetchNameValue(papszOpenOptions, "CreationLanguage"); + char nMMLanguage; + + if (pszLanguage) + { + if (EQUAL(pszLanguage, "CAT")) + nMMLanguage = MM_CAT_LANGUAGE; + else if (EQUAL(pszLanguage, "SPA")) + nMMLanguage = MM_SPA_LANGUAGE; + else + nMMLanguage = MM_ENG_LANGUAGE; + } + else + nMMLanguage = MM_DEF_LANGUAGE; // Default + + /* ---------------------------------------------------------------- */ + /* Preparing to write the layer */ + /* ---------------------------------------------------------------- */ + // Init the feature (memory, num,...) + if (MMInitFeature(&hMMFeature)) + { + bValidFile = false; + return; + } + + // Init the Layers (not in disk, only in memory until + // the first element is read) + CPLDebugOnly("MiraMon", "Initializing MiraMon points layer..."); + if (MMInitLayer(&hMiraMonLayerPNT, pszFilename, nMMVersion, nMMRecode, + nMMLanguage, nullptr, MM_WRITING_MODE, MMMap)) + { + bValidFile = false; + return; + } + hMiraMonLayerPNT.bIsBeenInit = 0; + + CPLDebugOnly("MiraMon", "Initializing MiraMon arcs layer..."); + if (MMInitLayer(&hMiraMonLayerARC, pszFilename, nMMVersion, nMMRecode, + nMMLanguage, nullptr, MM_WRITING_MODE, MMMap)) + { + bValidFile = false; + return; + } + hMiraMonLayerARC.bIsBeenInit = 0; + + CPLDebugOnly("MiraMon", "Initializing MiraMon polygons layer..."); + if (MMInitLayer(&hMiraMonLayerPOL, pszFilename, nMMVersion, nMMRecode, + nMMLanguage, nullptr, MM_WRITING_MODE, MMMap)) + { + bValidFile = false; + return; + } + hMiraMonLayerPOL.bIsBeenInit = 0; + + // Just in case that there is no geometry but some other + // information to get. 
A DBF will be generated + CPLDebugOnly("MiraMon", "Initializing MiraMon only-ext-DBF layer..."); + if (MMInitLayer(&hMiraMonLayerReadOrNonGeom, pszFilename, nMMVersion, + nMMRecode, nMMLanguage, nullptr, MM_WRITING_MODE, + nullptr)) + { + bValidFile = false; + return; + } + hMiraMonLayerPOL.bIsBeenInit = 0; + + // This helps the map to be created + //GetLayerDefn()->SetName(hMiraMonLayerPNT.pszSrcLayerName); + m_poFeatureDefn->SetName(hMiraMonLayerPNT.pszSrcLayerName); + + // Saving the HRS in the layer structure + if (poSRS) + { + const char *pszAuthorityName = poSRS->GetAuthorityName(nullptr); + const char *pszAuthorityCode = poSRS->GetAuthorityCode(nullptr); + + if (pszAuthorityName && pszAuthorityCode && + EQUAL(pszAuthorityName, "EPSG")) + { + CPLDebugOnly("MiraMon", "Setting EPSG code %s", + pszAuthorityCode); + hMiraMonLayerPNT.pSRS = CPLStrdup(pszAuthorityCode); + hMiraMonLayerARC.pSRS = CPLStrdup(pszAuthorityCode); + hMiraMonLayerPOL.pSRS = CPLStrdup(pszAuthorityCode); + } + } + } + else + { + if (m_fp == nullptr) + { + bValidFile = false; + return; + } + + /* ------------------------------------------------------------------*/ + /* Read the header. */ + /* ------------------------------------------------------------------*/ + int nMMLayerVersion; + + if (MMInitLayerToRead(&hMiraMonLayerReadOrNonGeom, m_fp, pszFilename)) + { + phMiraMonLayer = &hMiraMonLayerReadOrNonGeom; + bValidFile = false; + return; + } + phMiraMonLayer = &hMiraMonLayerReadOrNonGeom; + + nMMLayerVersion = MMGetVectorVersion(&phMiraMonLayer->TopHeader); + if (nMMLayerVersion == MM_UNKNOWN_VERSION) + { + CPLError(CE_Failure, CPLE_NotSupported, + "MiraMon version file unknown."); + bValidFile = false; + return; + } + if (phMiraMonLayer->bIsPoint) + { + if (phMiraMonLayer->TopHeader.bIs3d) + m_poFeatureDefn->SetGeomType(wkbPoint25D); + else + m_poFeatureDefn->SetGeomType(wkbPoint); + } + else if (phMiraMonLayer->bIsArc && !phMiraMonLayer->bIsPolygon) + { + if (phMiraMonLayer->TopHeader.bIs3d) + m_poFeatureDefn->SetGeomType(wkbLineString25D); + else + m_poFeatureDefn->SetGeomType(wkbLineString); + } + else if (phMiraMonLayer->bIsPolygon) + { + // 3D + if (phMiraMonLayer->TopHeader.bIs3d) + { + if (phMiraMonLayer->TopHeader.bIsMultipolygon) + m_poFeatureDefn->SetGeomType(wkbMultiPolygon25D); + else + m_poFeatureDefn->SetGeomType(wkbPolygon25D); + } + else + { + if (phMiraMonLayer->TopHeader.bIsMultipolygon) + m_poFeatureDefn->SetGeomType(wkbMultiPolygon); + else + m_poFeatureDefn->SetGeomType(wkbPolygon); + } + } + else + { + CPLError(CE_Failure, CPLE_NotSupported, + "MiraMon file type not supported."); + bValidFile = false; + return; + } + + if (phMiraMonLayer->TopHeader.bIs3d) + { + const char *szHeight = + CSLFetchNameValue(papszOpenOptions, "Height"); + if (szHeight) + { + if (EQUAL(szHeight, "Highest")) + phMiraMonLayer->nSelectCoordz = MM_SELECT_HIGHEST_COORDZ; + else if (EQUAL(szHeight, "Lowest")) + phMiraMonLayer->nSelectCoordz = MM_SELECT_LOWEST_COORDZ; + else + phMiraMonLayer->nSelectCoordz = MM_SELECT_FIRST_COORDZ; + } + else + phMiraMonLayer->nSelectCoordz = MM_SELECT_FIRST_COORDZ; + } + + /* ------------------------------------------------------------ */ + /* Establish the descriptors language when */ + /* opening .rel files */ + /* ------------------------------------------------------------ */ + const char *pszLanguage = + CSLFetchNameValue(papszOpenOptions, "OpenLanguage"); + + if (pszLanguage) + { + if (EQUAL(pszLanguage, "CAT")) + phMiraMonLayer->nMMLanguage = MM_CAT_LANGUAGE; + else if 
(EQUAL(pszLanguage, "SPA")) + phMiraMonLayer->nMMLanguage = MM_SPA_LANGUAGE; + else + phMiraMonLayer->nMMLanguage = MM_ENG_LANGUAGE; + } + else + phMiraMonLayer->nMMLanguage = MM_DEF_LANGUAGE; // Default + + if (phMiraMonLayer->nSRS_EPSG != 0) + { + m_poSRS = new OGRSpatialReference(); + m_poSRS->SetAxisMappingStrategy(OAMS_TRADITIONAL_GIS_ORDER); + if (m_poSRS->importFromEPSG(phMiraMonLayer->nSRS_EPSG) != + OGRERR_NONE) + { + delete m_poSRS; + m_poSRS = nullptr; + } + else + m_poFeatureDefn->GetGeomFieldDefn(0)->SetSpatialRef(m_poSRS); + } + + // If there is associated information + if (phMiraMonLayer->pMMBDXP) + { + if (!phMiraMonLayer->pMMBDXP->pfDataBase) + { + if ((phMiraMonLayer->pMMBDXP->pfDataBase = fopen_function( + phMiraMonLayer->pMMBDXP->szFileName, "r")) == nullptr) + { + CPLDebugOnly("MiraMon", "File '%s' cannot be opened.", + phMiraMonLayer->pMMBDXP->szFileName); + bValidFile = false; + return; + } + + if (phMiraMonLayer->pMMBDXP->nFields == 0) + { + // TODO: is this correct? At least this prevents a + // nullptr dereference of phMiraMonLayer->pMMBDXP->pField + // below + CPLDebug("MiraMon", + "phMiraMonLayer->pMMBDXP->nFields == 0"); + bValidFile = false; + return; + } + + // First time we open the extended DBF we create an index + // to fastly find all non geometrical features. + phMiraMonLayer->pMultRecordIndex = MMCreateExtendedDBFIndex( + phMiraMonLayer->pMMBDXP->pfDataBase, + phMiraMonLayer->pMMBDXP->nRecords, + phMiraMonLayer->pMMBDXP->FirstRecordOffset, + phMiraMonLayer->pMMBDXP->BytesPerRecord, + phMiraMonLayer->pMMBDXP + ->pField[phMiraMonLayer->pMMBDXP->IdGraficField] + .AccumulatedBytes, + phMiraMonLayer->pMMBDXP + ->pField[phMiraMonLayer->pMMBDXP->IdGraficField] + .BytesPerField, + &phMiraMonLayer->isListField, &phMiraMonLayer->nMaxN); + + // Creation of maximum number needed for processing + // multiple records + if (phMiraMonLayer->pMultRecordIndex) + { + padfValues = static_cast<double *>(CPLCalloc( + (size_t)phMiraMonLayer->nMaxN, sizeof(*padfValues))); + + pnInt64Values = static_cast<GInt64 *>(CPLCalloc( + (size_t)phMiraMonLayer->nMaxN, sizeof(*pnInt64Values))); + } + + phMiraMonLayer->iMultiRecord = + MM_MULTIRECORD_NO_MULTIRECORD; // No option iMultiRecord + const char *szMultiRecord = + CSLFetchNameValue(papszOpenOptions, "MultiRecordIndex"); + if (phMiraMonLayer->isListField && szMultiRecord) + { + if (EQUAL(szMultiRecord, "Last")) + phMiraMonLayer->iMultiRecord = MM_MULTIRECORD_LAST; + else if (EQUAL(szMultiRecord, "JSON")) + phMiraMonLayer->iMultiRecord = MM_MULTIRECORD_JSON; + else + phMiraMonLayer->iMultiRecord = atoi(szMultiRecord); + } + } + + for (MM_EXT_DBF_N_FIELDS nIField = 0; + nIField < phMiraMonLayer->pMMBDXP->nFields; nIField++) + { + OGRFieldDefn oField("", OFTString); + oField.SetName( + phMiraMonLayer->pMMBDXP->pField[nIField].FieldName); + + oField.SetAlternativeName( + phMiraMonLayer->pMMBDXP->pField[nIField] + .FieldDescription[phMiraMonLayer->nMMLanguage < + MM_NUM_IDIOMES_MD_MULTIDIOMA + ? phMiraMonLayer->nMMLanguage + : 0]); + + if (phMiraMonLayer->pMMBDXP->pField[nIField].FieldType == 'C') + { + // It's a list? 
+ if (phMiraMonLayer->iMultiRecord == + MM_MULTIRECORD_NO_MULTIRECORD) + { + if (phMiraMonLayer->isListField) + oField.SetType(OFTStringList); + else + oField.SetType(OFTString); + } + // It's a serialized JSON array + else if (phMiraMonLayer->iMultiRecord == + MM_MULTIRECORD_JSON) + { + oField.SetType(OFTString); + oField.SetSubType(OFSTJSON); + } + else // iMultiRecord decides which Record translate + oField.SetType(OFTString); + } + else if (phMiraMonLayer->pMMBDXP->pField[nIField].FieldType == + 'N') + { + // It's a list? + if (phMiraMonLayer->iMultiRecord == + MM_MULTIRECORD_NO_MULTIRECORD) + { + if (phMiraMonLayer->pMMBDXP->pField[nIField] + .DecimalsIfFloat) + oField.SetType(phMiraMonLayer->isListField + ? OFTRealList + : OFTReal); + else + oField.SetType(phMiraMonLayer->isListField + ? OFTIntegerList + : OFTInteger); + } + // It's a serialized JSON array + else if (phMiraMonLayer->iMultiRecord == + MM_MULTIRECORD_JSON) + { + oField.SetType(OFTString); + oField.SetSubType(OFSTJSON); + } + else + { + if (phMiraMonLayer->pMMBDXP->pField[nIField] + .DecimalsIfFloat) + oField.SetType(OFTReal); + else + oField.SetType(OFTInteger); + } + } + else if (phMiraMonLayer->pMMBDXP->pField[nIField].FieldType == + 'D') + { + // It's a serialized JSON array + oField.SetType(OFTDate); + if (phMiraMonLayer->iMultiRecord == MM_MULTIRECORD_JSON) + { + oField.SetType(OFTString); + oField.SetSubType(OFSTJSON); + } + } + + oField.SetWidth( + phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField); + oField.SetPrecision( + phMiraMonLayer->pMMBDXP->pField[nIField].DecimalsIfFloat); + + m_poFeatureDefn->AddFieldDefn(&oField); + } + } + } + + bValidFile = true; +} + +/****************************************************************************/ +/* ~OGRMiraMonLayer() */ +/****************************************************************************/ + +OGRMiraMonLayer::~OGRMiraMonLayer() + +{ + if (m_nFeaturesRead > 0 && m_poFeatureDefn != nullptr) + { + CPLDebugOnly("MiraMon", "%d features read on layer '%s'.", + static_cast<int>(m_nFeaturesRead), + m_poFeatureDefn->GetName()); + } + + if (hMiraMonLayerPOL.bIsPolygon) + { + CPLDebugOnly("MiraMon", "Closing MiraMon polygons layer..."); + if (MMCloseLayer(&hMiraMonLayerPOL)) + { + CPLDebugOnly("MiraMon", "Error closing polygons layer"); + } + if (hMiraMonLayerPOL.TopHeader.nElemCount) + { + CPLDebugOnly("MiraMon", + sprintf_UINT64 " polygon(s) written in file %s.pol", + hMiraMonLayerPOL.TopHeader.nElemCount, + hMiraMonLayerPOL.pszSrcLayerName); + } + CPLDebugOnly("MiraMon", "MiraMon polygons layer closed"); + } + else if (hMiraMonLayerPOL.ReadOrWrite == MM_WRITING_MODE) + { + CPLDebugOnly("MiraMon", "No MiraMon polygons layer created."); + } + + if (hMiraMonLayerARC.bIsArc) + { + CPLDebugOnly("MiraMon", "Closing MiraMon arcs layer..."); + if (MMCloseLayer(&hMiraMonLayerARC)) + { + CPLDebugOnly("MiraMon", "Error closing arcs layer"); + } + if (hMiraMonLayerARC.TopHeader.nElemCount) + { + CPLDebugOnly("MiraMon", + sprintf_UINT64 " arc(s) written in file %s.arc", + hMiraMonLayerARC.TopHeader.nElemCount, + hMiraMonLayerARC.pszSrcLayerName); + } + + CPLDebugOnly("MiraMon", "MiraMon arcs layer closed"); + } + else if (hMiraMonLayerARC.ReadOrWrite == MM_WRITING_MODE) + { + CPLDebugOnly("MiraMon", "No MiraMon arcs layer created."); + } + + if (hMiraMonLayerPNT.bIsPoint) + { + CPLDebugOnly("MiraMon", "Closing MiraMon points layer..."); + if (MMCloseLayer(&hMiraMonLayerPNT)) + { + CPLDebugOnly("MiraMon", "Error closing points layer"); + } + if 
(hMiraMonLayerPNT.TopHeader.nElemCount) + { + CPLDebugOnly("MiraMon", + sprintf_UINT64 " point(s) written in file %s.pnt", + hMiraMonLayerPNT.TopHeader.nElemCount, + hMiraMonLayerPNT.pszSrcLayerName); + } + CPLDebugOnly("MiraMon", "MiraMon points layer closed"); + } + else if (hMiraMonLayerPNT.ReadOrWrite == MM_WRITING_MODE) + { + CPLDebugOnly("MiraMon", "No MiraMon points layer created."); + } + + if (hMiraMonLayerARC.ReadOrWrite == MM_WRITING_MODE) + { + if (hMiraMonLayerReadOrNonGeom.bIsDBF) + { + if (hMiraMonLayerReadOrNonGeom.ReadOrWrite == MM_WRITING_MODE) + { + CPLDebugOnly("MiraMon", "Closing MiraMon DBF table ..."); + } + MMCloseLayer(&hMiraMonLayerReadOrNonGeom); + if (hMiraMonLayerReadOrNonGeom.ReadOrWrite == MM_WRITING_MODE) + { + CPLDebugOnly("MiraMon", "MiraMon DBF table closed"); + } + } + else if (hMiraMonLayerReadOrNonGeom.ReadOrWrite == MM_WRITING_MODE) + { + CPLDebugOnly("MiraMon", "No MiraMon DBF table created."); + } + } + else + { + if (hMiraMonLayerReadOrNonGeom.ReadOrWrite == MM_WRITING_MODE) + { + CPLDebugOnly("MiraMon", "Closing MiraMon layer ..."); + } + MMCloseLayer(&hMiraMonLayerReadOrNonGeom); + if (hMiraMonLayerReadOrNonGeom.ReadOrWrite == MM_WRITING_MODE) + { + CPLDebugOnly("MiraMon", "MiraMon layer closed"); + } + } + + if (hMiraMonLayerPOL.ReadOrWrite == MM_WRITING_MODE) + { + MMCPLDebug("MiraMon", "Destroying MiraMon polygons layer memory"); + } + MMDestroyLayer(&hMiraMonLayerPOL); + if (hMiraMonLayerPOL.ReadOrWrite == MM_WRITING_MODE) + { + MMCPLDebug("MiraMon", "MiraMon polygons layer memory destroyed"); + } + + if (hMiraMonLayerARC.ReadOrWrite == MM_WRITING_MODE) + { + MMCPLDebug("MiraMon", "Destroying MiraMon arcs layer memory"); + } + MMDestroyLayer(&hMiraMonLayerARC); + if (hMiraMonLayerARC.ReadOrWrite == MM_WRITING_MODE) + { + MMCPLDebug("MiraMon", "MiraMon arcs layer memory destroyed"); + } + + if (hMiraMonLayerPNT.ReadOrWrite == MM_WRITING_MODE) + { + MMCPLDebug("MiraMon", "Destroying MiraMon points layer memory"); + } + MMDestroyLayer(&hMiraMonLayerPNT); + if (hMiraMonLayerPNT.ReadOrWrite == MM_WRITING_MODE) + { + MMCPLDebug("MiraMon", "MiraMon points layer memory destroyed"); + } + + if (hMiraMonLayerReadOrNonGeom.ReadOrWrite == MM_WRITING_MODE) + { + MMCPLDebug("MiraMon", "Destroying MiraMon DBF table layer memory"); + } + else + { + MMCPLDebug("MiraMon", "Destroying MiraMon layer memory"); + } + + MMDestroyLayer(&hMiraMonLayerReadOrNonGeom); + if (hMiraMonLayerReadOrNonGeom.ReadOrWrite == MM_WRITING_MODE) + { + MMCPLDebug("MiraMon", "MiraMon DBF table layer memory destroyed"); + } + else + { + MMCPLDebug("MiraMon", "MiraMon layer memory destroyed"); + } + + memset(&hMiraMonLayerReadOrNonGeom, 0, sizeof(hMiraMonLayerReadOrNonGeom)); + memset(&hMiraMonLayerPNT, 0, sizeof(hMiraMonLayerPNT)); + memset(&hMiraMonLayerARC, 0, sizeof(hMiraMonLayerARC)); + memset(&hMiraMonLayerPOL, 0, sizeof(hMiraMonLayerPOL)); + + MMCPLDebug("MiraMon", "Destroying MiraMon temporary feature memory"); + MMDestroyFeature(&hMMFeature); + MMCPLDebug("MiraMon", "MiraMon temporary feature memory"); + memset(&hMMFeature, 0, sizeof(hMMFeature)); + + /* -------------------------------------------------------------------- */ + /* Clean up. 
*/ + /* -------------------------------------------------------------------- */ + + if (m_poFeatureDefn) + m_poFeatureDefn->Release(); + + if (m_poSRS) + m_poSRS->Release(); + + if (m_fp != nullptr) + VSIFCloseL(m_fp); + + if (padfValues != nullptr) + CPLFree(padfValues); + + if (pnInt64Values != nullptr) + CPLFree(pnInt64Values); +} + +/****************************************************************************/ +/* ResetReading() */ +/****************************************************************************/ + +void OGRMiraMonLayer::ResetReading() + +{ + if (m_iNextFID == 0) + return; + + m_iNextFID = 0; + + //VSIFSeekL(m_fp, 0, SEEK_SET); + if (!phMiraMonLayer) + return; + + if (phMiraMonLayer->bIsPoint && phMiraMonLayer->MMPoint.pF) + { + VSIFSeekL(phMiraMonLayer->MMPoint.pF, 0, SEEK_SET); + return; + } + if (phMiraMonLayer->bIsArc && !phMiraMonLayer->bIsPolygon && + phMiraMonLayer->MMArc.pF) + { + VSIFSeekL(phMiraMonLayer->MMArc.pF, 0, SEEK_SET); + return; + } + if (phMiraMonLayer->bIsPolygon && phMiraMonLayer->MMPolygon.pF) + { + VSIFSeekL(phMiraMonLayer->MMPolygon.pF, 0, SEEK_SET); + return; + } +} + +/****************************************************************************/ +/* GetNextRawFeature() */ +/****************************************************************************/ + +void OGRMiraMonLayer::GoToFieldOfMultipleRecord(MM_INTERNAL_FID iFID, + MM_EXT_DBF_N_RECORDS nIRecord, + MM_EXT_DBF_N_FIELDS nIField) + +{ + // Not an error. Simply there are no features, but there are fields + if (!phMiraMonLayer->pMultRecordIndex) + return; + + fseek_function( + phMiraMonLayer->pMMBDXP->pfDataBase, + phMiraMonLayer->pMultRecordIndex[iFID].offset + + (MM_FILE_OFFSET)nIRecord * phMiraMonLayer->pMMBDXP->BytesPerRecord + + phMiraMonLayer->pMMBDXP->pField[nIField].AccumulatedBytes, + SEEK_SET); +} + +/****************************************************************************/ +/* GetNextRawFeature() */ +/****************************************************************************/ + +OGRFeature *OGRMiraMonLayer::GetNextRawFeature() +{ + if (!phMiraMonLayer) + return nullptr; + + if (m_iNextFID >= (GUInt64)phMiraMonLayer->TopHeader.nElemCount) + return nullptr; + + OGRFeature *poFeature = GetFeature(m_iNextFID); + + if (!poFeature) + return nullptr; + + m_iNextFID++; + return poFeature; +} + +/****************************************************************************/ +/* GetFeature() */ +/****************************************************************************/ + +OGRFeature *OGRMiraMonLayer::GetFeature(GIntBig nFeatureId) + +{ + OGRGeometry *poGeom = nullptr; + OGRPoint *poPoint = nullptr; + OGRLineString *poLS = nullptr; + MM_INTERNAL_FID nIElem; + MM_EXT_DBF_N_MULTIPLE_RECORDS nIRecord = 0; + + if (!phMiraMonLayer) + return nullptr; + + if (nFeatureId < 0) + return nullptr; + + if (phMiraMonLayer->bIsPolygon) + { + if (nFeatureId == GINTBIG_MAX) + return nullptr; + + nIElem = (MM_INTERNAL_FID)(nFeatureId + 1); + } + else + nIElem = (MM_INTERNAL_FID)nFeatureId; + + if (nIElem >= phMiraMonLayer->TopHeader.nElemCount) + return nullptr; + + /* -------------------------------------------------------------------- */ + /* Read nFeatureId feature directly from the file. 
*/ + /* -------------------------------------------------------------------- */ + if (nIElem < phMiraMonLayer->TopHeader.nElemCount) + { + switch (phMiraMonLayer->eLT) + { + case MM_LayerType_Point: + case MM_LayerType_Point3d: + // Read point + poGeom = new OGRPoint(); + poPoint = poGeom->toPoint(); + + // Get X,Y (z). MiraMon has no multipoints + if (MMGetGeoFeatureFromVector(phMiraMonLayer, nIElem)) + { + delete poGeom; + return nullptr; + } + + poPoint->setX(phMiraMonLayer->ReadFeature.pCoord[0].dfX); + poPoint->setY(phMiraMonLayer->ReadFeature.pCoord[0].dfY); + if (phMiraMonLayer->TopHeader.bIs3d) + poPoint->setZ(phMiraMonLayer->ReadFeature.pZCoord[0]); + break; + + case MM_LayerType_Arc: + case MM_LayerType_Arc3d: + poGeom = new OGRLineString(); + poLS = poGeom->toLineString(); + + // Get X,Y (Z) n times MiraMon has no multilines + if (MMGetGeoFeatureFromVector(phMiraMonLayer, nIElem)) + { + delete poGeom; + return nullptr; + } + + for (MM_N_VERTICES_TYPE nIVrt = 0; + nIVrt < phMiraMonLayer->ReadFeature.pNCoordRing[0]; + nIVrt++) + { + if (phMiraMonLayer->TopHeader.bIs3d) + poLS->addPoint( + phMiraMonLayer->ReadFeature.pCoord[nIVrt].dfX, + phMiraMonLayer->ReadFeature.pCoord[nIVrt].dfY, + phMiraMonLayer->ReadFeature.pZCoord[nIVrt]); + else + poLS->addPoint( + phMiraMonLayer->ReadFeature.pCoord[nIVrt].dfX, + phMiraMonLayer->ReadFeature.pCoord[nIVrt].dfY); + } + break; + + case MM_LayerType_Pol: + case MM_LayerType_Pol3d: + // Read polygon + auto poPoly = std::make_unique<OGRPolygon>(); + MM_POLYGON_RINGS_COUNT nIRing; + MM_N_VERTICES_TYPE nIVrtAcum; + + if (phMiraMonLayer->TopHeader.bIsMultipolygon) + { + OGRMultiPolygon *poMP = nullptr; + + poGeom = new OGRMultiPolygon(); + poMP = poGeom->toMultiPolygon(); + + // Get X,Y (Z) n times MiraMon has no multilines + if (MMGetGeoFeatureFromVector(phMiraMonLayer, nIElem)) + { + delete poGeom; + return nullptr; + } + + nIVrtAcum = 0; + if (!phMiraMonLayer->bIsPolygon && + !(phMiraMonLayer->ReadFeature.flag_VFG[0] & + MM_EXTERIOR_ARC_SIDE)) + { + CPLError(CE_Failure, CPLE_NoWriteAccess, + "\nWrong polygon format."); + delete poGeom; + return nullptr; + } + + for (nIRing = 0; + nIRing < phMiraMonLayer->ReadFeature.nNRings; nIRing++) + { + auto poRing = std::make_unique<OGRLinearRing>(); + + for (MM_N_VERTICES_TYPE nIVrt = 0; + nIVrt < + phMiraMonLayer->ReadFeature.pNCoordRing[nIRing]; + nIVrt++) + { + if (phMiraMonLayer->TopHeader.bIs3d) + { + poRing->addPoint(phMiraMonLayer->ReadFeature + .pCoord[nIVrtAcum] + .dfX, + phMiraMonLayer->ReadFeature + .pCoord[nIVrtAcum] + .dfY, + phMiraMonLayer->ReadFeature + .pZCoord[nIVrtAcum]); + } + else + { + poRing->addPoint(phMiraMonLayer->ReadFeature + .pCoord[nIVrtAcum] + .dfX, + phMiraMonLayer->ReadFeature + .pCoord[nIVrtAcum] + .dfY); + } + + nIVrtAcum++; + } + + // If I'm going to start a new polygon... 
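+                        // A ring whose VFG flag has MM_EXTERIOR_ARC_SIDE set opens
+                        // a new polygon, so the current polygon is closed and added
+                        // to the multipolygon either when the next ring is exterior
+                        // or when this was the last ring of the feature.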
+ if ((nIRing + 1 < phMiraMonLayer->ReadFeature.nNRings && + ((phMiraMonLayer->ReadFeature + .flag_VFG[nIRing + 1]) & + MM_EXTERIOR_ARC_SIDE)) || + nIRing + 1 >= phMiraMonLayer->ReadFeature.nNRings) + { + poPoly->addRingDirectly(poRing.release()); + poMP->addGeometryDirectly(poPoly.release()); + poPoly = std::make_unique<OGRPolygon>(); + } + else + poPoly->addRingDirectly(poRing.release()); + } + } + else + { + OGRPolygon *poP = nullptr; + + poGeom = new OGRPolygon(); + poP = poGeom->toPolygon(); + + // Get X,Y (Z) n times because MiraMon has no multilinetrings + if (MMGetGeoFeatureFromVector(phMiraMonLayer, nIElem)) + { + delete poGeom; + return nullptr; + } + + if (phMiraMonLayer->ReadFeature.nNRings && + phMiraMonLayer->ReadFeature.nNumpCoord) + { + nIVrtAcum = 0; + if (!(phMiraMonLayer->ReadFeature.flag_VFG[0] & + MM_EXTERIOR_ARC_SIDE)) + { + CPLError(CE_Failure, CPLE_AssertionFailed, + "\nWrong polygon format."); + delete poGeom; + return nullptr; + } + + for (nIRing = 0; + nIRing < phMiraMonLayer->ReadFeature.nNRings; + nIRing++) + { + auto poRing = std::make_unique<OGRLinearRing>(); + + for (MM_N_VERTICES_TYPE nIVrt = 0; + nIVrt < phMiraMonLayer->ReadFeature + .pNCoordRing[nIRing]; + nIVrt++) + { + if (phMiraMonLayer->TopHeader.bIs3d) + { + poRing->addPoint(phMiraMonLayer->ReadFeature + .pCoord[nIVrtAcum] + .dfX, + phMiraMonLayer->ReadFeature + .pCoord[nIVrtAcum] + .dfY, + phMiraMonLayer->ReadFeature + .pZCoord[nIVrtAcum]); + } + else + { + poRing->addPoint(phMiraMonLayer->ReadFeature + .pCoord[nIVrtAcum] + .dfX, + phMiraMonLayer->ReadFeature + .pCoord[nIVrtAcum] + .dfY); + } + + nIVrtAcum++; + } + poP->addRingDirectly(poRing.release()); + } + } + } + + break; + } + + if (poGeom == nullptr) + return nullptr; + } + + /* -------------------------------------------------------------------- */ + /* Create feature. */ + /* -------------------------------------------------------------------- */ + auto poFeature = std::make_unique<OGRFeature>(m_poFeatureDefn); + if (poGeom) + { + poGeom->assignSpatialReference(m_poSRS); + poFeature->SetGeometryDirectly(poGeom); + } + + /* -------------------------------------------------------------------- */ + /* Process field values if its possible. 
*/ + /* -------------------------------------------------------------------- */ + if (phMiraMonLayer->pMMBDXP && + (MM_EXT_DBF_N_RECORDS)nIElem < phMiraMonLayer->pMMBDXP->nRecords) + { + MM_EXT_DBF_N_FIELDS nIField; + + for (nIField = 0; nIField < phMiraMonLayer->pMMBDXP->nFields; nIField++) + { + if (MMResizeStringToOperateIfNeeded( + phMiraMonLayer, + phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField)) + { + return nullptr; + } + + if (poFeature->GetDefnRef()->GetFieldDefn(nIField)->GetType() == + OFTStringList || + (poFeature->GetDefnRef()->GetFieldDefn(nIField)->GetType() == + OFTString && + poFeature->GetDefnRef()->GetFieldDefn(nIField)->GetSubType() == + OFSTJSON)) + { + if (!phMiraMonLayer->pMultRecordIndex || + phMiraMonLayer->pMultRecordIndex[nIElem].nMR == 0) + { + memset( + phMiraMonLayer->szStringToOperate, 0, + phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField); + continue; + } + if (poFeature->GetDefnRef() + ->GetFieldDefn(nIField) + ->GetSubType() == OFSTJSON) + { + if (MMResizeStringToOperateIfNeeded( + phMiraMonLayer, + phMiraMonLayer->pMMBDXP->BytesPerRecord + + 2 * phMiraMonLayer->pMultRecordIndex[nIElem] + .nMR + + 8)) + { + return nullptr; + } + std::string szStringToOperate = "["; + for (nIRecord = 0; + nIRecord < + phMiraMonLayer->pMultRecordIndex[nIElem].nMR; + nIRecord++) + { + GoToFieldOfMultipleRecord(nIElem, nIRecord, nIField); + + fread_function(phMiraMonLayer->szStringToOperate, + phMiraMonLayer->pMMBDXP->pField[nIField] + .BytesPerField, + 1, phMiraMonLayer->pMMBDXP->pfDataBase); + phMiraMonLayer + ->szStringToOperate[phMiraMonLayer->pMMBDXP + ->pField[nIField] + .BytesPerField] = '\0'; + MM_RemoveLeadingWhitespaceOfString( + phMiraMonLayer->szStringToOperate); + MM_RemoveWhitespacesFromEndOfString( + phMiraMonLayer->szStringToOperate); + + if (phMiraMonLayer->pMMBDXP->CharSet == + MM_JOC_CARAC_OEM850_DBASE) + MM_oemansi_n( + phMiraMonLayer->szStringToOperate, + phMiraMonLayer->pMMBDXP->pField[nIField] + .BytesPerField); + + if (phMiraMonLayer->pMMBDXP->CharSet != + MM_JOC_CARAC_UTF8_DBF) + { + // MiraMon encoding is ISO 8859-1 (Latin1) -> Recode to UTF-8 + char *pszString = + CPLRecode(phMiraMonLayer->szStringToOperate, + CPL_ENC_ISO8859_1, CPL_ENC_UTF8); + + CPLStrlcpy( + phMiraMonLayer->szStringToOperate, pszString, + (size_t)phMiraMonLayer->pMMBDXP->pField[nIField] + .BytesPerField + + 1); + + CPLFree(pszString); + } + szStringToOperate.append( + phMiraMonLayer->szStringToOperate); + + if (nIRecord < + phMiraMonLayer->pMultRecordIndex[nIElem].nMR - 1) + { + szStringToOperate.append(","); + } + else + { + szStringToOperate.append("]"); + } + } + poFeature->SetField(nIField, szStringToOperate.c_str()); + } + else + { + CPLStringList aosValues; + for (nIRecord = 0; + nIRecord < + phMiraMonLayer->pMultRecordIndex[nIElem].nMR; + nIRecord++) + { + GoToFieldOfMultipleRecord(nIElem, nIRecord, nIField); + memset(phMiraMonLayer->szStringToOperate, 0, + phMiraMonLayer->pMMBDXP->pField[nIField] + .BytesPerField); + fread_function(phMiraMonLayer->szStringToOperate, + phMiraMonLayer->pMMBDXP->pField[nIField] + .BytesPerField, + 1, phMiraMonLayer->pMMBDXP->pfDataBase); + phMiraMonLayer + ->szStringToOperate[phMiraMonLayer->pMMBDXP + ->pField[nIField] + .BytesPerField] = '\0'; + MM_RemoveWhitespacesFromEndOfString( + phMiraMonLayer->szStringToOperate); + + if (phMiraMonLayer->pMMBDXP->CharSet == + MM_JOC_CARAC_OEM850_DBASE) + MM_oemansi_n( + phMiraMonLayer->szStringToOperate, + phMiraMonLayer->pMMBDXP->pField[nIField] + .BytesPerField); + + if 
(phMiraMonLayer->pMMBDXP->CharSet != + MM_JOC_CARAC_UTF8_DBF) + { + // MiraMon encoding is ISO 8859-1 (Latin1) -> Recode to UTF-8 + char *pszString = + CPLRecode(phMiraMonLayer->szStringToOperate, + CPL_ENC_ISO8859_1, CPL_ENC_UTF8); + + CPLStrlcpy( + phMiraMonLayer->szStringToOperate, pszString, + (size_t)phMiraMonLayer->pMMBDXP->pField[nIField] + .BytesPerField + + 1); + + CPLFree(pszString); + } + aosValues.AddString(phMiraMonLayer->szStringToOperate); + } + poFeature->SetField(nIField, aosValues.List()); + } + } + else if (poFeature->GetDefnRef() + ->GetFieldDefn(nIField) + ->GetType() == OFTString) + { + if (!phMiraMonLayer->pMultRecordIndex || + phMiraMonLayer->pMultRecordIndex[nIElem].nMR == 0) + { + memset( + phMiraMonLayer->szStringToOperate, 0, + phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField); + continue; + } + if (phMiraMonLayer->iMultiRecord != + MM_MULTIRECORD_NO_MULTIRECORD) + { + if (phMiraMonLayer->iMultiRecord == MM_MULTIRECORD_LAST) + GoToFieldOfMultipleRecord( + nIElem, + phMiraMonLayer->pMultRecordIndex[nIElem].nMR - 1, + nIField); + else if ((MM_EXT_DBF_N_MULTIPLE_RECORDS) + phMiraMonLayer->iMultiRecord < + phMiraMonLayer->pMultRecordIndex[nIElem].nMR) + GoToFieldOfMultipleRecord( + nIElem, + (MM_EXT_DBF_N_MULTIPLE_RECORDS) + phMiraMonLayer->iMultiRecord, + nIField); + else + { + memset(phMiraMonLayer->szStringToOperate, 0, + phMiraMonLayer->pMMBDXP->pField[nIField] + .BytesPerField); + continue; + } + } + else + GoToFieldOfMultipleRecord(nIElem, 0, nIField); + + memset(phMiraMonLayer->szStringToOperate, 0, + phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField); + fread_function( + phMiraMonLayer->szStringToOperate, + phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField, 1, + phMiraMonLayer->pMMBDXP->pfDataBase); + phMiraMonLayer + ->szStringToOperate[phMiraMonLayer->pMMBDXP->pField[nIField] + .BytesPerField] = '\0'; + MM_RemoveWhitespacesFromEndOfString( + phMiraMonLayer->szStringToOperate); + + if (phMiraMonLayer->pMMBDXP->CharSet == + MM_JOC_CARAC_OEM850_DBASE) + MM_oemansi(phMiraMonLayer->szStringToOperate); + + if (phMiraMonLayer->pMMBDXP->CharSet != MM_JOC_CARAC_UTF8_DBF) + { + // MiraMon encoding is ISO 8859-1 (Latin1) -> Recode to UTF-8 + char *pszString = + CPLRecode(phMiraMonLayer->szStringToOperate, + CPL_ENC_ISO8859_1, CPL_ENC_UTF8); + CPLStrlcpy(phMiraMonLayer->szStringToOperate, pszString, + (size_t)phMiraMonLayer->pMMBDXP->pField[nIField] + .BytesPerField + + 1); + CPLFree(pszString); + } + poFeature->SetField(nIField, phMiraMonLayer->szStringToOperate); + } + else if (poFeature->GetDefnRef() + ->GetFieldDefn(nIField) + ->GetType() == OFTIntegerList || + poFeature->GetDefnRef() + ->GetFieldDefn(nIField) + ->GetType() == OFTRealList) + { + if (!phMiraMonLayer->pMultRecordIndex || + phMiraMonLayer->pMultRecordIndex[nIElem].nMR == 0) + { + memset( + phMiraMonLayer->szStringToOperate, 0, + phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField); + continue; + } + for (nIRecord = 0; + nIRecord < phMiraMonLayer->pMultRecordIndex[nIElem].nMR; + nIRecord++) + { + GoToFieldOfMultipleRecord(nIElem, nIRecord, nIField); + memset( + phMiraMonLayer->szStringToOperate, 0, + phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField); + fread_function( + phMiraMonLayer->szStringToOperate, + phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField, + 1, phMiraMonLayer->pMMBDXP->pfDataBase); + phMiraMonLayer->szStringToOperate[phMiraMonLayer->pMMBDXP + ->pField[nIField] + .BytesPerField] = + '\0'; + + padfValues[nIRecord] = + 
atof(phMiraMonLayer->szStringToOperate); + } + + poFeature->SetField( + nIField, phMiraMonLayer->pMultRecordIndex[nIElem].nMR, + padfValues); + } + else if (poFeature->GetDefnRef() + ->GetFieldDefn(nIField) + ->GetType() == OFTInteger64List) + { + if (!phMiraMonLayer->pMultRecordIndex || + phMiraMonLayer->pMultRecordIndex[nIElem].nMR == 0) + { + memset( + phMiraMonLayer->szStringToOperate, 0, + phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField); + continue; + } + for (nIRecord = 0; + nIRecord < phMiraMonLayer->pMultRecordIndex[nIElem].nMR; + nIRecord++) + { + GoToFieldOfMultipleRecord(nIElem, nIRecord, nIField); + memset( + phMiraMonLayer->szStringToOperate, 0, + phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField); + fread_function( + phMiraMonLayer->szStringToOperate, + phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField, + 1, phMiraMonLayer->pMMBDXP->pfDataBase); + phMiraMonLayer->szStringToOperate[phMiraMonLayer->pMMBDXP + ->pField[nIField] + .BytesPerField] = + '\0'; + + pnInt64Values[nIRecord] = + CPLAtoGIntBig(phMiraMonLayer->szStringToOperate); + } + + poFeature->SetField( + nIField, phMiraMonLayer->pMultRecordIndex[nIElem].nMR, + pnInt64Values); + } + else if (poFeature->GetDefnRef() + ->GetFieldDefn(nIField) + ->GetType() == OFTInteger || + poFeature->GetDefnRef() + ->GetFieldDefn(nIField) + ->GetType() == OFTInteger64 || + poFeature->GetDefnRef() + ->GetFieldDefn(nIField) + ->GetType() == OFTReal) + { + if (!phMiraMonLayer->pMultRecordIndex || + phMiraMonLayer->pMultRecordIndex[nIElem].nMR == 0) + { + memset( + phMiraMonLayer->szStringToOperate, 0, + phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField); + continue; + } + if (phMiraMonLayer->iMultiRecord != + MM_MULTIRECORD_NO_MULTIRECORD) + { + if (phMiraMonLayer->iMultiRecord == MM_MULTIRECORD_LAST) + GoToFieldOfMultipleRecord( + nIElem, + phMiraMonLayer->pMultRecordIndex[nIElem].nMR - 1, + nIField); + else if ((MM_EXT_DBF_N_MULTIPLE_RECORDS) + phMiraMonLayer->iMultiRecord < + phMiraMonLayer->pMultRecordIndex[nIElem].nMR) + GoToFieldOfMultipleRecord( + nIElem, + (MM_EXT_DBF_N_MULTIPLE_RECORDS) + phMiraMonLayer->iMultiRecord, + nIField); + else + { + memset(phMiraMonLayer->szStringToOperate, 0, + phMiraMonLayer->pMMBDXP->pField[nIField] + .BytesPerField); + continue; + } + } + else + GoToFieldOfMultipleRecord(nIElem, 0, nIField); + + memset(phMiraMonLayer->szStringToOperate, 0, + phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField); + fread_function( + phMiraMonLayer->szStringToOperate, + phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField, 1, + phMiraMonLayer->pMMBDXP->pfDataBase); + phMiraMonLayer + ->szStringToOperate[phMiraMonLayer->pMMBDXP->pField[nIField] + .BytesPerField] = '\0'; + MM_RemoveWhitespacesFromEndOfString( + phMiraMonLayer->szStringToOperate); + poFeature->SetField(nIField, + atof(phMiraMonLayer->szStringToOperate)); + } + else if (poFeature->GetDefnRef() + ->GetFieldDefn(nIField) + ->GetType() == OFTDate) + { + if (!phMiraMonLayer->pMultRecordIndex || + phMiraMonLayer->pMultRecordIndex[nIElem].nMR == 0) + { + memset( + phMiraMonLayer->szStringToOperate, 0, + phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField); + continue; + } + if (phMiraMonLayer->iMultiRecord != + MM_MULTIRECORD_NO_MULTIRECORD) + { + if (phMiraMonLayer->iMultiRecord == MM_MULTIRECORD_LAST) + GoToFieldOfMultipleRecord( + nIElem, + phMiraMonLayer->pMultRecordIndex[nIElem].nMR - 1, + nIField); + else if ((MM_EXT_DBF_N_MULTIPLE_RECORDS) + phMiraMonLayer->iMultiRecord < + phMiraMonLayer->pMultRecordIndex[nIElem].nMR) + 
GoToFieldOfMultipleRecord( + nIElem, + (MM_EXT_DBF_N_MULTIPLE_RECORDS) + phMiraMonLayer->iMultiRecord, + nIField); + else + { + memset(phMiraMonLayer->szStringToOperate, 0, + phMiraMonLayer->pMMBDXP->pField[nIField] + .BytesPerField); + continue; + } + } + else + GoToFieldOfMultipleRecord(nIElem, 0, nIField); + + memset(phMiraMonLayer->szStringToOperate, 0, + phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField); + fread_function( + phMiraMonLayer->szStringToOperate, + phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField, 1, + phMiraMonLayer->pMMBDXP->pfDataBase); + phMiraMonLayer + ->szStringToOperate[phMiraMonLayer->pMMBDXP->pField[nIField] + .BytesPerField] = '\0'; + + MM_RemoveWhitespacesFromEndOfString( + phMiraMonLayer->szStringToOperate); + if (!MMIsEmptyString(phMiraMonLayer->szStringToOperate)) + { + char pszDate_5[5]; + char pszDate_3[3]; + int Year, Month, Day; + + CPLStrlcpy(pszDate_5, phMiraMonLayer->szStringToOperate, 5); + pszDate_5[4] = '\0'; + Year = atoi(pszDate_5); + + CPLStrlcpy(pszDate_3, phMiraMonLayer->szStringToOperate + 4, + 3); + (pszDate_3)[2] = '\0'; + Month = atoi(pszDate_3); + + CPLStrlcpy(pszDate_3, phMiraMonLayer->szStringToOperate + 6, + 3); + (pszDate_3)[2] = '\0'; + Day = atoi(pszDate_3); + + poFeature->SetField(nIField, Year, Month, Day); + } + else + poFeature->SetField(nIField, + phMiraMonLayer->szStringToOperate); + } + } + } + + // Even in case of polygons, where the first feature is jumped + // the ID of the first feature has to be 0, the second, 1,... + poFeature->SetFID(nFeatureId); + + m_nFeaturesRead++; + return poFeature.release(); +} + +/****************************************************************************/ +/* GetFeatureCount() */ +/****************************************************************************/ +GIntBig OGRMiraMonLayer::GetFeatureCount(int bForce) +{ + if (!phMiraMonLayer || m_poFilterGeom != nullptr || + m_poAttrQuery != nullptr) + return OGRLayer::GetFeatureCount(bForce); + + if (phMiraMonLayer->bIsPolygon) + { + return std::max((GIntBig)0, + (GIntBig)(phMiraMonLayer->TopHeader.nElemCount - 1)); + } + return (GIntBig)phMiraMonLayer->TopHeader.nElemCount; +} + +/****************************************************************************/ +/* MMProcessMultiGeometry() */ +/****************************************************************************/ +OGRErr OGRMiraMonLayer::MMProcessMultiGeometry(OGRGeometryH hGeom, + OGRFeature *poFeature) + +{ + OGRErr eErr = OGRERR_NONE; + OGRGeometry *poGeom = LOG_ACTION(OGRGeometry::FromHandle(hGeom)); + + if (poGeom == nullptr) + { + CPLError( + CE_Failure, CPLE_AppDefined, + "\nFeatures without geometry not supported by MiraMon writer."); + return LOG_ACTION(OGRERR_FAILURE); + } + + // Multigeometry field processing (just in case of a MG inside a MG) + if (wkbFlatten(poGeom->getGeometryType()) == wkbGeometryCollection) + { + int nGeom = OGR_G_GetGeometryCount(OGRGeometry::ToHandle(poGeom)); + for (int iGeom = 0; iGeom < nGeom; iGeom++) + { + OGRGeometryH poSubGeometry = + OGR_G_GetGeometryRef(OGRGeometry::ToHandle(poGeom), iGeom); + eErr = MMProcessMultiGeometry(poSubGeometry, poFeature); + if (eErr != OGRERR_NONE) + return eErr; + } + return eErr; + } + // Converting multilines and multi points to simple ones + if (wkbFlatten(poGeom->getGeometryType()) == wkbMultiLineString || + wkbFlatten(poGeom->getGeometryType()) == wkbMultiPoint) + { + int nGeom = OGR_G_GetGeometryCount(OGRGeometry::ToHandle(poGeom)); + for (int iGeom = 0; iGeom < nGeom; iGeom++) + { + OGRGeometryH 
poSubGeometry = + OGR_G_GetGeometryRef(OGRGeometry::ToHandle(poGeom), iGeom); + eErr = MMProcessGeometry(poSubGeometry, poFeature, (iGeom == 0)); + if (eErr != OGRERR_NONE) + return eErr; + } + return eErr; + } + + // Processing a simple geometry + return LOG_ACTION( + MMProcessGeometry(OGRGeometry::ToHandle(poGeom), poFeature, TRUE)); +} + +/****************************************************************************/ +/* MMProcessGeometry() */ +/****************************************************************************/ +OGRErr OGRMiraMonLayer::MMProcessGeometry(OGRGeometryH hGeom, + OGRFeature *poFeature, + MM_BOOLEAN bcalculateRecord) + +{ + OGRErr eErr = OGRERR_NONE; + OGRGeometry *poGeom = nullptr; + if (hGeom) + { + poGeom = OGRGeometry::FromHandle(hGeom); + + // Translating types from GDAL to MiraMon + int eLT = LOG_ACTION(poGeom->getGeometryType()); + switch (wkbFlatten(eLT)) + { + case wkbPoint: + phMiraMonLayer = LOG_ACTION(&hMiraMonLayerPNT); + if (OGR_G_Is3D(hGeom)) + phMiraMonLayer->eLT = MM_LayerType_Point3d; + else + phMiraMonLayer->eLT = MM_LayerType_Point; + break; + case wkbLineString: + phMiraMonLayer = LOG_ACTION(&hMiraMonLayerARC); + if (OGR_G_Is3D(hGeom)) + phMiraMonLayer->eLT = MM_LayerType_Arc3d; + else + phMiraMonLayer->eLT = MM_LayerType_Arc; + break; + case wkbPolygon: + case wkbMultiPolygon: + case wkbPolyhedralSurface: + case wkbTIN: + case wkbTriangle: + phMiraMonLayer = LOG_ACTION(&hMiraMonLayerPOL); + if (OGR_G_Is3D(hGeom)) + phMiraMonLayer->eLT = MM_LayerType_Pol3d; + else + phMiraMonLayer->eLT = MM_LayerType_Pol; + break; + case wkbUnknown: + default: + { + CPLError(CE_Warning, CPLE_NotSupported, + "MiraMon " + "does not support geometry type '%d'", + eLT); + return OGRERR_UNSUPPORTED_GEOMETRY_TYPE; + } + } + } + else + { + // Processing only the table. 
A DBF will be generated + phMiraMonLayer = LOG_ACTION(&hMiraMonLayerReadOrNonGeom); + phMiraMonLayer->eLT = MM_LayerType_Unknown; + } + + /* -------------------------------------------------------------------- */ + /* Field translation from GDAL to MiraMon */ + /* -------------------------------------------------------------------- */ + // Reset the object where read coordinates are going to be stored + MMResetFeatureGeometry(&hMMFeature); + if (bcalculateRecord) + { + MMResetFeatureRecord(&hMMFeature); + if (!phMiraMonLayer->pLayerDB) + { + eErr = TranslateFieldsToMM(); + if (eErr != OGRERR_NONE) + return eErr; + } + // Content field translation from GDAL to MiraMon + eErr = TranslateFieldsValuesToMM(poFeature); + if (eErr != OGRERR_NONE) + { + CPLDebugOnly("MiraMon", "Error in MMProcessGeometry()"); + return eErr; + } + } + + /* -------------------------------------------------------------------- */ + /* Write Geometry */ + /* -------------------------------------------------------------------- */ + + // Reads objects with coordinates and transform them to MiraMon + if (poGeom) + { + eErr = MMLoadGeometry(OGRGeometry::ToHandle(poGeom)); + } + else + { + if (!phMiraMonLayer->bIsBeenInit) + { + phMiraMonLayer->bIsDBF = TRUE; + if (MMInitLayerByType(phMiraMonLayer)) + eErr = OGRERR_FAILURE; + + phMiraMonLayer->bIsBeenInit = 1; + } + } + + // Writes coordinates to the disk + if (eErr == OGRERR_NONE) + return MMWriteGeometry(); + CPLDebugOnly("MiraMon", "Error in MMProcessGeometry()"); + return eErr; +} + +/****************************************************************************/ +/* ICreateFeature() */ +/****************************************************************************/ + +OGRErr OGRMiraMonLayer::ICreateFeature(OGRFeature *poFeature) + +{ + OGRErr eErr = OGRERR_NONE; + + if (!m_bUpdate) + { + CPLError(CE_Failure, CPLE_NoWriteAccess, + "Cannot create features on a read-only dataset."); + return OGRERR_FAILURE; + } + + /* -------------------------------------------------------------------- */ + /* Write out the feature */ + /* -------------------------------------------------------------------- */ + OGRGeometry *poGeom = LOG_ACTION(poFeature->GetGeometryRef()); + + // Processing a feature without geometry. 
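+    // In that case only the attribute part is written: MMProcessGeometry() is
+    // called with a null geometry and just the DBF table of the layer is updated.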
+ if (poGeom == nullptr) + { + eErr = LOG_ACTION(MMProcessGeometry(nullptr, poFeature, TRUE)); + if (phMiraMonLayer->bIsDBF) + poFeature->SetFID(phMiraMonLayer->TopHeader.nElemCount - 1); + return eErr; + } + + // Converting to simple geometries + if (wkbFlatten(poGeom->getGeometryType()) == wkbGeometryCollection) + { + int nGeom = + LOG_ACTION(OGR_G_GetGeometryCount(OGRGeometry::ToHandle(poGeom))); + for (int iGeom = 0; iGeom < nGeom; iGeom++) + { + OGRGeometryH poSubGeometry = LOG_ACTION( + OGR_G_GetGeometryRef(OGRGeometry::ToHandle(poGeom), iGeom)); + eErr = LOG_ACTION(MMProcessMultiGeometry(poSubGeometry, poFeature)); + if (eErr != OGRERR_NONE) + return eErr; + } + + return eErr; + } + + // Processing the geometry + eErr = LOG_ACTION( + MMProcessMultiGeometry(OGRGeometry::ToHandle(poGeom), poFeature)); + + // Set the FID from 0 index + if (phMiraMonLayer) + { + if (phMiraMonLayer->bIsPolygon && + phMiraMonLayer->TopHeader.nElemCount > 1) + poFeature->SetFID((GIntBig)phMiraMonLayer->TopHeader.nElemCount - + 2); + else if (phMiraMonLayer->TopHeader.nElemCount > 0) + poFeature->SetFID((GIntBig)phMiraMonLayer->TopHeader.nElemCount - + 1); + } + return eErr; +} + +/****************************************************************************/ +/* MMDumpVertices() */ +/****************************************************************************/ + +OGRErr OGRMiraMonLayer::MMDumpVertices(OGRGeometryH hGeom, + MM_BOOLEAN bExternalRing, + MM_BOOLEAN bUseVFG) +{ + // If the MiraMonLayer structure has not been init, + // here is the moment to do that. + if (!phMiraMonLayer) + return OGRERR_FAILURE; + + if (!phMiraMonLayer->bIsBeenInit) + { + if (MMInitLayerByType(phMiraMonLayer)) + return OGRERR_FAILURE; + phMiraMonLayer->bIsBeenInit = 1; + } + if (MMResize_MM_N_VERTICES_TYPE_Pointer( + &hMMFeature.pNCoordRing, &hMMFeature.nMaxpNCoordRing, + (MM_N_VERTICES_TYPE)hMMFeature.nNRings + 1, MM_MEAN_NUMBER_OF_RINGS, + 0)) + return OGRERR_FAILURE; + + if (bUseVFG) + { + if (MMResizeVFGPointer(&hMMFeature.flag_VFG, &hMMFeature.nMaxVFG, + (MM_INTERNAL_FID)hMMFeature.nNRings + 1, + MM_MEAN_NUMBER_OF_RINGS, 0)) + return OGRERR_FAILURE; + + hMMFeature.flag_VFG[hMMFeature.nIRing] = MM_END_ARC_IN_RING; + if (bExternalRing) + hMMFeature.flag_VFG[hMMFeature.nIRing] |= MM_EXTERIOR_ARC_SIDE; + // In MiraMon the external ring is clockwise and the internals are + // coounterclockwise. 
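+        // When the OGR ring orientation does not follow that convention, the
+        // MM_ROTATE_ARC flag is raised for this ring.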
+ OGRGeometry *poGeom = OGRGeometry::FromHandle(hGeom); + if ((bExternalRing && !poGeom->toLinearRing()->isClockwise()) || + (!bExternalRing && poGeom->toLinearRing()->isClockwise())) + hMMFeature.flag_VFG[hMMFeature.nIRing] |= MM_ROTATE_ARC; + } + + hMMFeature.pNCoordRing[hMMFeature.nIRing] = OGR_G_GetPointCount(hGeom); + + if (MMResizeMM_POINT2DPointer(&hMMFeature.pCoord, &hMMFeature.nMaxpCoord, + hMMFeature.nICoord + + hMMFeature.pNCoordRing[hMMFeature.nIRing], + MM_MEAN_NUMBER_OF_NCOORDS, 0)) + return OGRERR_FAILURE; + if (MMResizeDoublePointer(&hMMFeature.pZCoord, &hMMFeature.nMaxpZCoord, + hMMFeature.nICoord + + hMMFeature.pNCoordRing[hMMFeature.nIRing], + MM_MEAN_NUMBER_OF_NCOORDS, 0)) + return OGRERR_FAILURE; + + for (int iPoint = 0; + (MM_N_VERTICES_TYPE)iPoint < hMMFeature.pNCoordRing[hMMFeature.nIRing]; + iPoint++) + { + hMMFeature.pCoord[hMMFeature.nICoord].dfX = OGR_G_GetX(hGeom, iPoint); + hMMFeature.pCoord[hMMFeature.nICoord].dfY = OGR_G_GetY(hGeom, iPoint); + if (OGR_G_GetCoordinateDimension(hGeom) == 2) + hMMFeature.pZCoord[hMMFeature.nICoord] = + MM_NODATA_COORD_Z; // Possible rare case + else + { + hMMFeature.pZCoord[hMMFeature.nICoord] = OGR_G_GetZ(hGeom, iPoint); + phMiraMonLayer->bIsReal3d = 1; + } + + hMMFeature.nICoord++; + } + hMMFeature.nIRing++; + hMMFeature.nNRings++; + return OGRERR_NONE; +} + +/****************************************************************************/ +/* MMLoadGeometry() */ +/* */ +/* Loads on a MiraMon object Feature all coordinates from feature */ +/* */ +/****************************************************************************/ +OGRErr OGRMiraMonLayer::MMLoadGeometry(OGRGeometryH hGeom) + +{ + OGRErr eErr = OGRERR_NONE; + MM_BOOLEAN bExternalRing; + + /* -------------------------------------------------------------------- */ + /* This is a geometry with sub-geometries. */ + /* -------------------------------------------------------------------- */ + int nGeom = OGR_G_GetGeometryCount(hGeom); + + int eLT = LOG_ACTION(wkbFlatten(OGR_G_GetGeometryType(hGeom))); + + if (eLT == wkbMultiPolygon || eLT == wkbPolyhedralSurface || + eLT == wkbTIN || eLT == wkbTriangle) + { + for (int iGeom = 0; iGeom < nGeom; iGeom++) + { + OGRGeometryH poSubGeometry = OGR_G_GetGeometryRef(hGeom, iGeom); + + // Reads all coordinates + eErr = MMLoadGeometry(poSubGeometry); + if (eErr != OGRERR_NONE) + return eErr; + } + } + else if (eLT == wkbPolygon) + { + for (int iGeom = 0; iGeom < nGeom && eErr == OGRERR_NONE; iGeom++) + { + OGRGeometryH poSubGeometry = OGR_G_GetGeometryRef(hGeom, iGeom); + + if (iGeom == 0) + bExternalRing = true; + else + bExternalRing = false; + + eErr = MMDumpVertices(poSubGeometry, bExternalRing, TRUE); + if (eErr != OGRERR_NONE) + return eErr; + } + } + else if (eLT == wkbPoint || eLT == wkbLineString) + { + // Reads all coordinates + eErr = MMDumpVertices(hGeom, true, FALSE); + + if (eErr != OGRERR_NONE) + return eErr; + } + else if (eLT == wkbGeometryCollection) + { + CPLError( + CE_Failure, CPLE_NotSupported, + "MiraMon: wkbGeometryCollection inside a wkbGeometryCollection?"); + return OGRERR_UNSUPPORTED_GEOMETRY_TYPE; + } + + return OGRERR_NONE; +} + +/****************************************************************************/ +/* WriteGeometry() */ +/* */ +/* Writes a geometry to the file. 
*/ +/****************************************************************************/ + +OGRErr OGRMiraMonLayer::MMWriteGeometry() + +{ + OGRErr eErr = MMAddFeature(phMiraMonLayer, &hMMFeature); + + if (eErr == MM_FATAL_ERROR_WRITING_FEATURES) + { + CPLDebugOnly("MiraMon", "Error in MMAddFeature() " + "MM_FATAL_ERROR_WRITING_FEATURES"); + CPLError(CE_Failure, CPLE_FileIO, "MiraMon write failure: %s", + VSIStrerror(errno)); + return OGRERR_FAILURE; + } + if (eErr == MM_STOP_WRITING_FEATURES) + { + CPLDebugOnly("MiraMon", "Error in MMAddFeature() " + "MM_STOP_WRITING_FEATURES"); + CPLError(CE_Failure, CPLE_FileIO, "MiraMon format limitations."); + CPLError(CE_Failure, CPLE_FileIO, + "Try V2.0 option (-lco Version=V2.0)."); + return OGRERR_FAILURE; + } + + return OGRERR_NONE; +} + +/****************************************************************************/ +/* TranslateFieldsToMM() */ +/* */ +/* Translase ogr Fields to a structure that MiraMon can understand */ +/****************************************************************************/ + +OGRErr OGRMiraMonLayer::TranslateFieldsToMM() + +{ + if (m_poFeatureDefn->GetFieldCount() == 0) + return OGRERR_NONE; + + CPLDebugOnly("MiraMon", "Translating fields to MiraMon..."); + // If the structure is filled we do anything + if (phMiraMonLayer->pLayerDB) + return OGRERR_NONE; + + phMiraMonLayer->pLayerDB = static_cast<struct MiraMonDataBase *>( + VSICalloc(sizeof(*phMiraMonLayer->pLayerDB), 1)); + if (!phMiraMonLayer->pLayerDB) + return OGRERR_NOT_ENOUGH_MEMORY; + + phMiraMonLayer->pLayerDB->pFields = + static_cast<struct MiraMonDataBaseField *>( + VSICalloc(m_poFeatureDefn->GetFieldCount(), + sizeof(*(phMiraMonLayer->pLayerDB->pFields)))); + if (!phMiraMonLayer->pLayerDB->pFields) + return OGRERR_NOT_ENOUGH_MEMORY; + + phMiraMonLayer->pLayerDB->nNFields = 0; + if (phMiraMonLayer->pLayerDB->pFields) + { + memset(phMiraMonLayer->pLayerDB->pFields, 0, + m_poFeatureDefn->GetFieldCount() * + sizeof(*phMiraMonLayer->pLayerDB->pFields)); + for (MM_EXT_DBF_N_FIELDS iField = 0; + iField < (MM_EXT_DBF_N_FIELDS)m_poFeatureDefn->GetFieldCount(); + iField++) + { + switch (m_poFeatureDefn->GetFieldDefn(iField)->GetType()) + { + case OFTInteger: + case OFTIntegerList: + phMiraMonLayer->pLayerDB->pFields[iField].eFieldType = + MM_Numeric; + phMiraMonLayer->pLayerDB->pFields[iField] + .nNumberOfDecimals = 0; + break; + + case OFTInteger64: + case OFTInteger64List: + phMiraMonLayer->pLayerDB->pFields[iField].bIs64BitInteger = + TRUE; + phMiraMonLayer->pLayerDB->pFields[iField].eFieldType = + MM_Numeric; + phMiraMonLayer->pLayerDB->pFields[iField] + .nNumberOfDecimals = 0; + break; + + case OFTReal: + case OFTRealList: + phMiraMonLayer->pLayerDB->pFields[iField].eFieldType = + MM_Numeric; + phMiraMonLayer->pLayerDB->pFields[iField] + .nNumberOfDecimals = + m_poFeatureDefn->GetFieldDefn(iField)->GetPrecision(); + break; + + case OFTBinary: + phMiraMonLayer->pLayerDB->pFields[iField].eFieldType = + MM_Character; + break; + + case OFTDate: + phMiraMonLayer->pLayerDB->pFields[iField].eFieldType = + MM_Data; + break; + + case OFTTime: + case OFTDateTime: + phMiraMonLayer->pLayerDB->pFields[iField].eFieldType = + MM_Character; + break; + + case OFTString: + case OFTStringList: + default: + phMiraMonLayer->pLayerDB->pFields[iField].eFieldType = + MM_Character; + break; + } + if (m_poFeatureDefn->GetFieldDefn(iField)->GetType() == OFTDate) + phMiraMonLayer->pLayerDB->pFields[iField].nFieldSize = 8; + else + { + // As https://gdal.org/api/ogrfeature_cpp.html indicates that 
+ // precision (number of digits after decimal point) is optional, + // and a 0 is probably the default value, in that case we prefer + // to save all the guaranteed significant figures in a double + // (needed if a field contains, for instance, coordinates in + // geodetic degrees and a 1:1000 map precision applies). + if (m_poFeatureDefn->GetFieldDefn(iField)->GetPrecision() == 0) + { + if (m_poFeatureDefn->GetFieldDefn(iField)->GetType() == + OFTReal || + m_poFeatureDefn->GetFieldDefn(iField)->GetType() == + OFTRealList) + { + phMiraMonLayer->pLayerDB->pFields[iField].nFieldSize = + 20; + phMiraMonLayer->pLayerDB->pFields[iField] + .nNumberOfDecimals = MAX_RELIABLE_SF_DOUBLE; + } + else + { + phMiraMonLayer->pLayerDB->pFields[iField].nFieldSize = + m_poFeatureDefn->GetFieldDefn(iField)->GetWidth(); + if (phMiraMonLayer->pLayerDB->pFields[iField] + .nFieldSize == 0) + phMiraMonLayer->pLayerDB->pFields[iField] + .nFieldSize = 1; + } + } + else + { + // One more space for the "." + phMiraMonLayer->pLayerDB->pFields[iField].nFieldSize = + (unsigned int)(m_poFeatureDefn->GetFieldDefn(iField) + ->GetWidth() + + 1); + } + } + + // Recode from UTF-8 if necessary + if (phMiraMonLayer->nCharSet != MM_JOC_CARAC_UTF8_DBF) + { + char *pszString = CPLRecode( + m_poFeatureDefn->GetFieldDefn(iField)->GetNameRef(), + CPL_ENC_UTF8, CPL_ENC_ISO8859_1); + CPLStrlcpy( + phMiraMonLayer->pLayerDB->pFields[iField].pszFieldName, + pszString, MM_MAX_LON_FIELD_NAME_DBF); + CPLFree(pszString); + } + else + { + CPLStrlcpy( + phMiraMonLayer->pLayerDB->pFields[iField].pszFieldName, + m_poFeatureDefn->GetFieldDefn(iField)->GetNameRef(), + MM_MAX_LON_FIELD_NAME_DBF); + } + + if (m_poFeatureDefn->GetFieldDefn(iField)->GetAlternativeNameRef()) + { + if (phMiraMonLayer->nCharSet != MM_JOC_CARAC_UTF8_DBF) + { + char *pszString = + CPLRecode(m_poFeatureDefn->GetFieldDefn(iField) + ->GetAlternativeNameRef(), + CPL_ENC_UTF8, CPL_ENC_ISO8859_1); + CPLStrlcpy(phMiraMonLayer->pLayerDB->pFields[iField] + .pszFieldDescription, + pszString, MM_MAX_BYTES_FIELD_DESC); + CPLFree(pszString); + } + else + { + CPLStrlcpy(phMiraMonLayer->pLayerDB->pFields[iField] + .pszFieldDescription, + m_poFeatureDefn->GetFieldDefn(iField) + ->GetAlternativeNameRef(), + MM_MAX_BYTES_FIELD_DESC); + } + } + phMiraMonLayer->pLayerDB->nNFields++; + } + } + + CPLDebugOnly("MiraMon", "Fields to MiraMon translated."); + return OGRERR_NONE; +} + +/****************************************************************************/ +/* TranslateFieldsValuesToMM() */ +/* */ +/* Translate ogr Fields to a structure that MiraMon can understand */ +/****************************************************************************/ + +OGRErr OGRMiraMonLayer::TranslateFieldsValuesToMM(OGRFeature *poFeature) + +{ + if (m_poFeatureDefn->GetFieldCount() == 0) + { + // MiraMon have private DataBase records + hMMFeature.nNumMRecords = 1; + return OGRERR_NONE; + } + + MM_EXT_DBF_N_MULTIPLE_RECORDS nIRecord; + int nNumFields = m_poFeatureDefn->GetFieldCount(); + MM_EXT_DBF_N_MULTIPLE_RECORDS nNumRecords, nRealNumRecords; + hMMFeature.nNumMRecords = 0; + + for (int iField = 0; iField < nNumFields; iField++) + { + OGRFieldType eFType = m_poFeatureDefn->GetFieldDefn(iField)->GetType(); + const char *pszRawValue = poFeature->GetFieldAsString(iField); + + if (eFType == OFTStringList) + { + char **papszValues = poFeature->GetFieldAsStringList(iField); + nRealNumRecords = nNumRecords = CSLCount(papszValues); + if (nNumRecords == 0) + nNumRecords++; + hMMFeature.nNumMRecords = + 
max_function(hMMFeature.nNumMRecords, nNumRecords); + if (MMResizeMiraMonRecord( + &hMMFeature.pRecords, &hMMFeature.nMaxMRecords, + hMMFeature.nNumMRecords, MM_INC_NUMBER_OF_RECORDS, + hMMFeature.nNumMRecords)) + return OGRERR_NOT_ENOUGH_MEMORY; + + for (nIRecord = 0; nIRecord < nRealNumRecords; nIRecord++) + { + hMMFeature.pRecords[nIRecord].nNumField = + m_poFeatureDefn->GetFieldCount(); + + if (MMResizeMiraMonFieldValue( + &(hMMFeature.pRecords[nIRecord].pField), + &hMMFeature.pRecords[nIRecord].nMaxField, + hMMFeature.pRecords[nIRecord].nNumField, + MM_INC_NUMBER_OF_FIELDS, + hMMFeature.pRecords[nIRecord].nNumField)) + return OGRERR_NOT_ENOUGH_MEMORY; + + if (phMiraMonLayer->nCharSet != MM_JOC_CARAC_UTF8_DBF) + { + // MiraMon encoding is ISO 8859-1 (Latin1) -> Recode from UTF-8 + char *pszString = CPLRecode( + papszValues[nIRecord], CPL_ENC_UTF8, CPL_ENC_ISO8859_1); + if (MM_SecureCopyStringFieldValue( + &hMMFeature.pRecords[nIRecord] + .pField[iField] + .pDinValue, + pszString, + &hMMFeature.pRecords[nIRecord] + .pField[iField] + .nNumDinValue)) + { + CPLFree(pszString); + return OGRERR_NOT_ENOUGH_MEMORY; + } + CPLFree(pszString); + } + else + { + if (MM_SecureCopyStringFieldValue( + &hMMFeature.pRecords[nIRecord] + .pField[iField] + .pDinValue, + papszValues[nIRecord], + &hMMFeature.pRecords[nIRecord] + .pField[iField] + .nNumDinValue)) + return OGRERR_NOT_ENOUGH_MEMORY; + } + hMMFeature.pRecords[nIRecord].pField[iField].bIsValid = 1; + } + } + else if (eFType == OFTIntegerList) + { + int nCount = 0; + const int *panValues = + poFeature->GetFieldAsIntegerList(iField, &nCount); + + nRealNumRecords = nNumRecords = nCount; + if (nNumRecords == 0) + nNumRecords++; + hMMFeature.nNumMRecords = + max_function(hMMFeature.nNumMRecords, nNumRecords); + if (MMResizeMiraMonRecord( + &hMMFeature.pRecords, &hMMFeature.nMaxMRecords, + hMMFeature.nNumMRecords, MM_INC_NUMBER_OF_RECORDS, + hMMFeature.nNumMRecords)) + return OGRERR_NOT_ENOUGH_MEMORY; + + // It will contains the i-th element of the list. + for (nIRecord = 0; nIRecord < nRealNumRecords; nIRecord++) + { + hMMFeature.pRecords[nIRecord].nNumField = nNumFields; + + if (MMResizeMiraMonFieldValue( + &(hMMFeature.pRecords[nIRecord].pField), + &hMMFeature.pRecords[nIRecord].nMaxField, + hMMFeature.pRecords[nIRecord].nNumField, + MM_INC_NUMBER_OF_FIELDS, + hMMFeature.pRecords[nIRecord].nNumField)) + return OGRERR_NOT_ENOUGH_MEMORY; + + hMMFeature.pRecords[nIRecord].pField[iField].dValue = + panValues[nIRecord]; + + if (MM_SecureCopyStringFieldValue( + &hMMFeature.pRecords[nIRecord].pField[iField].pDinValue, + CPLSPrintf("%d", panValues[nIRecord]), + &hMMFeature.pRecords[nIRecord] + .pField[iField] + .nNumDinValue)) + return OGRERR_NOT_ENOUGH_MEMORY; + + hMMFeature.pRecords[nIRecord].pField[iField].bIsValid = 1; + } + } + else if (eFType == OFTInteger64List) + { + int nCount = 0; + const GIntBig *panValues = + poFeature->GetFieldAsInteger64List(iField, &nCount); + + nRealNumRecords = nNumRecords = nCount; + if (nNumRecords == 0) + nNumRecords++; + hMMFeature.nNumMRecords = + max_function(hMMFeature.nNumMRecords, nNumRecords); + if (MMResizeMiraMonRecord( + &hMMFeature.pRecords, &hMMFeature.nMaxMRecords, + hMMFeature.nNumMRecords, MM_INC_NUMBER_OF_RECORDS, + hMMFeature.nNumMRecords)) + return OGRERR_NOT_ENOUGH_MEMORY; + + // It will contains the i-th element of the list. 
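+            // Every element of the 64-bit integer list is written as the value
+            // of this field in its own MiraMon multirecord.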
+ for (nIRecord = 0; nIRecord < nRealNumRecords; nIRecord++) + { + hMMFeature.pRecords[nIRecord].nNumField = nNumFields; + + if (MMResizeMiraMonFieldValue( + &(hMMFeature.pRecords[nIRecord].pField), + &hMMFeature.pRecords[nIRecord].nMaxField, + hMMFeature.pRecords[nIRecord].nNumField, + MM_INC_NUMBER_OF_FIELDS, + hMMFeature.pRecords[nIRecord].nNumField)) + return OGRERR_NOT_ENOUGH_MEMORY; + + hMMFeature.pRecords[nIRecord].pField[iField].iValue = + panValues[nIRecord]; + + if (MM_SecureCopyStringFieldValue( + &hMMFeature.pRecords[nIRecord].pField[iField].pDinValue, + CPLSPrintf("%" CPL_FRMT_GB_WITHOUT_PREFIX "d", + panValues[nIRecord]), + &hMMFeature.pRecords[nIRecord] + .pField[iField] + .nNumDinValue)) + return OGRERR_NOT_ENOUGH_MEMORY; + hMMFeature.pRecords[nIRecord].pField[iField].bIsValid = 1; + } + } + else if (eFType == OFTRealList) + { + int nCount = 0; + const double *padfRLValues = + poFeature->GetFieldAsDoubleList(iField, &nCount); + //char format[23]; + + nRealNumRecords = nNumRecords = nCount; + if (nNumRecords == 0) + nNumRecords++; + hMMFeature.nNumMRecords = + max_function(hMMFeature.nNumMRecords, nNumRecords); + if (MMResizeMiraMonRecord( + &hMMFeature.pRecords, &hMMFeature.nMaxMRecords, + hMMFeature.nNumMRecords, MM_INC_NUMBER_OF_RECORDS, + hMMFeature.nNumMRecords)) + return OGRERR_NOT_ENOUGH_MEMORY; + + // It will contains the i-th element of the list. + for (nIRecord = 0; nIRecord < nRealNumRecords; nIRecord++) + { + hMMFeature.pRecords[nIRecord].nNumField = iField; + + if (MMResizeMiraMonFieldValue( + &(hMMFeature.pRecords[nIRecord].pField), + &hMMFeature.pRecords[nIRecord].nMaxField, + hMMFeature.pRecords[nIRecord].nNumField, + MM_INC_NUMBER_OF_FIELDS, + hMMFeature.pRecords[nIRecord].nNumField)) + return OGRERR_NOT_ENOUGH_MEMORY; + + hMMFeature.pRecords[nIRecord].pField[iField].dValue = + padfRLValues[nIRecord]; + + // TODO: decide how many decimals use. If possible. 
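+                // For now the value is serialized with CPLSPrintf("%f", ...),
+                // which keeps the default 6 decimals; the commented-out
+                // CPLStrlcpy below sketches a possible custom format.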
+ //CPLStrlcpy(format, CPLSPrintf("%f", padfRLValues[nIRecord]),23); + + if (MM_SecureCopyStringFieldValue( + &hMMFeature.pRecords[nIRecord].pField[iField].pDinValue, + CPLSPrintf("%f", padfRLValues[nIRecord]), + &hMMFeature.pRecords[nIRecord] + .pField[iField] + .nNumDinValue)) + return OGRERR_NOT_ENOUGH_MEMORY; + hMMFeature.pRecords[nIRecord].pField[iField].bIsValid = 1; + } + } + else if (eFType == OFTString) + { + hMMFeature.nNumMRecords = max_function(hMMFeature.nNumMRecords, 1); + hMMFeature.pRecords[0].nNumField = nNumFields; + if (MMResizeMiraMonFieldValue(&(hMMFeature.pRecords[0].pField), + &hMMFeature.pRecords[0].nMaxField, + hMMFeature.pRecords[0].nNumField, + MM_INC_NUMBER_OF_FIELDS, + hMMFeature.pRecords[0].nNumField)) + return OGRERR_NOT_ENOUGH_MEMORY; + + if (MMIsEmptyString(pszRawValue)) + hMMFeature.pRecords[0].pField[iField].bIsValid = 0; + { + if (phMiraMonLayer->nCharSet != MM_JOC_CARAC_UTF8_DBF) + { + // MiraMon encoding is ISO 8859-1 (Latin1) -> Recode from UTF-8 + char *pszString = + CPLRecode(pszRawValue, CPL_ENC_UTF8, CPL_ENC_ISO8859_1); + if (MM_SecureCopyStringFieldValue( + &hMMFeature.pRecords[0].pField[iField].pDinValue, + pszString, + &hMMFeature.pRecords[0] + .pField[iField] + .nNumDinValue)) + { + CPLFree(pszString); + return OGRERR_NOT_ENOUGH_MEMORY; + } + CPLFree(pszString); + } + else + { + if (MM_SecureCopyStringFieldValue( + &hMMFeature.pRecords[0].pField[iField].pDinValue, + pszRawValue, + &hMMFeature.pRecords[0] + .pField[iField] + .nNumDinValue)) + { + return OGRERR_NOT_ENOUGH_MEMORY; + } + } + } + hMMFeature.pRecords[0].pField[iField].bIsValid = 1; + } + else if (eFType == OFTDate) + { + char szDate[15]; + + hMMFeature.nNumMRecords = max_function(hMMFeature.nNumMRecords, 1); + hMMFeature.pRecords[0].nNumField = nNumFields; + if (MMResizeMiraMonFieldValue(&(hMMFeature.pRecords[0].pField), + &hMMFeature.pRecords[0].nMaxField, + hMMFeature.pRecords[0].nNumField, + MM_INC_NUMBER_OF_FIELDS, + hMMFeature.pRecords[0].nNumField)) + return OGRERR_NOT_ENOUGH_MEMORY; + + if (MMIsEmptyString(pszRawValue)) + hMMFeature.pRecords[0].pField[iField].bIsValid = 0; + else + { + const OGRField *poField = poFeature->GetRawFieldRef(iField); + if (poField->Date.Year >= 0) + snprintf(szDate, sizeof(szDate), "%04d%02d%02d", + poField->Date.Year, poField->Date.Month, + poField->Date.Day); + else + snprintf(szDate, sizeof(szDate), "%04d%02d%02d", 0, 0, 0); + + if (MM_SecureCopyStringFieldValue( + &hMMFeature.pRecords[0].pField[iField].pDinValue, + szDate, + &hMMFeature.pRecords[0].pField[iField].nNumDinValue)) + return OGRERR_NOT_ENOUGH_MEMORY; + hMMFeature.pRecords[0].pField[iField].bIsValid = 1; + } + } + else if (eFType == OFTTime || eFType == OFTDateTime) + { + hMMFeature.nNumMRecords = max_function(hMMFeature.nNumMRecords, 1); + hMMFeature.pRecords[0].nNumField = nNumFields; + if (MMResizeMiraMonFieldValue(&(hMMFeature.pRecords[0].pField), + &hMMFeature.pRecords[0].nMaxField, + hMMFeature.pRecords[0].nNumField, + MM_INC_NUMBER_OF_FIELDS, + hMMFeature.pRecords[0].nNumField)) + return OGRERR_NOT_ENOUGH_MEMORY; + + if (MMIsEmptyString(pszRawValue)) + hMMFeature.pRecords[0].pField[iField].bIsValid = 0; + else + { + // MiraMon encoding is ISO 8859-1 (Latin1) -> Recode from UTF-8 + if (MM_SecureCopyStringFieldValue( + &hMMFeature.pRecords[0].pField[iField].pDinValue, + pszRawValue, + &hMMFeature.pRecords[0].pField[iField].nNumDinValue)) + return OGRERR_NOT_ENOUGH_MEMORY; + + hMMFeature.pRecords[0].pField[iField].bIsValid = 1; + } + } + else if (eFType == OFTInteger) + { + 
hMMFeature.nNumMRecords = max_function(hMMFeature.nNumMRecords, 1); + hMMFeature.pRecords[0].nNumField = nNumFields; + if (MMResizeMiraMonFieldValue(&(hMMFeature.pRecords[0].pField), + &hMMFeature.pRecords[0].nMaxField, + hMMFeature.pRecords[0].nNumField, + MM_INC_NUMBER_OF_FIELDS, + hMMFeature.pRecords[0].nNumField)) + return OGRERR_NOT_ENOUGH_MEMORY; + + if (MMIsEmptyString(pszRawValue)) + hMMFeature.pRecords[0].pField[iField].bIsValid = 0; + else + { + hMMFeature.pRecords[0].pField[iField].dValue = + poFeature->GetFieldAsInteger(iField); + + if (MM_SecureCopyStringFieldValue( + &hMMFeature.pRecords[0].pField[iField].pDinValue, + pszRawValue, + &hMMFeature.pRecords[0].pField[iField].nNumDinValue)) + return OGRERR_NOT_ENOUGH_MEMORY; + hMMFeature.pRecords[0].pField[iField].bIsValid = 1; + } + } + else if (eFType == OFTInteger64) + { + hMMFeature.nNumMRecords = max_function(hMMFeature.nNumMRecords, 1); + hMMFeature.pRecords[0].nNumField = nNumFields; + if (MMResizeMiraMonFieldValue(&(hMMFeature.pRecords[0].pField), + &hMMFeature.pRecords[0].nMaxField, + hMMFeature.pRecords[0].nNumField, + MM_INC_NUMBER_OF_FIELDS, + hMMFeature.pRecords[0].nNumField)) + return OGRERR_NOT_ENOUGH_MEMORY; + + if (MMIsEmptyString(pszRawValue)) + hMMFeature.pRecords[0].pField[iField].bIsValid = 0; + else + { + hMMFeature.pRecords[0].pField[iField].iValue = + poFeature->GetFieldAsInteger64(iField); + + if (MM_SecureCopyStringFieldValue( + &hMMFeature.pRecords[0].pField[iField].pDinValue, + pszRawValue, + &hMMFeature.pRecords[0].pField[iField].nNumDinValue)) + return OGRERR_NOT_ENOUGH_MEMORY; + hMMFeature.pRecords[0].pField[iField].bIsValid = 1; + } + } + else if (eFType == OFTReal) + { + hMMFeature.nNumMRecords = max_function(hMMFeature.nNumMRecords, 1); + hMMFeature.pRecords[0].nNumField = nNumFields; + if (MMResizeMiraMonFieldValue(&(hMMFeature.pRecords[0].pField), + &hMMFeature.pRecords[0].nMaxField, + hMMFeature.pRecords[0].nNumField, + MM_INC_NUMBER_OF_FIELDS, + hMMFeature.pRecords[0].nNumField)) + return OGRERR_NOT_ENOUGH_MEMORY; + + if (MMIsEmptyString(pszRawValue)) + hMMFeature.pRecords[0].pField[iField].bIsValid = 0; + else + { + hMMFeature.pRecords[0].pField[iField].dValue = + poFeature->GetFieldAsDouble(iField); + + if (MM_SecureCopyStringFieldValue( + &hMMFeature.pRecords[0].pField[iField].pDinValue, + pszRawValue, + &hMMFeature.pRecords[0].pField[iField].nNumDinValue)) + return OGRERR_NOT_ENOUGH_MEMORY; + hMMFeature.pRecords[0].pField[iField].bIsValid = 1; + } + } + else + { + CPLError(CE_Warning, CPLE_NotSupported, + "MiraMon: Field type %d not processed by MiraMon\n", + eFType); + hMMFeature.pRecords[0].pField[iField].bIsValid = 0; + } + } + + return OGRERR_NONE; +} + +/****************************************************************************/ +/* GetLayerDefn() */ +/* */ +/****************************************************************************/ +OGRFeatureDefn *OGRMiraMonLayer::GetLayerDefn() +{ + return m_poFeatureDefn; +} + +/****************************************************************************/ +/* GetExtent() */ +/* */ +/* Fetch extent of the data currently stored in the dataset. */ +/* The bForce flag has no effect on SHO files since that value */ +/* is always in the header. 
*/ +/****************************************************************************/ + +OGRErr OGRMiraMonLayer::GetExtent(OGREnvelope *psExtent, int bForce) + +{ + if (phMiraMonLayer) + { + if (phMiraMonLayer->bIsDBF) + return OGRERR_FAILURE; + + // For polygons we need another polygon apart from the universal one + // to have a valid extension + if (phMiraMonLayer->bIsPolygon && + phMiraMonLayer->TopHeader.nElemCount < 1) + return OGRERR_FAILURE; + + if (phMiraMonLayer->TopHeader.nElemCount < 1) + return OGRERR_FAILURE; + + psExtent->MinX = phMiraMonLayer->TopHeader.hBB.dfMinX; + psExtent->MaxX = phMiraMonLayer->TopHeader.hBB.dfMaxX; + psExtent->MinY = phMiraMonLayer->TopHeader.hBB.dfMinY; + psExtent->MaxY = phMiraMonLayer->TopHeader.hBB.dfMaxY; + } + else + { + if (!bForce) + return OGRERR_FAILURE; + } + + return OGRERR_NONE; +} + +/****************************************************************************/ +/* TestCapability() */ +/****************************************************************************/ + +int OGRMiraMonLayer::TestCapability(const char *pszCap) + +{ + if (EQUAL(pszCap, OLCRandomRead)) + return TRUE; + + if (EQUAL(pszCap, OLCSequentialWrite)) + return m_bUpdate; + + if (EQUAL(pszCap, OLCFastFeatureCount)) + return !m_poFilterGeom && !m_poAttrQuery; + + if (EQUAL(pszCap, OLCFastGetExtent)) + return TRUE; + + if (EQUAL(pszCap, OLCCreateField)) + return m_bUpdate; + + if (EQUAL(pszCap, OLCZGeometries)) + return TRUE; + + if (EQUAL(pszCap, OLCStringsAsUTF8)) + return TRUE; + + return FALSE; +} + +/****************************************************************************/ +/* CreateField() */ +/****************************************************************************/ + +OGRErr OGRMiraMonLayer::CreateField(const OGRFieldDefn *poField, int bApproxOK) + +{ + if (!m_bUpdate) + { + CPLError(CE_Failure, CPLE_NoWriteAccess, + "Cannot create fields on a read-only dataset."); + return OGRERR_FAILURE; + } + + if (phMiraMonLayer && phMiraMonLayer->TopHeader.nElemCount > 0) + { + CPLError(CE_Failure, CPLE_NoWriteAccess, + "Cannot create fields to a layer with " + "already existing features in it."); + return OGRERR_FAILURE; + } + + switch (poField->GetType()) + { + case OFTInteger: + case OFTIntegerList: + case OFTInteger64: + case OFTInteger64List: + case OFTReal: + case OFTRealList: + case OFTString: + case OFTStringList: + case OFTDate: + m_poFeatureDefn->AddFieldDefn(poField); + return OGRERR_NONE; + default: + if (!bApproxOK) + { + CPLError(CE_Failure, CPLE_AppDefined, + "\nField %s is of an unsupported type: %s.", + poField->GetNameRef(), + poField->GetFieldTypeName(poField->GetType())); + return OGRERR_FAILURE; + } + else + { + OGRFieldDefn oModDef(poField); + oModDef.SetType(OFTString); + m_poFeatureDefn->AddFieldDefn(poField); + return OGRERR_NONE; + } + } +} + +/************************************************************************/ +/* AddToFileList() */ +/************************************************************************/ + +void OGRMiraMonLayer::AddToFileList(CPLStringList &oFileList) +{ + if (!phMiraMonLayer) + return; + + char szAuxFile[MM_CPL_PATH_BUF_SIZE]; + + oFileList.AddStringDirectly( + VSIGetCanonicalFilename(phMiraMonLayer->pszSrcLayerName)); + char *pszMMExt = + CPLStrdup(CPLGetExtension(phMiraMonLayer->pszSrcLayerName)); + + if (phMiraMonLayer->bIsPoint) + { + // As it's explicit on documentation a point has also two more files: + + // FILE_NAME_WITHOUT_EXTENSION.pnt --> FILE_NAME_WITHOUT_EXTENSION + T.rel + CPLStrlcpy(szAuxFile, 
CPLGetBasename(phMiraMonLayer->pszSrcLayerName), + MM_CPL_PATH_BUF_SIZE); + CPLStrlcat(szAuxFile, (pszMMExt[0] == 'p') ? "T.rel" : "T.REL", + MM_CPL_PATH_BUF_SIZE); + oFileList.AddStringDirectly(VSIGetCanonicalFilename( + CPLFormFilename(CPLGetDirname(phMiraMonLayer->pszSrcLayerName), + szAuxFile, nullptr))); + + // FILE_NAME_WITHOUT_EXTENSION.pnt --> FILE_NAME_WITHOUT_EXTENSION + T.dbf + CPLStrlcpy(szAuxFile, CPLGetBasename(phMiraMonLayer->pszSrcLayerName), + MM_CPL_PATH_BUF_SIZE); + CPLStrlcat(szAuxFile, (pszMMExt[0] == 'p') ? "T.dbf" : "T.DBF", + MM_CPL_PATH_BUF_SIZE); + oFileList.AddStringDirectly(VSIGetCanonicalFilename( + CPLFormFilename(CPLGetDirname(phMiraMonLayer->pszSrcLayerName), + szAuxFile, nullptr))); + } + else if (phMiraMonLayer->bIsArc && !phMiraMonLayer->bIsPolygon) + { + // As it's explicit on documentation a point has also five more files: + + // FILE_NAME_WITHOUT_EXTENSION.arc --> FILE_NAME_WITHOUT_EXTENSION + A.rel + CPLStrlcpy(szAuxFile, CPLGetBasename(phMiraMonLayer->pszSrcLayerName), + MM_CPL_PATH_BUF_SIZE); + CPLStrlcat(szAuxFile, (pszMMExt[0] == 'a') ? "A.rel" : "A.REL", + MM_CPL_PATH_BUF_SIZE); + oFileList.AddStringDirectly(VSIGetCanonicalFilename( + CPLFormFilename(CPLGetDirname(phMiraMonLayer->pszSrcLayerName), + szAuxFile, nullptr))); + + // FILE_NAME_WITHOUT_EXTENSION.arc --> FILE_NAME_WITHOUT_EXTENSION + A.dbf + CPLStrlcpy(szAuxFile, CPLGetBasename(phMiraMonLayer->pszSrcLayerName), + MM_CPL_PATH_BUF_SIZE); + CPLStrlcat(szAuxFile, (pszMMExt[0] == 'a') ? "A.dbf" : "A.DBF", + MM_CPL_PATH_BUF_SIZE); + oFileList.AddStringDirectly(VSIGetCanonicalFilename( + CPLFormFilename(CPLGetDirname(phMiraMonLayer->pszSrcLayerName), + szAuxFile, nullptr))); + + // FILE_NAME_WITHOUT_EXTENSION.arc --> FILE_NAME_WITHOUT_EXTENSION + .nod + CPLStrlcpy(szAuxFile, CPLGetBasename(phMiraMonLayer->pszSrcLayerName), + MM_CPL_PATH_BUF_SIZE); + CPLStrlcat(szAuxFile, (pszMMExt[0] == 'a') ? ".nod" : ".NOD", + MM_CPL_PATH_BUF_SIZE); + oFileList.AddStringDirectly(VSIGetCanonicalFilename( + CPLFormFilename(CPLGetDirname(phMiraMonLayer->pszSrcLayerName), + szAuxFile, nullptr))); + + // FILE_NAME_WITHOUT_EXTENSION.arc --> FILE_NAME_WITHOUT_EXTENSION + N.rel + CPLStrlcpy(szAuxFile, CPLGetBasename(phMiraMonLayer->pszSrcLayerName), + MM_CPL_PATH_BUF_SIZE); + CPLStrlcat(szAuxFile, (pszMMExt[0] == 'a') ? "N.rel" : "N.REL", + MM_CPL_PATH_BUF_SIZE); + oFileList.AddStringDirectly(VSIGetCanonicalFilename( + CPLFormFilename(CPLGetDirname(phMiraMonLayer->pszSrcLayerName), + szAuxFile, nullptr))); + + // FILE_NAME_WITHOUT_EXTENSION.arc --> FILE_NAME_WITHOUT_EXTENSION + N.dbf + CPLStrlcpy(szAuxFile, CPLGetBasename(phMiraMonLayer->pszSrcLayerName), + MM_CPL_PATH_BUF_SIZE); + CPLStrlcat(szAuxFile, (pszMMExt[0] == 'a') ? "N.dbf" : "N.DBF", + MM_CPL_PATH_BUF_SIZE); + oFileList.AddStringDirectly(VSIGetCanonicalFilename( + CPLFormFilename(CPLGetDirname(phMiraMonLayer->pszSrcLayerName), + szAuxFile, nullptr))); + } + else if (phMiraMonLayer->bIsPolygon) + { + // As it's explicit on documentation a point has also eight more files: + const char *szCompleteArcFileName; + char szArcFileName[MM_CPL_PATH_BUF_SIZE]; + + // FILE_NAME_WITHOUT_EXTENSION.pol --> FILE_NAME_WITHOUT_EXTENSION + P.rel + CPLStrlcpy(szAuxFile, CPLGetBasename(phMiraMonLayer->pszSrcLayerName), + MM_CPL_PATH_BUF_SIZE); + CPLStrlcat(szAuxFile, (pszMMExt[0] == 'p') ? 
"P.rel" : "P.REL", + MM_CPL_PATH_BUF_SIZE); + oFileList.AddStringDirectly(VSIGetCanonicalFilename( + CPLFormFilename(CPLGetDirname(phMiraMonLayer->pszSrcLayerName), + szAuxFile, nullptr))); + + // The name of the arc is in THIS metadata file + char *pszArcLayerName = MMReturnValueFromSectionINIFile( + CPLFormFilename(CPLGetDirname(phMiraMonLayer->pszSrcLayerName), + szAuxFile, nullptr), + SECTION_OVVW_ASPECTES_TECNICS, KEY_ArcSource); + if (!pszArcLayerName) + { + CPLFree(pszMMExt); + return; //Some files are missing + } + CPLStrlcpy(szArcFileName, pszArcLayerName, MM_CPL_PATH_BUF_SIZE); + + MM_RemoveInitial_and_FinalQuotationMarks(szArcFileName); + + // If extension is not specified ".arc" will be used + if (MMIsEmptyString(CPLGetExtension(pszArcLayerName))) + CPLStrlcat(szArcFileName, (pszMMExt[0] == 'p') ? ".arc" : ".ARC", + MM_CPL_PATH_BUF_SIZE); + + CPLFree(pszArcLayerName); + + szCompleteArcFileName = + CPLFormFilename(CPLGetDirname(phMiraMonLayer->pszSrcLayerName), + szArcFileName, nullptr); + + // The arc that has the coordinates of the polygon + oFileList.AddStringDirectly( + VSIGetCanonicalFilename(szCompleteArcFileName)); + + // FILE_NAME_WITHOUT_EXTENSION.pol --> FILE_NAME_WITHOUT_EXTENSION + P.dbf + CPLStrlcpy(szAuxFile, CPLGetBasename(phMiraMonLayer->pszSrcLayerName), + MM_CPL_PATH_BUF_SIZE); + CPLStrlcat(szAuxFile, (pszMMExt[0] == 'p') ? "P.dbf" : "P.DBF", + MM_CPL_PATH_BUF_SIZE); + oFileList.AddStringDirectly(VSIGetCanonicalFilename( + CPLFormFilename(CPLGetDirname(phMiraMonLayer->pszSrcLayerName), + szAuxFile, nullptr))); + + // FILE_NAME_WITHOUT_EXTENSION.arc --> FILE_NAME_WITHOUT_EXTENSION + A.rel + const char *pszBaseArcName = CPLGetBasename(szCompleteArcFileName); + CPLStrlcpy(szAuxFile, pszBaseArcName, MM_CPL_PATH_BUF_SIZE); + CPLStrlcat(szAuxFile, (pszMMExt[0] == 'p') ? "A.rel" : "A.REL", + MM_CPL_PATH_BUF_SIZE); + oFileList.AddStringDirectly(VSIGetCanonicalFilename(CPLFormFilename( + CPLGetDirname(szCompleteArcFileName), szAuxFile, nullptr))); + + // FILE_NAME_WITHOUT_EXTENSION.arc --> FILE_NAME_WITHOUT_EXTENSION + A.dbf + CPLStrlcpy(szAuxFile, pszBaseArcName, MM_CPL_PATH_BUF_SIZE); + CPLStrlcat(szAuxFile, (pszMMExt[0] == 'p') ? "A.dbf" : "A.DBF", + MM_CPL_PATH_BUF_SIZE); + oFileList.AddStringDirectly(VSIGetCanonicalFilename(CPLFormFilename( + CPLGetDirname(szCompleteArcFileName), szAuxFile, nullptr))); + + // FILE_NAME_WITHOUT_EXTENSION.arc --> FILE_NAME_WITHOUT_EXTENSION + .nod + CPLStrlcpy(szAuxFile, pszBaseArcName, MM_CPL_PATH_BUF_SIZE); + CPLStrlcat(szAuxFile, (pszMMExt[0] == 'p') ? ".nod" : ".NOD", + MM_CPL_PATH_BUF_SIZE); + oFileList.AddStringDirectly(VSIGetCanonicalFilename(CPLFormFilename( + CPLGetDirname(szCompleteArcFileName), szAuxFile, nullptr))); + + // FILE_NAME_WITHOUT_EXTENSION.arc --> FILE_NAME_WITHOUT_EXTENSION + N.rel + CPLStrlcpy(szAuxFile, pszBaseArcName, MM_CPL_PATH_BUF_SIZE); + CPLStrlcat(szAuxFile, (pszMMExt[0] == 'p') ? "N.rel" : "N.REL", + MM_CPL_PATH_BUF_SIZE); + oFileList.AddStringDirectly(VSIGetCanonicalFilename(CPLFormFilename( + CPLGetDirname(szCompleteArcFileName), szAuxFile, nullptr))); + + // FILE_NAME_WITHOUT_EXTENSION.arc --> FILE_NAME_WITHOUT_EXTENSION + N.dbf + CPLStrlcpy(szAuxFile, pszBaseArcName, MM_CPL_PATH_BUF_SIZE); + CPLStrlcat(szAuxFile, (pszMMExt[0] == 'p') ? 
"N.dbf" : "N.DBF", + MM_CPL_PATH_BUF_SIZE); + oFileList.AddStringDirectly(VSIGetCanonicalFilename(CPLFormFilename( + CPLGetDirname(szCompleteArcFileName), szAuxFile, nullptr))); + } + CPLFree(pszMMExt); +} diff --git a/ogr/ogrsf_frmts/ogrsf_frmts.h b/ogr/ogrsf_frmts/ogrsf_frmts.h index 6316b1bbf945..47ec16626c09 100644 --- a/ogr/ogrsf_frmts/ogrsf_frmts.h +++ b/ogr/ogrsf_frmts/ogrsf_frmts.h @@ -741,6 +741,7 @@ void DeclareDeferredOGRArrowPlugin(); void CPL_DLL RegisterOGRGTFS(); void CPL_DLL RegisterOGRPMTiles(); void CPL_DLL RegisterOGRJSONFG(); +void CPL_DLL RegisterOGRMiraMon(); // @endcond CPL_C_END diff --git a/scripts/fix_typos.sh b/scripts/fix_typos.sh index 2fb0aacf283b..f52f92c9c00e 100755 --- a/scripts/fix_typos.sh +++ b/scripts/fix_typos.sh @@ -126,6 +126,7 @@ AUTHORIZED_LIST="$AUTHORIZED_LIST,cJP2_Colorspace_RGBa,cJP2_Colorspace_Palette_R AUTHORIZED_LIST="$AUTHORIZED_LIST,CURLE_FILE_COULDNT_READ_FILE" AUTHORIZED_LIST="$AUTHORIZED_LIST,nParms,ProjParm,ProjParmId,GTIFFetchProjParms,gdal_GTIFFetchProjParms" # API of libgeotiff AUTHORIZED_LIST="$AUTHORIZED_LIST,lon,Lon,LON" +AUTHORIZED_LIST="$AUTHORIZED_LIST,MM_MARCA_VERSIO_1_DBF_ESTESA,MM_PERIMETRE_INIT_SIZE,MM_PERIMETRE_DECIMALS_SIZE,szMMNomCampPerimetreDefecte,MM_CAMP_ES_PERIMETRE,szMMNomCampNPoligonsDefecte,MM_CAMP_MOSTRABLE_QUAN_TE_CONTINGUT,MM_CAMP_ES_PERIMETRE_3D,SECTION_VERSIO" python3 fix_typos/codespell/codespell.py -w -i 3 -q 2 -S "$EXCLUDED_FILES,./autotest/*,./build*/*" \ -x scripts/typos_allowlist.txt --words-white-list=$AUTHORIZED_LIST \ diff --git a/scripts/typos_allowlist.txt b/scripts/typos_allowlist.txt index a2a7bf6390d7..be5ac1e0ee82 100644 --- a/scripts/typos_allowlist.txt +++ b/scripts/typos_allowlist.txt @@ -306,3 +306,25 @@ either 2 or 4 comma separated values. The same rules apply for the source and de * Esben Mose Hansen, Ange Optimization ApS SetLinearUnits("kilometre", 1000.0); // F(ixed) S(ize) L(ist) of (x,y[,z][,m]) values / Interleaved layout + * https://www.miramon.cat/eng/QuiSom.htm +#define SECTION_VERSIO "VERSIO" +#define szMMNomCampPerimetreDefecte "PERIMETRE" +#define szMMNomCampNPoligonsDefecte "N_POLIG" +#define MM_MIN_WIDTH_LONG 14 // For LONG_ARC and PERIMETRE +#define MM_MIN_WIDTH_AREA 19 // For LONG_ARC and PERIMETRE + CPLStrlcpy(szPerimeterOfThePolygonCat, "Perimetre del poligon", + CPLStrlcpy(szPerimeterOfThePolygonCat, "Perimetre del poligon", + CPLStrlcpy(szAreaOfThePolygonCat, "Area del poligon", + CPLStrlcpy(szNumberOfElementaryPolygonsCat, "Nombre de poligons elementals", + VSIFPrintfL(MMMap.fMMMap, "[VERSIO]\n"); +#define MM_IsDoubleInfinite(x) EsDoubleInfinit((x)) + https://www.miramon.cat/help/eng/GeMPlus/ClausREL.htm + fprintf_function(pF, "[%s]" LineReturn, SECTION_VERSIO); + fprintf_function(pF, "NomCampPerimetre=%s" LineReturn, + fprintf_function(pF, "NomCampNPoligons=%s" LineReturn, + MMReturnValueFromSectionINIFile(szREL_file, SECTION_VERSIO, nullptr); + MMReturnValueFromSectionINIFile(szREL_file, SECTION_VERSIO, KEY_Vers); + pszLine = MMReturnValueFromSectionINIFile(szREL_file, SECTION_VERSIO, + assert f.GetField("PERIMETRE") == pytest.approx(1289.866489495, abs=1e-5) + assert f.GetField("PERIMETRE") == pytest.approx(1123.514024, abs=1e-5) + assert f.GetField("PERIMETRE") == pytest.approx(680.544697, abs=1e-5) From c2489e1c41cdafb8d4de79dc6f8963fa67f20945 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Fri, 19 Apr 2024 13:28:31 +0200 Subject: [PATCH 090/230] cog.rst: update version of availability for STATISTICS option Fixes #9704 --- 
doc/source/drivers/raster/cog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/drivers/raster/cog.rst b/doc/source/drivers/raster/cog.rst index 09bb961de23a..2279c3412866 100644 --- a/doc/source/drivers/raster/cog.rst +++ b/doc/source/drivers/raster/cog.rst @@ -340,7 +340,7 @@ General creation options - .. co:: STATISTICS :choices: AUTO, YES, NO :default: AUTO - :since: 3.7 + :since: 3.8 Whether band statistics should be included in the output file. In ``AUTO`` mode, they will be included only if available in the source From cc519c2c049fd0290649cec9897117380be9dfee Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Fri, 19 Apr 2024 16:16:54 +0200 Subject: [PATCH 091/230] Miramon: more C++ification and simplification of OGRMiraMonDataSource --- ogr/ogrsf_frmts/miramon/ogrmiramon.h | 26 +-- .../miramon/ogrmiramondatasource.cpp | 155 +++++++----------- ogr/ogrsf_frmts/miramon/ogrmiramondriver.cpp | 35 ++-- 3 files changed, 87 insertions(+), 129 deletions(-) diff --git a/ogr/ogrsf_frmts/miramon/ogrmiramon.h b/ogr/ogrsf_frmts/miramon/ogrmiramon.h index a57f6570db97..8e4f9485322a 100644 --- a/ogr/ogrsf_frmts/miramon/ogrmiramon.h +++ b/ogr/ogrsf_frmts/miramon/ogrmiramon.h @@ -128,32 +128,24 @@ class OGRMiraMonLayer final /* OGRMiraMonDataSource */ /************************************************************************/ -class OGRMiraMonDataSource final : public OGRDataSource +class OGRMiraMonDataSource final : public GDALDataset { - OGRMiraMonLayer **papoLayers; - int nLayers; - char *pszRootName; - char *pszDSName; - bool bUpdate; - struct MiraMonVectMapInfo MMMap; + std::vector<std::unique_ptr<OGRMiraMonLayer>> m_apoLayers; + std::string m_osRootName{}; + bool m_bUpdate = false; + struct MiraMonVectMapInfo m_MMMap; public: OGRMiraMonDataSource(); ~OGRMiraMonDataSource(); - int Open(const char *pszFilename, VSILFILE *fp, - const OGRSpatialReference *poSRS, int bUpdate, - CSLConstList papszOpenOptions); - int Create(const char *pszFilename, char **papszOptions); - - const char *GetName() override - { - return pszDSName; - } + bool Open(const char *pszFilename, VSILFILE *fp, + const OGRSpatialReference *poSRS, CSLConstList papszOpenOptions); + bool Create(const char *pszFilename, CSLConstList papszOptions); int GetLayerCount() override { - return nLayers; + return static_cast<int>(m_apoLayers.size()); } OGRLayer *GetLayer(int) override; diff --git a/ogr/ogrsf_frmts/miramon/ogrmiramondatasource.cpp b/ogr/ogrsf_frmts/miramon/ogrmiramondatasource.cpp index 92b2572c29dd..777c767ffc18 100644 --- a/ogr/ogrsf_frmts/miramon/ogrmiramondatasource.cpp +++ b/ogr/ogrsf_frmts/miramon/ogrmiramondatasource.cpp @@ -31,12 +31,8 @@ /* OGRMiraMonDataSource() */ /****************************************************************************/ OGRMiraMonDataSource::OGRMiraMonDataSource() - : papoLayers(nullptr), nLayers(0), pszRootName(nullptr), pszDSName(nullptr), - bUpdate(false) - { - MMMap.nNumberOfLayers = 0; - MMMap.fMMMap = nullptr; + memset(&m_MMMap, 0, sizeof(m_MMMap)); } /****************************************************************************/ @@ -46,86 +42,73 @@ OGRMiraMonDataSource::OGRMiraMonDataSource() OGRMiraMonDataSource::~OGRMiraMonDataSource() { - for (int i = 0; i < nLayers; i++) - delete papoLayers[i]; - CPLFree(papoLayers); - CPLFree(pszDSName); - CPLFree(pszRootName); - - if (MMMap.fMMMap) - VSIFCloseL(MMMap.fMMMap); + m_apoLayers.clear(); + + if (m_MMMap.fMMMap) + VSIFCloseL(m_MMMap.fMMMap); } 
/****************************************************************************/ /* Open() */ /****************************************************************************/ -int OGRMiraMonDataSource::Open(const char *pszFilename, VSILFILE *fp, - const OGRSpatialReference *poSRS, int bUpdateIn, - CSLConstList papszOpenOptionsUsr) +bool OGRMiraMonDataSource::Open(const char *pszFilename, VSILFILE *fp, + const OGRSpatialReference *poSRS, + CSLConstList papszOpenOptionsUsr) { - bUpdate = CPL_TO_BOOL(bUpdateIn); - - OGRMiraMonLayer *poLayer = new OGRMiraMonLayer( - this, pszFilename, fp, poSRS, bUpdate, papszOpenOptionsUsr, &MMMap); + auto poLayer = std::make_unique<OGRMiraMonLayer>( + this, pszFilename, fp, poSRS, m_bUpdate, papszOpenOptionsUsr, &m_MMMap); if (!poLayer->bValidFile) { - delete poLayer; - return FALSE; + return false; } - papoLayers = static_cast<OGRMiraMonLayer **>(CPLRealloc( - papoLayers, - (size_t)(sizeof(OGRMiraMonLayer *) * ((size_t)nLayers + (size_t)1)))); - papoLayers[nLayers] = poLayer; - nLayers++; - if (pszDSName) + if (!m_osRootName.empty()) { - const char *pszExtension = CPLGetExtension(pszDSName); + const char *pszExtension = CPLGetExtension(m_osRootName.c_str()); if (!EQUAL(pszExtension, "pol") && !EQUAL(pszExtension, "arc") && !EQUAL(pszExtension, "pnt")) { - CPLStrlcpy( - MMMap.pszMapName, - CPLFormFilename(pszDSName, CPLGetBasename(pszDSName), "mmm"), - sizeof(MMMap.pszMapName)); - if (!MMMap.nNumberOfLayers) + CPLStrlcpy(m_MMMap.pszMapName, + CPLFormFilename(m_osRootName.c_str(), + CPLGetBasename(m_osRootName.c_str()), + "mmm"), + sizeof(m_MMMap.pszMapName)); + if (!m_MMMap.nNumberOfLayers) { - MMMap.fMMMap = VSIFOpenL(MMMap.pszMapName, "w+"); - if (!MMMap.fMMMap) + m_MMMap.fMMMap = VSIFOpenL(m_MMMap.pszMapName, "w+"); + if (!m_MMMap.fMMMap) { // It could be an error but it is not so important // to stop the process. This map is an extra element // to open all layers in one click, at least in MiraMon // software. - *MMMap.pszMapName = '\0'; + *m_MMMap.pszMapName = '\0'; } else { - VSIFPrintfL(MMMap.fMMMap, "[VERSIO]\n"); - VSIFPrintfL(MMMap.fMMMap, "Vers=2\n"); - VSIFPrintfL(MMMap.fMMMap, "SubVers=0\n"); - VSIFPrintfL(MMMap.fMMMap, "variant=b\n"); - VSIFPrintfL(MMMap.fMMMap, "\n"); - VSIFPrintfL(MMMap.fMMMap, "[DOCUMENT]\n"); - VSIFPrintfL(MMMap.fMMMap, "Titol= %s(map)\n", + VSIFPrintfL(m_MMMap.fMMMap, "[VERSIO]\n"); + VSIFPrintfL(m_MMMap.fMMMap, "Vers=2\n"); + VSIFPrintfL(m_MMMap.fMMMap, "SubVers=0\n"); + VSIFPrintfL(m_MMMap.fMMMap, "variant=b\n"); + VSIFPrintfL(m_MMMap.fMMMap, "\n"); + VSIFPrintfL(m_MMMap.fMMMap, "[DOCUMENT]\n"); + VSIFPrintfL(m_MMMap.fMMMap, "Titol= %s(map)\n", CPLGetBasename(poLayer->GetName())); - VSIFPrintfL(MMMap.fMMMap, "\n"); + VSIFPrintfL(m_MMMap.fMMMap, "\n"); } } } else - *MMMap.pszMapName = '\0'; + *m_MMMap.pszMapName = '\0'; } else - *MMMap.pszMapName = '\0'; + *m_MMMap.pszMapName = '\0'; - if (pszDSName) - CPLFree(pszDSName); - pszDSName = CPLStrdup(pszFilename); + m_apoLayers.emplace_back(std::move(poLayer)); - return TRUE; + return true; } /****************************************************************************/ @@ -135,15 +118,14 @@ int OGRMiraMonDataSource::Open(const char *pszFilename, VSILFILE *fp, /* currently but save the name. 
*/ /****************************************************************************/ -int OGRMiraMonDataSource::Create(const char *pszDataSetName, - char ** /* papszOptions */) +bool OGRMiraMonDataSource::Create(const char *pszDataSetName, + CSLConstList /* papszOptions */) { - bUpdate = TRUE; - pszDSName = CPLStrdup(pszDataSetName); - pszRootName = CPLStrdup(pszDataSetName); + m_bUpdate = true; + m_osRootName = pszDataSetName; - return TRUE; + return true; } /****************************************************************************/ @@ -176,56 +158,50 @@ OGRMiraMonDataSource::ICreateLayer(const char *pszLayerName, /* of the file is where to write, and the layer name is the */ /* dataset name (without extension). */ /* -------------------------------------------------------------------- */ - const char *pszExtension = CPLGetExtension(pszRootName); - char *pszFullMMLayerName; + const char *pszExtension = CPLGetExtension(m_osRootName.c_str()); + std::string osFullMMLayerName; if (EQUAL(pszExtension, "pol") || EQUAL(pszExtension, "arc") || EQUAL(pszExtension, "pnt")) { - char *pszMMLayerName; - pszMMLayerName = CPLStrdup(CPLResetExtension(pszRootName, "")); - pszMMLayerName[strlen(pszMMLayerName) - 1] = '\0'; - - pszFullMMLayerName = CPLStrdup((const char *)pszMMLayerName); + osFullMMLayerName = CPLResetExtension(m_osRootName.c_str(), ""); + if (!osFullMMLayerName.empty()) + osFullMMLayerName.pop_back(); // Checking that the folder where to write exists - const char *szDestFolder = CPLGetDirname(pszFullMMLayerName); - if (!STARTS_WITH(szDestFolder, "/vsimem")) + const std::string osDestFolder = + CPLGetDirname(osFullMMLayerName.c_str()); + if (!STARTS_WITH(osDestFolder.c_str(), "/vsimem")) { VSIStatBufL sStat; - if (VSIStatL(szDestFolder, &sStat) != 0 || + if (VSIStatL(osDestFolder.c_str(), &sStat) != 0 || !VSI_ISDIR(sStat.st_mode)) { - CPLFree(pszMMLayerName); - CPLFree(pszFullMMLayerName); CPLError(CE_Failure, CPLE_AppDefined, - "The folder %s does not exist.", szDestFolder); + "The folder %s does not exist.", osDestFolder.c_str()); return nullptr; } } - CPLFree(pszMMLayerName); } else { - const char *osPath; - - osPath = pszRootName; - pszFullMMLayerName = - CPLStrdup(CPLFormFilename(pszRootName, pszLayerName, "")); + osFullMMLayerName = + CPLFormFilename(m_osRootName.c_str(), pszLayerName, ""); /* -------------------------------------------------------------------- */ /* Let's create the folder if it's not already created. */ /* (only the las level of the folder) */ /* -------------------------------------------------------------------- */ - if (!STARTS_WITH(osPath, "/vsimem")) + if (!STARTS_WITH(m_osRootName.c_str(), "/vsimem")) { VSIStatBufL sStat; - if (VSIStatL(osPath, &sStat) != 0 || !VSI_ISDIR(sStat.st_mode)) + if (VSIStatL(m_osRootName.c_str(), &sStat) != 0 || + !VSI_ISDIR(sStat.st_mode)) { - if (VSIMkdir(osPath, 0755) != 0) + if (VSIMkdir(m_osRootName.c_str(), 0755) != 0) { - CPLFree(pszFullMMLayerName); CPLError(CE_Failure, CPLE_AppDefined, - "Unable to create the folder %s.", pszRootName); + "Unable to create the folder %s.", + m_osRootName.c_str()); return nullptr; } } @@ -235,14 +211,11 @@ OGRMiraMonDataSource::ICreateLayer(const char *pszLayerName, /* -------------------------------------------------------------------- */ /* Return open layer handle. 
*/ /* -------------------------------------------------------------------- */ - if (Open(pszFullMMLayerName, nullptr, poSRS, TRUE, papszOptions)) + if (Open(osFullMMLayerName.c_str(), nullptr, poSRS, papszOptions)) { - CPLFree(pszFullMMLayerName); - auto poLayer = papoLayers[nLayers - 1]; - return poLayer; + return m_apoLayers.back().get(); } - CPLFree(pszFullMMLayerName); return nullptr; } @@ -254,7 +227,7 @@ int OGRMiraMonDataSource::TestCapability(const char *pszCap) { if (EQUAL(pszCap, ODsCCreateLayer)) - return bUpdate; + return m_bUpdate; else if (EQUAL(pszCap, ODsCZGeometries)) return TRUE; @@ -268,10 +241,10 @@ int OGRMiraMonDataSource::TestCapability(const char *pszCap) OGRLayer *OGRMiraMonDataSource::GetLayer(int iLayer) { - if (iLayer < 0 || iLayer >= nLayers) + if (iLayer < 0 || iLayer >= static_cast<int>(m_apoLayers.size())) return nullptr; - return papoLayers[iLayer]; + return m_apoLayers[iLayer].get(); } /************************************************************************/ @@ -281,10 +254,8 @@ OGRLayer *OGRMiraMonDataSource::GetLayer(int iLayer) char **OGRMiraMonDataSource::GetFileList() { CPLStringList oFileList; - GetLayerCount(); - for (int i = 0; i < nLayers; i++) + for (auto &poLayer : m_apoLayers) { - OGRMiraMonLayer *poLayer = papoLayers[i]; poLayer->AddToFileList(oFileList); } return oFileList.StealList(); diff --git a/ogr/ogrsf_frmts/miramon/ogrmiramondriver.cpp b/ogr/ogrsf_frmts/miramon/ogrmiramondriver.cpp index a4b96da1fe2a..6795c7895863 100644 --- a/ogr/ogrsf_frmts/miramon/ogrmiramondriver.cpp +++ b/ogr/ogrsf_frmts/miramon/ogrmiramondriver.cpp @@ -79,27 +79,21 @@ static GDALDataset *OGRMiraMonDriverOpen(GDALOpenInfo *poOpenInfo) if (OGRMiraMonDriverIdentify(poOpenInfo) == FALSE) return nullptr; - OGRMiraMonDataSource *poDS = new OGRMiraMonDataSource(); + auto poDS = std::make_unique<OGRMiraMonDataSource>(); + if (!poDS->Open(poOpenInfo->pszFilename, nullptr, nullptr, + poOpenInfo->papszOpenOptions)) + { + poDS.reset(); + } - if (poDS != nullptr && poOpenInfo->eAccess == GA_Update) + if (poDS && poOpenInfo->eAccess == GA_Update) { CPLError(CE_Failure, CPLE_OpenFailed, "MiraMonVector driver does not support update."); - delete poDS; - poDS = nullptr; - } - else - { - if (!poDS->Open(poOpenInfo->pszFilename, nullptr, nullptr, - poOpenInfo->eAccess == GA_Update, - poOpenInfo->papszOpenOptions)) - { - delete poDS; - poDS = nullptr; - } + return nullptr; } - return poDS; + return poDS.release(); } /****************************************************************************/ @@ -111,13 +105,14 @@ OGRMiraMonDriverCreate(const char *pszName, CPL_UNUSED int /*nBands*/, CPL_UNUSED int /*nXSize*/, CPL_UNUSED int /*nYSize*/, CPL_UNUSED GDALDataType /*eDT*/, char **papszOptions) { - OGRMiraMonDataSource *poDS = new OGRMiraMonDataSource(); + auto poDS = std::make_unique<OGRMiraMonDataSource>(); - if (poDS->Create(pszName, papszOptions)) - return poDS; + if (!poDS->Create(pszName, papszOptions)) + { + poDS.reset(); + } - delete poDS; - return nullptr; + return poDS.release(); } /****************************************************************************/ From c4ef9f0593e5a62265f76b6c9ea7ae12fe0ecbb7 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Fri, 19 Apr 2024 15:48:58 +0200 Subject: [PATCH 092/230] netCDF: fix memleak in error code path (master only, CID 1544059) --- frmts/netcdf/netcdfdataset.cpp | 42 ++++++++++++++++++---------------- 1 file changed, 22 insertions(+), 20 deletions(-) diff --git a/frmts/netcdf/netcdfdataset.cpp 
b/frmts/netcdf/netcdfdataset.cpp index 067ca209634b..3d49bae803bd 100644 --- a/frmts/netcdf/netcdfdataset.cpp +++ b/frmts/netcdf/netcdfdataset.cpp @@ -9270,8 +9270,6 @@ GDALDataset *netCDFDataset::Create(const char *pszFilename, int nXSize, return nullptr; } - CPLMutexHolderD(&hNCMutex); - CPLStringList aosOptions(CSLDuplicate(papszOptions)); if (aosOptions.FetchNameValue("FORMAT") == nullptr && (eType == GDT_UInt16 || eType == GDT_UInt32 || eType == GDT_UInt64 || @@ -9280,8 +9278,28 @@ GDALDataset *netCDFDataset::Create(const char *pszFilename, int nXSize, CPLDebug("netCDF", "Selecting FORMAT=NC4 due to data type"); aosOptions.SetNameValue("FORMAT", "NC4"); } - netCDFDataset *poDS = netCDFDataset::CreateLL(pszFilename, nXSize, nYSize, - nBandsIn, aosOptions.List()); + + CPLStringList aosBandNames; + if (const char *pszBandNames = aosOptions.FetchNameValue("BAND_NAMES")) + { + aosBandNames = + CSLTokenizeString2(pszBandNames, ",", CSLT_HONOURSTRINGS); + + if (aosBandNames.Count() != nBandsIn) + { + CPLError(CE_Failure, CPLE_OpenFailed, + "Attempted to create netCDF with %d bands but %d names " + "provided in BAND_NAMES.", + nBandsIn, aosBandNames.Count()); + + return nullptr; + } + } + + CPLMutexHolderD(&hNCMutex); + + auto poDS = netCDFDataset::CreateLL(pszFilename, nXSize, nYSize, nBandsIn, + aosOptions.List()); if (!poDS) return nullptr; @@ -9320,22 +9338,6 @@ GDALDataset *netCDFDataset::Create(const char *pszFilename, int nXSize, : GDAL_DEFAULT_NCDF_CONVENTIONS); } - CPLStringList aosBandNames; - if (const char *pszBandNames = aosOptions.FetchNameValue("BAND_NAMES")) - { - aosBandNames = - CSLTokenizeString2(pszBandNames, ",", CSLT_HONOURSTRINGS); - - if (aosBandNames.Count() != nBandsIn) - { - CPLError(CE_Failure, CPLE_OpenFailed, - "Attempted to create netCDF with %d bands but %d names " - "provided in BAND_NAMES.", - nBandsIn, aosBandNames.Count()); - return nullptr; - } - } - // Define bands. 
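    // (Illustrative aside, not part of the patch itself: BAND_NAMES, validated
    //  earlier in this hunk, is a creation option carrying one comma-separated
    //  name per band, e.g. a hypothetical "BAND_NAMES=red,green,blue" for a
    //  3-band dataset; performing that count check before CreateLL() is what
    //  prevents the dataset object from leaking when the count does not match.)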
for (int iBand = 1; iBand <= nBandsIn; iBand++) { From b3629a70a8dcfe942b8fe053d9a3031213b785cc Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Fri, 19 Apr 2024 15:49:46 +0200 Subject: [PATCH 093/230] CPLZLibInflateEx(): fix Coverity Scan issues about dead code, missing free (CID 1544058, 1544057, 1544055) --- port/cpl_vsil_gzip.cpp | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/port/cpl_vsil_gzip.cpp b/port/cpl_vsil_gzip.cpp index 51eae04f11a5..96cd950cfb75 100644 --- a/port/cpl_vsil_gzip.cpp +++ b/port/cpl_vsil_gzip.cpp @@ -5081,6 +5081,7 @@ void *CPLZLibInflateEx(const void *ptr, size_t nBytes, void *outptr, VSIFree(outptr); return nullptr; } + outptr = nullptr; nOutAvailableBytes = nOutBufSize; } else @@ -5119,6 +5120,7 @@ void *CPLZLibInflateEx(const void *ptr, size_t nBytes, void *outptr, { if (bAllowResizeOutptr) VSIFree(outptr); + VSIFree(pszReallocatableBuf); return nullptr; } @@ -5150,6 +5152,7 @@ void *CPLZLibInflateEx(const void *ptr, size_t nBytes, void *outptr, pszReallocatableBuf = pszOutBuf; bAllowResizeOutptr = true; } +#ifndef HAVE_LIBDEFLATE else { pszOutBuf = static_cast<char *>(outptr); @@ -5157,6 +5160,7 @@ void *CPLZLibInflateEx(const void *ptr, size_t nBytes, void *outptr, if (bAllowResizeOutptr) pszReallocatableBuf = pszOutBuf; } +#endif strm.next_in = static_cast<Bytef *>(const_cast<void *>(ptr)); strm.next_out = reinterpret_cast<Bytef *>(pszOutBuf); @@ -5177,12 +5181,16 @@ void *CPLZLibInflateEx(const void *ptr, size_t nBytes, void *outptr, if (ret == Z_BUF_ERROR && strm.avail_out == 0) { +#ifdef HAVE_LIBDEFLATE + CPLAssert(bAllowResizeOutptr); +#else if (!bAllowResizeOutptr) { VSIFree(pszReallocatableBuf); inflateEnd(&strm); return nullptr; } +#endif const size_t nAlreadyWritten = nOutBufSize - nOutBytesRemaining; if (nOutBufSize > (std::numeric_limits<size_t>::max() - 1) / 2) From c5219823d869faf7fd63223270ef4a9ef04cb9fc Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Fri, 19 Apr 2024 17:50:08 +0200 Subject: [PATCH 094/230] GPKG: for robustness, use same page size for auxiliary RTRee DB than main DB --- .../gpkg/ogrgeopackagetablelayer.cpp | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp b/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp index 57ffc309ccf0..6268d1111ac4 100644 --- a/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp +++ b/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp @@ -2715,9 +2715,22 @@ void OGRGeoPackageTableLayer::StartAsyncRTree() } if (m_hAsyncDBHandle != nullptr) { + /* Make sure our auxiliary DB has the same page size as the main one. + * Because the number of RTree cells depends on the SQLite page size. + * However the sqlite implementation limits to 51 cells maximum per page, + * which is reached starting with a page size of 2048 bytes. + * As the default SQLite page size is 4096 currently, having potentially + * different page sizes >= 4096 between the main and auxiliary DBs would + * not be a practical issue, but better be consistent. 
+ */ + const int nPageSize = + SQLGetInteger(m_poDS->GetDB(), "PRAGMA page_size", nullptr); + if (SQLCommand(m_hAsyncDBHandle, - "PRAGMA journal_mode = OFF;\n" - "PRAGMA synchronous = OFF;") == OGRERR_NONE) + CPLSPrintf("PRAGMA page_size = %d;\n" + "PRAGMA journal_mode = OFF;\n" + "PRAGMA synchronous = OFF;", + nPageSize)) == OGRERR_NONE) { char *pszSQL = sqlite3_mprintf("ATTACH DATABASE '%q' AS '%q'", m_osAsyncDBName.c_str(), @@ -2727,7 +2740,7 @@ void OGRGeoPackageTableLayer::StartAsyncRTree() if (eErr == OGRERR_NONE) { - m_hRTree = gdal_sqlite_rtree_bl_new(4096); + m_hRTree = gdal_sqlite_rtree_bl_new(nPageSize); try { m_oThreadRTree = From 5f4ca74e703ddc0cf0a6597b5f6c58e0ea136722 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Fri, 19 Apr 2024 20:08:55 +0200 Subject: [PATCH 095/230] GDALChecksumImage(): read by multiple of blocks for floating-point bands to improve performance This is similar to optimization done in c854d6c97588bffa144e3daaec99ac6dd87e26d7 for the integer case. --- alg/gdalchecksum.cpp | 153 ++++++++++++++++++++++++++++++++------- autotest/alg/checksum.py | 63 ++++++++++++++++ 2 files changed, 190 insertions(+), 26 deletions(-) create mode 100644 autotest/alg/checksum.py diff --git a/alg/gdalchecksum.cpp b/alg/gdalchecksum.cpp index 4968e58b9f0e..e0f0dcafacf8 100644 --- a/alg/gdalchecksum.cpp +++ b/alg/gdalchecksum.cpp @@ -73,9 +73,132 @@ int CPL_STDCALL GDALChecksumImage(GDALRasterBandH hBand, int nXOff, int nYOff, int iPrime = 0; const GDALDataType eDataType = GDALGetRasterDataType(hBand); const bool bComplex = CPL_TO_BOOL(GDALDataTypeIsComplex(eDataType)); + const bool bIsFloatingPoint = + (eDataType == GDT_Float32 || eDataType == GDT_Float64 || + eDataType == GDT_CFloat32 || eDataType == GDT_CFloat64); - if (eDataType == GDT_Float32 || eDataType == GDT_Float64 || - eDataType == GDT_CFloat32 || eDataType == GDT_CFloat64) + const auto IntFromDouble = [](double dfVal) + { + int nVal; + if (CPLIsNan(dfVal) || CPLIsInf(dfVal)) + { + // Most compilers seem to cast NaN or Inf to 0x80000000. + // but VC7 is an exception. So we force the result + // of such a cast. + nVal = 0x80000000; + } + else + { + // Standard behavior of GDALCopyWords when converting + // from floating point to Int32. + dfVal += 0.5; + + if (dfVal < -2147483647.0) + nVal = -2147483647; + else if (dfVal > 2147483647) + nVal = 2147483647; + else + nVal = static_cast<GInt32>(floor(dfVal)); + } + return nVal; + }; + + if (bIsFloatingPoint && nXOff == 0 && nYOff == 0) + { + const GDALDataType eDstDataType = bComplex ? 
GDT_CFloat64 : GDT_Float64; + int nBlockXSize = 0; + int nBlockYSize = 0; + GDALGetBlockSize(hBand, &nBlockXSize, &nBlockYSize); + const int nDstDataTypeSize = GDALGetDataTypeSizeBytes(eDstDataType); + int nChunkXSize = nBlockXSize; + const int nChunkYSize = nBlockYSize; + if (nBlockXSize < nXSize) + { + const GIntBig nMaxChunkSize = + std::max(static_cast<GIntBig>(10 * 1000 * 1000), + GDALGetCacheMax64() / 10); + if (nDstDataTypeSize > 0 && + static_cast<GIntBig>(nXSize) * nChunkYSize < + nMaxChunkSize / nDstDataTypeSize) + { + // A full line of height nChunkYSize can fit in the maximum + // allowed memory + nChunkXSize = nXSize; + } + else + { + // Otherwise compute a size that is a multiple of nBlockXSize + nChunkXSize = static_cast<int>(std::min( + static_cast<GIntBig>(nXSize), + nBlockXSize * + std::max(static_cast<GIntBig>(1), + nMaxChunkSize / + (static_cast<GIntBig>(nBlockXSize) * + nChunkYSize * nDstDataTypeSize)))); + } + } + + double *padfLineData = static_cast<double *>( + VSI_MALLOC3_VERBOSE(nChunkXSize, nChunkYSize, nDstDataTypeSize)); + if (padfLineData == nullptr) + { + return -1; + } + const int nValsPerIter = bComplex ? 2 : 1; + + const int nYBlocks = DIV_ROUND_UP(nYSize, nChunkYSize); + const int nXBlocks = DIV_ROUND_UP(nXSize, nChunkXSize); + for (int iYBlock = 0; iYBlock < nYBlocks; ++iYBlock) + { + const int iYStart = iYBlock * nChunkYSize; + const int iYEnd = + iYBlock == nYBlocks - 1 ? nYSize : iYStart + nChunkYSize; + const int nChunkActualHeight = iYEnd - iYStart; + for (int iXBlock = 0; iXBlock < nXBlocks; ++iXBlock) + { + const int iXStart = iXBlock * nChunkXSize; + const int iXEnd = + iXBlock == nXBlocks - 1 ? nXSize : iXStart + nChunkXSize; + const int nChunkActualXSize = iXEnd - iXStart; + if (GDALRasterIO( + hBand, GF_Read, iXStart, iYStart, nChunkActualXSize, + nChunkActualHeight, padfLineData, nChunkActualXSize, + nChunkActualHeight, eDstDataType, 0, 0) != CE_None) + { + CPLError(CE_Failure, CPLE_FileIO, + "Checksum value could not be computed due to I/O " + "read error."); + nChecksum = -1; + iYBlock = nYBlocks; + break; + } + const size_t xIters = + static_cast<size_t>(nValsPerIter) * nChunkActualXSize; + for (int iY = iYStart; iY < iYEnd; ++iY) + { + // Initialize iPrime so that it is consistent with a + // per full line iteration strategy + iPrime = (nValsPerIter * + (static_cast<int64_t>(iY) * nXSize + iXStart)) % + 11; + const size_t nOffset = nValsPerIter * + static_cast<size_t>(iY - iYStart) * + nChunkActualXSize; + for (size_t i = 0; i < xIters; ++i) + { + const double dfVal = padfLineData[nOffset + i]; + nChecksum += IntFromDouble(dfVal) % anPrimes[iPrime++]; + if (iPrime > 10) + iPrime = 0; + } + nChecksum &= 0xffff; + } + } + } + + CPLFree(padfLineData); + } + else if (bIsFloatingPoint) { const GDALDataType eDstDataType = bComplex ? GDT_CFloat64 : GDT_Float64; @@ -103,30 +226,8 @@ int CPL_STDCALL GDALChecksumImage(GDALRasterBandH hBand, int nXOff, int nYOff, for (size_t i = 0; i < nCount; i++) { - double dfVal = padfLineData[i]; - int nVal; - if (CPLIsNan(dfVal) || CPLIsInf(dfVal)) - { - // Most compilers seem to cast NaN or Inf to 0x80000000. - // but VC7 is an exception. So we force the result - // of such a cast. - nVal = 0x80000000; - } - else - { - // Standard behavior of GDALCopyWords when converting - // from floating point to Int32. 
- dfVal += 0.5; - - if (dfVal < -2147483647.0) - nVal = -2147483647; - else if (dfVal > 2147483647) - nVal = 2147483647; - else - nVal = static_cast<GInt32>(floor(dfVal)); - } - - nChecksum += nVal % anPrimes[iPrime++]; + const double dfVal = padfLineData[i]; + nChecksum += IntFromDouble(dfVal) % anPrimes[iPrime++]; if (iPrime > 10) iPrime = 0; diff --git a/autotest/alg/checksum.py b/autotest/alg/checksum.py new file mode 100644 index 000000000000..8fc880ffba12 --- /dev/null +++ b/autotest/alg/checksum.py @@ -0,0 +1,63 @@ +#!/usr/bin/env pytest +############################################################################### +# Project: GDAL/OGR Test Suite +# Purpose: GDALChecksumImage() testing +# Author: Even Rouault <even.rouault @ spatialys.com> +# +############################################################################### +# Copyright (c) 2024, Even Rouault <even.rouault @ spatialys.com> +# +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the "Software"), +# to deal in the Software without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, +# and/or sell copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +# DEALINGS IN THE SOFTWARE. 
+############################################################################### + +import pytest + +from osgeo import gdal + + +@pytest.mark.parametrize("source", ["byte", "float32"]) +def test_checksum(tmp_vsimem, source): + + tmpfilename = str(tmp_vsimem / "tmp.tif") + + src_ds = gdal.Open(f"../gcore/data/{source}.tif") + ds = gdal.GetDriverByName("GTiff").CreateCopy(tmpfilename, src_ds) + assert ds.GetRasterBand(1).Checksum() == 4672 + + ds = gdal.GetDriverByName("GTiff").CreateCopy( + tmpfilename, src_ds, options=["TILED=YES", "BLOCKXSIZE=16", "BLOCKYSIZE=16"] + ) + assert ds.GetRasterBand(1).Checksum() == 4672 + + ds = gdal.GetDriverByName("GTiff").CreateCopy( + tmpfilename, src_ds, options=["TILED=YES", "BLOCKXSIZE=32", "BLOCKYSIZE=16"] + ) + assert ds.GetRasterBand(1).Checksum() == 4672 + + ds = gdal.GetDriverByName("GTiff").CreateCopy( + tmpfilename, src_ds, options=["TILED=YES", "BLOCKXSIZE=16", "BLOCKYSIZE=32"] + ) + assert ds.GetRasterBand(1).Checksum() == 4672 + + mem_ds = gdal.GetDriverByName("MEM").Create( + "", 21, 21, 1, src_ds.GetRasterBand(1).DataType + ) + mem_ds.WriteRaster(1, 1, 20, 20, src_ds.ReadRaster()) + assert mem_ds.GetRasterBand(1).Checksum(1, 1, 20, 20) == 4672 + assert mem_ds.GetRasterBand(1).Checksum() == 4568 From 415348a46a680a41654c1a15b61ef1a27981f0be Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 00:13:50 +0200 Subject: [PATCH 096/230] HDF5: improve performance of band IRasterIO() on hyperspectral products Currently running gdalcompare.py on a hyperspectral HDF5 product with bands=424, height=501, width=600 and band_chunk_size=27, height_chunk_size=32, width_chunk_size=38 takes more than one hour. With this optimization, this takes 15 seconds. --- .../data/hdf5/dummy_HDFEOS_swath_chunked.h5 | Bin 0 -> 128709 bytes autotest/gdrivers/hdf5.py | 182 +++++++++ frmts/hdf5/hdf5imagedataset.cpp | 371 +++++++++++++++--- 3 files changed, 507 insertions(+), 46 deletions(-) create mode 100644 autotest/gdrivers/data/hdf5/dummy_HDFEOS_swath_chunked.h5 diff --git a/autotest/gdrivers/data/hdf5/dummy_HDFEOS_swath_chunked.h5 b/autotest/gdrivers/data/hdf5/dummy_HDFEOS_swath_chunked.h5 new file mode 100644 index 0000000000000000000000000000000000000000..4ef0faa00f25bf23e74286a668c227a5bac0e977 GIT binary patch literal 128709 zcmeEv2Urxz*7gu2Iivw4XNDk>QGy_8$T_HBA`Qtvl2t?m$w6{PiIUeuTE$EbibMk_ zDj+H-Dj+Bz;jf;VcHQ3P?!Eu_{m*m%duMp;>aKq4^r^SbIaOU<-NqW?^cfkr8BoxV zjt)hQVj=9pn>Es}^(zG-3qFR=JCb&YXHlpa(s2bPafbp0{h?5tq<mJAd?e1+T2BuJ zkg$9>B^5)BY#{E@!5QTAzls4vJ#9;<!gi!uOOVnIlAVzbQNWz_ENldg%=Im-&9rTe zEX;{#i20C(5r1-Ufxk&9P*i`gUlx-6O3)GudXUQho&6S)43})be?$0x=xL<=3X<v> zLyBu~wQc<UesS?PbM<$^JNY}w{4i74ZdG95KbhK?Ti_^Alqf+Gej<vv1IuHHCLYVh z5>wbd1*!gsGb9~*AoL}o{`0qRzlG!hjWQ%uo&v>$q62@#IJ4W;@0h2z7dXQP{t1GA zFyF-U4%=kpW#qSkeB_S;MMK1r3Qz!<&<`$81&*P8NG|F$3blcBOi4*efY6b8S}KYO zdI$o30QLJ1^OHRLN?|t?D*N}7|Au=+`-NFRp^`~)_uo*}e`}QttSxp~YS^4`^7kNy zm|y%{83e(PbYhRTt)YWFAqUDO0L`6_x@riUog^Fx|2Ri1f%81->IE73k9??{g^sbF zuC2z8lRt@sPD8&R=ZNBUoV@VA%O*m=^*sY!H5BCjAY0EI=kPc3|8!d64-)?3^4dqd zz1(g6oLmmCqlDr=p~N4r@+Tz}|4s>AKksA5{*e+&|AZ31Uj-s+y+%Uhn>qRXQ0^~^ z{d}5O(#Q+%8t|hDe?I?v@%1r883wN2`kt;w@IS5$wF`+g_}pI@4t$DeH>OVhp8mh! 
zUH{x{IM8w0p7e3mFx+PAU=GHByY(FOO)a!-|8PB$Etq;9^Vg6N{>cos3jfp`l7A{= z<`jVUJo+yc`^j+OIv}ctto$Dh3Q`p=NUA(A+W((x|Ht|vYyZ0?Lp6tff2_SN2tEJn z&Hm4~^dFIb)wsV`;CH8htMJ31NS+9cbzNtGESYc;VJ;9QKhD8X`bWn94mE_hy!HRq zG5)2}KT-Ij5<k`WcMgA?MUZIe=k4Zs#PyFn{$A`aPZQ}kbM<p~#S;o1`yurAl7Bk= z`zaezkNH#1A3Il4tn>54|K8#L4mLpM6K7!lt;Y~#<;V<>86Y!2W`N89nE^5bWCq9# zkQpE|KxTl<0GR<Y17rrs43HThGeBm5%mA4IG6Q4=$PADfATvN_fXo1y0Wt$*2FMJM z86Y!2W`N89nE^5bWCq9#kQpE|KxTl<0GR<Y17rrs43HThGeBm5%mA4IG6Q4=$PADf zATvN_fXo1y0Wt$*2FMJM86Y!2W`N89nE^5bWCq9#kQpE|KxTl<0GR<Y17rrs43HTh zGeBm5%mA4IG6Q4=$PADfATvN_fXo1y0Wt$*2FMJM86Y!2W`N89nE^5bWCq9#kQpE| zKxTl<0GR<Y17rrs43HW4&t!lC1^uB=oTTcpqF4xfEYZXaxmaRqYpth;0?0MQk5mlh z5l`HM<-qym^uL1vLp^OvsHS$L>I#xlOOpMN4pG`ToRO`Og}JsVF^BNq1=~QDMf|D2 zz<)Ab&whJJ*PT8~6!drY>yu2x(fyOxCBy!+7(m)D9LHElaSg7mzLBY(gSM@$wUN#) zTRoc}cB=&g{>9YB+yX~|qC~-VS3$B{*dLH~4C_^&Lc9=+8zCK|(*H#T{zKsF#eu;D zVg(nHh$%4+pw{ij{zK>>SL6SZ0c0G2{gRW^Kfq#Pzaw;daOVFYCB^}YA90|HWVi5j zkaqk(xYU1i(e>g0GCuy@ct9nKNE6)t|Iwt8EB7Y`ka2*8gb+mZuVyE0PB{5{{1^uq z{z=lmma(4w7Lu;pkV%yHclN7wo`{3T3;&uHWc2^<3?S`Sj#Qx$lKsH-!#Vjo3F>>g z9>E_YWEr@6AMti^^7r)i`Z@cDeM`Uv*GZB6SOCdx;eHHh$Lomsug_jD4pd|lsp&XJ zO#dzpWakla<bLeG)1;7b0PYVaNcID>Vs;XW1cX39$b$V<3kLj)De}Jd49RX`xkx*P z^%A^Dybz2tBprKP{udSa4}nvlXo#_wL<j{+g-|mJDq;pZsh<3#6pqj9<x_!~UZ^Ci zpa6=RP(~2NL5Jc9A(h_%?m(dPlvKpaZbS(Zb|{FK1N+c<3KZVe?TC}VD@g_l<O&ib zP*GBk<P&94fdsk)4Jjq&lJqAX{L2Zle~HQcmqm~Z00y9bgMxzgPxBy%+uU`za(YEP z1&0x+v48$SjsEjbXn=TBPRc}{NB;~4`=%R-23J7Jr$?N}AdVB%i1<QMK2jbWU*KZ9 zJc;-?QU)`8f|SDk1ZNj#67epi4CM7iVIV0Lx<|yr*_%i^f}|9dPxL_;MNY{KkQpE| zKxTl<0GR<Y17rrs4E%dCu-^O>@_zmA=BK8w5bH;tpZfP~=l|DQA?K&y`HL9De2=NO zm%FF`FL+myLZ)Df`lmy}90ha=p6C8My(v%|h-FB(X%r~pl_@BR8OZq)<ncc}k3vZl zNJm;>M@6w=n0N(8#4T;W->Lln^gKLY2j3^VzWL+*EAry7zm6c<35ok}I}SLw>Aw6S zN-Opaz1Jgbg(E^Gt?legoeGjv?wDkkE@SP6N>0YGR{PP{M%c$0(mB7Gndz36Q64)z zzmMGl>-m(`VXIw#wXmwscuUSk<xk5qJ>lZt&Fa``wiqo>NTO|14K*S|jWPEE8K^Qu z-A)Jf_FD~FzdJo;cqQJnrCdXrFH-uqYVjKi3fopTp5oNbh4iVWr<6zOk2)L}*<#fq zB2rRU6?2esGgAsH@2Gyg@kZ%x9jO#S=R7NgFshh^t#{wfluVUSUM^vv*?ji5x`^<0 zntj7Q22E5Jx2GF94wX<uac;YR>MAAQZ8Wd&YzkA}gilO|2A|r%C`*d0|AcY0?lKSU z$KDINNf8S4fw}{QK#p%)(q_Iu(R5W!+b_)xPey51j_N2ws%bD$SaIr)UY~wm8Iv4a zidMW{pi^Wy+(hX{?@;;3D!I^cqp+~Ur3TC=j2nI8n5TJSdQT|>>o)ZBkbQ<Gm#Xwz zXqeS!ns^eK_eMU_r#&#&D~tbb?XH6Hm)I80H|Zlffr~yc0TScr7f@mz%^ZH$(4)|* z7qzHTp@U;=X4*MwphIQ0nBu-1Q{gn|I!a5k0e3UtAme=@FG_9BGkp%a6hp}@O{{?t zMbGDXom}-=8K^HfC!J|YHkG@K+np$AHjo@8(#5KxQxNd>0Y|li0dGf;-`lfK-^C~g zUYiPd*QBhu?Dr<gicS7)D7}rFSxK{rKN`=F;s1!MgMG_niDI&(ZGUvPR+*)}#<dxo zv#x<f{&n9npJG4v-=@<Bd;8L|f*F*zd~^NI%y+?fCxJF#VVsep-t^oeVRYS?{Y~@k zfmw1UH)3M!O3g$#j%;<Ox7o|29cEYBr4Y20pXqjPj2D-0@w-NWweejoi5j$txf6?{ z-LlO2=qIeYJ2_`Mvp<{JigGJNs(gU9^%I2cd$4_TCr$0HM4kU!OI;$xrj3|=vt0!` zCI%K=E0uQ1gWVrE+B<X|A-byRo3wzH-KzpW1^F$VbX=oKH#EGc4LVT5(J_UQG<Cf) zBEy1U{}*Euhna0j;`1hxw3i`!atfDLKR)>SjQGKrfY;%&Vj1azt?XQG8BNu@Bg}te z;8clF<h$AHG{SPle?mVxRJVShh?b9rNxu4h(W680l-T`lzv)u}3meN7*gIXUMW6Mh zIMm>*%=M68f5E8)95lP%NFT1nqCYAr6wQkpJ|cyAt%0=*!<L{~V^ssH&q@kmyR2|w z*zp7V-ABSlXU^{bB)kVN;j_Q}l}p3(`N)vgsDclASd5SECF|gM=0z<h!_y_YPQ@SC z_!!RbFPO9(4Em~Y@l7m^bmGA6Oj-1);I9f*2I1T5Pve=J`EAyyR-_d_Kos4e*U{#T z?7K&8!kHCaiA7IP?Wu~`6Lxs#h6=l;7BTMZZ2K_m$*|_gB%YKq*3&d-u0A>U#?(!R zw@(co8|jEu%<^O(I~02!%U^4VS7XQscu&}VfVK-0(AMzioS|vQ5y|lFpKK2XEwUzP z#?t3VyG~iWY0Y|7zd|)vrGtkI=$2WtBPz0Q2c?KzA&a;YSEKdVX1cA@Nm?1x9=El- zXex^u^dt&rCHRFzd?XUD%uR|t{eU-r7=~3I_G{PJ#~*-i&v!o<@Ft;Ijc-!=<n?%g z^QPm+(&D0;7E05!%Zudl{bw%^SaOE=&t4p`T-|jve_}#aLGHjc)wGM-IczUUD2Av7 zCZbJ`g;;;Lss8o3W}Keh5+PQZg;_z0g^4)QMNwKQ3x?h+*NRMJ<wVUVX!TM|l_pbe zJo=I@aa!MzI;pkO?CvmL$?OW(v1aKLTkGG28f0D?oNZB*P4{FN+BbWs^Zd#;`3E~P 
z^qFesBns3r^b^(Il-PKVo0JEAu;pIttGpv~PpNyn=D~)v;4Ia{+u4sl3e`+dyfN#p zFz5a-s>y3`h2o*SF*DS7^5!5MM2)UVaI)2wYNsDh{pHjl?O~BARvMM?!EOqPs_LQ~ z5uXUvCjQ3qfLSqzyD_c;Gfi=OUfO+W%;YPWS{&;&5KA=9O<&kOcBv&PRpzsNs-!LV z<Wuu9OV1L`9XUFsDPG4FB8^>U<XRMC8AaIM_{K<O>`2IIP1A%9OTU!+mCK)K*1Y@N zJPYDV-dsl;rPp_zI&<1AhMuqVf>_$fXZt3J`XtHpXP62%D$$mxwlHj!C=(#+;KA8C z(|r5sCMj>*oeTJfQL=ZlJjV?>cXyh19_-ZaoYA0)G%ill+yQNC^K?r4I`2W-&!;)R zn7nI=l*&oWk&`R0x}dQ%pz1qn?6!*TlDy>hn(<+<zBt5>hB+nFa<{0n<S^kyE*qBc z*{5&c7x38M<Zo<{Bz8ngr>a3G&Hnz@MyyX<+WDXxod?bj+XOgjKM=7{|I!q<&HHHJ ztXF4SY93Y~GiYzrRabpKyRsrnE_1cTCz7^NwdTS83>l+)#6#2~?+3hiQ($^?f1h0W zOaGIl?ho=sUOy@rUK)@q?_Qo9s(GjKK_>0C(M8Dj?=XgIp`%8<GwJB7H(>VnyHV40 zFpF45^XZ5g>gtA%vnqrcI7ZxN%$3%deIaFXR0Nyr+qhHt<q7p^^vYw8Gx}dSm(FV{ zRhJJ(dtYQmCl>eka|SV;E@~bznULb6aX5C>!f22GM0L_8BTbcS!ryL=^Ybk!4DT6k zuHI#^Mzs*9;zSIOl-ej<pJXFOW^-e0_=Y>fm@DbhjhNk1a+bQRrnW3sP`_<<m|l=G z?SE2<zdBwxUNU}Z{K9zAc;$HMc=7nf@$&JTMg~qL{T(mNp=~o^`<$>1>WELwZ_2#M zSySvg-iFIu^`B$E_9#O$K}5Oi@ic8>k_{_lFpXxQiS+vRP-45DY2mhHQwIxxSd@*C zaKgCuFb$4~-t6t@4b!DfA93>aOUuf-eR&||1a=3O7HfbN$KErRG1kY17>i+*T~)jl zQj*NL_ikw}&kGvztVF*$KI>9mst|wN@3#SKfqdhGlY{-W?&q?GJ1^k%n^TvjEZQ^k zH3rUV+Aj`N?av7L^l`T@(chabflA7hB9?4=f)<XF#|xkaSr7?`ux&t}Nq5jnVUIOf z2ty~c`aywcF7>Gy!%3UjE91JI<*Bk*cPt0?Y^noL>&nt+IlM(_irRR}?9uZ6j-))l zI^gQ~^4yQM+&2a*EnGHpE7x#;(YnW9>)MWUD%<&epgx5oWU0G=k1g^B&*$Wg_I*%G z-8u@~3ba%tuf2|K6((2}dmAuhGQ^%+PjTKXH+lN7Bb%u)eeTGFak0_Y^R|ALv!%e1 zuBH~JUQ9iodMh<LH3Vx>I@i->%blmaSZm|yPxA&}-amo9KV)9o$F()nW;gq_t|<=z zo4b?yId9utT9!PLGt~EF)n5jE@WUjd@3RB)fSU(k+@{SAnkx)@M5ZH*5k2uXv#QjT zK2r|rE)F?5*M#m3-y7LJ;xjrE;x||VuAi1_)T!6Gt5dhrrqiTzU#CImo_lL+N7Wca zGW5NUL)%Y;t=o>s6yxtFRPh$CyIM?l>uv`2a{SRtjRZwDMf+Rj#pPA|m^Jquu+1Y_ zF4R{<^zjIZ)QjurA8f#U<zJ)YY7_~e_C7PzAR5YUq`gJw-A+My%BulOB?DO59<Gi{ z<GVmivFf}9nn)S@%J`FUaQgC=2m7)zrf%8Rv1c;xP#yA=-0HsL#vn)v2v%b!u(X_V zw19Ebwl~#1iJ>X6Ilp}sw^+4(l-^bVj&4k@Q7j+XvjX{aHRRL!qalJ35q2oaq8_^V z2D<oEuG9^e*l$D$*YyTa0*{W`v2O{#*xCP~j6Se7<=q2D>~V(EZ=$w;-MrMz$Tw9y zH+k$S8;!xISf=puUUxP0&gi?nPSd4xSKiMzoMC-x_u>O>yM5NnZObkVgAr?)Au8S5 zziC>;Y7Y@eISSf_x|g*?-!Ec7DikGTFDljeCf%oW8M2ka2x*EG6<;0N!A;X)<$dvD zC-bbwEmA{_cE3}K(|)41n9Lr2#J?tkJv?Y~+_xL&QZzB%ZVqjq61JY@)t3G>1z$Q_ z65Arxx^DE}kgrek4iP)9B=e}RpgW;)Mdcnv=VcQMf(sGNg!bGPH*PO_o)}>-xe-$V zn=tG$=30cblgYw1kHSWV;!SjiKIM%}kIa_z7}1=zpJ?A*D-r$Hih}0qEyp4(A4uGa z7(pUqwc)1zHoA^!9=@9ctjVKne1W|nu~3`4AR@Tud5I!ggC<_$R*7<*#oi+Avq2-w zx=9ow_fs?<%@~AVGYPqS#wvwqHdIzD)V)TgaW*j$lo9c~x#PETG5f<z2BFOmCRy+d zYR;A0hdSc+h^JDqQSfoeS*gwf(Tl2VdTCO5`%U7+4(AOU#)(~5n^${>Q|x%2yW{-} zbH$G6yrsGU^sH-n_wjSucR$Xpq;k-3%vRwa&ef0SXwb%q+?jFr5wch{-52teS0eHG z8-eqzR-n<qjSy%wmZ-j1pi-J;7+VDXKo#bMDt148dR6HW`gA$$ek~p6%Q&i%sFu%R z<=%6Lg;&k3VzV&b2TsSQw7A9VV4r$rEY#U_{^npjHD&8}$J+(?xKa5qh%iRw%g-v6 zQq4~ef^GMzT&r@Vq9VsL-#DpGop+t9?V5zckpg_t7v8cE)?Icy1dTkYATy%>*kxC_ zVNVz)RGw~g$%CsAQg5;fG1sUws<0BiV=b}wlOyxP+vS=?zq!OM8_gz<ze(4Z@3?6G z65A@slNV$dr+rOrai{BrYHNQGhN10YXzN!yU?;g0^66bJh%@^;-{mR;3)=ktK|yz9 zV+QYX&S0M%(-*ewCZ^s5UCgMUxMw6(LoGRB>_e$@&3td{t)17XjR`I0L;swbY3j^I z!E&na2d0(!pNd}ZQA(AJ)OYySm}-y*lFn3jC}~aI1CrAg_8lkhYZdj&s@-_czGcNg z{H%FVf5<Ph<u(;ty^pPSe$Qp3+V{RaSC$*xjo2Mu#eeQ;$W&VzQ1Ka8>ON7cY@rBw zdJ$uUC5v94eS-c#Yg9yX5Jj*_A+>R~wZxl@Du!^b@N()ap_;O)Z{Oye7L)R!`^>Gz zd&t^*(x<}gPH}V5mE*6(p9{9+FKI`!7`z9YlqI8R7Wa>Dc06a^xLe&u^SMCi#jAJF z%his2{UPH?4cqRn&{WN;HUd5#Rwn>!p>ALbW%=}?y%F^-1Z%gZQ*S9otux3%=cFsh zz{|@U#8CX`{S(8qtyxNOM#28GQj%grAI2pl#Z)K9Ev!vvucozQHzp)FCzt27ffjpU z?o!M-ezd&WD*oFy@1Dd+s)UR~8hdDz#ZLGg?`aBcGydfp+ROCQoe)%lrKuI6Ral$k zSa5yPp%`jOYIDH|m*oA?#)T15jo5nXET2+|Fcc-5R@a_sH}};`LFYI1$~+i}xqE7} zTJLDgU4zYPeIVi3tf0o*r%}T_*JUhc+j5ra&Ll{Tqj&mqZW%L)4)*6fbVBB4v%FJV 
[GIT binary patch data (base85-encoded literal) omitted]

diff --git a/autotest/gdrivers/hdf5.py b/autotest/gdrivers/hdf5.py
index 983a77735901..2263156e6e81 100755
--- a/autotest/gdrivers/hdf5.py
+++ b/autotest/gdrivers/hdf5.py
@@ -1316,3 +1316,185 @@ def test_gdal_subdataset_bogus(bogus):
     """Test it doesn't crash"""
 
     gdal.GetSubdatasetInfo(bogus)
+
+
+###############################################################################
+# Test opening a chunked HDF5EOS swath file
+
+
+def test_hdf5_eos_swath_chunking_optimization():
+
+    if False:
+
+        import h5py
+        import numpy as np
+
+        f = h5py.File("data/hdf5/dummy_HDFEOS_swath_chunked.h5", "w")
+        HDFEOS_INFORMATION = f.create_group("HDFEOS INFORMATION")
+        # Hint from https://forum.hdfgroup.org/t/nullpad-nullterm-strings/9107
+        # to use the low-level API to be able to generate NULLTERM strings
+        # without padding bytes
+        HDFEOSVersion_type = h5py.h5t.TypeID.copy(h5py.h5t.C_S1)
+        HDFEOSVersion_type.set_size(32)
+        HDFEOSVersion_type.set_strpad(h5py.h5t.STR_NULLTERM)
+        # HDFEOS_INFORMATION.attrs.create("HDFEOSVersion", "HDFEOS_5.1.15", dtype=HDFEOSVersion_type)
+        HDFEOSVersion_attr = h5py.h5a.create(
+            HDFEOS_INFORMATION.id,
+            "HDFEOSVersion".encode("ASCII"),
+            HDFEOSVersion_type,
+            
h5py.h5s.create(h5py.h5s.SCALAR), + ) + HDFEOSVersion_value = "HDFEOS_5.1.15".encode("ASCII") + HDFEOSVersion_value = np.frombuffer( + HDFEOSVersion_value, dtype="|S%d" % len(HDFEOSVersion_value) + ) + HDFEOSVersion_attr.write(HDFEOSVersion_value) + + StructMetadata_0_type = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + StructMetadata_0_type.set_size(32000) + StructMetadata_0_type.set_strpad(h5py.h5t.STR_NULLTERM) + StructMetadata_0 = """GROUP=SwathStructure + GROUP=SWATH_1 + SwathName="MySwath" + GROUP=Dimension + OBJECT=Dimension_1 + DimensionName="Band" + Size=20 + END_OBJECT=Dimension_1 + OBJECT=Dimension_2 + DimensionName="AlongTrack" + Size=30 + END_OBJECT=Dimension_2 + OBJECT=Dimension_3 + DimensionName="CrossTrack" + Size=40 + END_OBJECT=Dimension_3 + END_GROUP=Dimension + GROUP=DimensionMap + END_GROUP=DimensionMap + GROUP=IndexDimensionMap + END_GROUP=IndexDimensionMap + GROUP=GeoField + OBJECT=GeoField_1 + GeoFieldName="Latitude" + DataType=H5T_NATIVE_FLOAT + DimList=("AlongTrack","CrossTrack") + MaxdimList=("AlongTrack","CrossTrack") + END_OBJECT=GeoField_1 + OBJECT=GeoField_2 + GeoFieldName="Longitude" + DataType=H5T_NATIVE_FLOAT + DimList=("AlongTrack","CrossTrack") + MaxdimList=("AlongTrack","CrossTrack") + END_OBJECT=GeoField_2 + OBJECT=GeoField_3 + GeoFieldName="Time" + DataType=H5T_NATIVE_FLOAT + DimList=("AlongTrack") + MaxdimList=("AlongTrack") + END_OBJECT=GeoField_3 + END_GROUP=GeoField + GROUP=DataField + OBJECT=DataField_1 + DataFieldName="MyDataField" + DataType=H5T_NATIVE_FLOAT + DimList=("Band","AlongTrack","CrossTrack") + MaxdimList=("Band","AlongTrack","CrossTrack") + END_OBJECT=DataField_1 + END_GROUP=DataField + GROUP=ProfileField + END_GROUP=ProfileField + GROUP=MergedFields + END_GROUP=MergedFields + END_GROUP=SWATH_1 +END_GROUP=SwathStructure +GROUP=GridStructure +END_GROUP=GridStructure +END +""" + StructMetadata_0_dataset = h5py.h5d.create( + HDFEOS_INFORMATION.id, + "StructMetadata.0".encode("ASCII"), + StructMetadata_0_type, + h5py.h5s.create(h5py.h5s.SCALAR), + ) + StructMetadata_0_value = StructMetadata_0.encode("ASCII") + StructMetadata_0_value = np.frombuffer( + StructMetadata_0_value, dtype="|S%d" % len(StructMetadata_0_value) + ) + StructMetadata_0_dataset.write( + h5py.h5s.create(h5py.h5s.SCALAR), + h5py.h5s.create(h5py.h5s.SCALAR), + StructMetadata_0_value, + ) + + HDFEOS = f.create_group("HDFEOS") + ADDITIONAL = HDFEOS.create_group("ADDITIONAL") + ADDITIONAL.create_group("FILE_ATTRIBUTES") + SWATHS = HDFEOS.create_group("SWATHS") + MySwath = SWATHS.create_group("MySwath") + DataFields = MySwath.create_group("Data Fields") + ds = DataFields.create_dataset( + "MyDataField", (20, 30, 40), chunks=(3, 4, 6), dtype="f", compression="gzip" + ) + ds[...] = np.array([i for i in range(20 * 30 * 40)]).reshape(ds.shape) + GeoLocationFields = MySwath.create_group("Geolocation Fields") + ds = GeoLocationFields.create_dataset("Longitude", (20, 30), dtype="f") + ds[...] = np.array([i for i in range(20 * 30)]).reshape(ds.shape) + ds = GeoLocationFields.create_dataset("Latitude", (20, 30), dtype="f") + ds[...] 
= np.array([i for i in range(20 * 30)]).reshape(ds.shape) + f.close() + + ds = gdal.Open( + 'HDF5:"data/hdf5/dummy_HDFEOS_swath_chunked.h5"://HDFEOS/SWATHS/MySwath/Data_Fields/MyDataField' + ) + mem_ds = gdal.Translate("", ds, format="MEM") + + ds.GetRasterBand(1).ReadRaster(0, 0, 1, 1) + assert ds.GetMetadataItem("WholeBandChunkOptim", "__DEBUG__") == "DISABLED" + + ds.GetRasterBand(1).ReadRaster(0, 0, ds.RasterXSize, 1) + assert ( + ds.GetMetadataItem("WholeBandChunkOptim", "__DEBUG__") + == "DETECTION_IN_PROGRESS" + ) + ds.GetRasterBand(1).ReadRaster(0, 2, ds.RasterXSize, 1) + assert ds.GetMetadataItem("WholeBandChunkOptim", "__DEBUG__") == "DISABLED" + + ds.GetRasterBand(1).ReadRaster(0, 0, ds.RasterXSize, 1) + assert ( + ds.GetMetadataItem("WholeBandChunkOptim", "__DEBUG__") + == "DETECTION_IN_PROGRESS" + ) + ds.GetRasterBand(2).ReadRaster(0, 0, ds.RasterXSize, 1) + assert ds.GetMetadataItem("WholeBandChunkOptim", "__DEBUG__") == "DISABLED" + + ds = gdal.Open( + 'HDF5:"data/hdf5/dummy_HDFEOS_swath_chunked.h5"://HDFEOS/SWATHS/MySwath/Data_Fields/MyDataField' + ) + assert ( + ds.GetMetadataItem("WholeBandChunkOptim", "__DEBUG__") + == "DETECTION_IN_PROGRESS" + ) + for i in range(ds.RasterCount): + assert ( + ds.GetRasterBand(i + 1).Checksum() == mem_ds.GetRasterBand(i + 1).Checksum() + ) + assert ds.GetMetadataItem("WholeBandChunkOptim", "__DEBUG__") == ( + "DETECTION_IN_PROGRESS" if i == 0 else "ENABLED" + ) + for i in range(ds.RasterCount): + assert ( + ds.GetRasterBand(i + 1).ReadRaster() + == mem_ds.GetRasterBand(i + 1).ReadRaster() + ) + assert ds.GetMetadataItem("WholeBandChunkOptim", "__DEBUG__") == "ENABLED" + assert ds.GetRasterBand(i + 1).ReadRaster(1, 2, 3, 4) == mem_ds.GetRasterBand( + i + 1 + ).ReadRaster(1, 2, 3, 4) + assert ds.GetMetadataItem("WholeBandChunkOptim", "__DEBUG__") == "ENABLED" + + blockxsize, blockysize = ds.GetRasterBand(1).GetBlockSize() + assert ds.GetRasterBand(1).ReadBlock(1, 2) == mem_ds.GetRasterBand(1).ReadRaster( + 1 * blockxsize, 2 * blockysize, blockxsize, blockysize + ) diff --git a/frmts/hdf5/hdf5imagedataset.cpp b/frmts/hdf5/hdf5imagedataset.cpp index 9c50f7c9c8c1..e192ab79c24d 100644 --- a/frmts/hdf5/hdf5imagedataset.cpp +++ b/frmts/hdf5/hdf5imagedataset.cpp @@ -83,6 +83,37 @@ class HDF5ImageDataset final : public HDF5Dataset int m_nYIndex = -1; int m_nOtherDimIndex = -1; + int m_nBlockXSize = 0; + int m_nBlockYSize = 0; + int m_nBandChunkSize = 1; //! Number of bands in a chunk + + enum WholeBandChunkOptim + { + WBC_DETECTION_IN_PROGRESS, + WBC_DISABLED, + WBC_ENABLED, + }; + + //! Flag to detect if the read pattern of HDF5ImageRasterBand::IRasterIO() + // is whole band after whole band. + WholeBandChunkOptim m_eWholeBandChunkOptim = WBC_DETECTION_IN_PROGRESS; + //! Value of nBand during last HDF5ImageRasterBand::IRasterIO() call + int m_nLastRasterIOBand = -1; + //! Value of nXOff during last HDF5ImageRasterBand::IRasterIO() call + int m_nLastRasterIOXOff = -1; + //! Value of nYOff during last HDF5ImageRasterBand::IRasterIO() call + int m_nLastRasterIOYOff = -1; + //! Value of nXSize during last HDF5ImageRasterBand::IRasterIO() call + int m_nLastRasterIOXSize = -1; + //! Value of nYSize during last HDF5ImageRasterBand::IRasterIO() call + int m_nLastRasterIOYSize = -1; + //! Value such that m_abyBandChunk represent band data in the range + // [m_iCurrentBandChunk * m_nBandChunkSize, (m_iCurrentBandChunk+1) * m_nBandChunkSize[ + int m_iCurrentBandChunk = -1; + //! 
Cached values (in native data type) for bands in the range + // [m_iCurrentBandChunk * m_nBandChunkSize, (m_iCurrentBandChunk+1) * m_nBandChunkSize[ + std::vector<GByte> m_abyBandChunk{}; + CPLErr CreateODIMH5Projection(); public: @@ -106,6 +137,9 @@ class HDF5ImageDataset final : public HDF5Dataset GSpacing nBandSpace, GDALRasterIOExtraArg *psExtraArg) override; + const char *GetMetadataItem(const char *pszName, + const char *pszDomain = "") override; + Hdf5ProductType GetSubdatasetType() const { return iSubdatasetType; @@ -220,8 +254,9 @@ class HDF5ImageRasterBand final : public GDALPamRasterBand { friend class HDF5ImageDataset; - bool bNoDataSet; - double dfNoDataValue; + bool bNoDataSet = false; + double dfNoDataValue = -9999.0; + int m_nIRasterIORecCounter = 0; public: HDF5ImageRasterBand(HDF5ImageDataset *, int, GDALDataType); @@ -251,32 +286,12 @@ HDF5ImageRasterBand::~HDF5ImageRasterBand() /************************************************************************/ HDF5ImageRasterBand::HDF5ImageRasterBand(HDF5ImageDataset *poDSIn, int nBandIn, GDALDataType eType) - : bNoDataSet(false), dfNoDataValue(-9999.0) { poDS = poDSIn; nBand = nBandIn; eDataType = eType; - nBlockXSize = poDS->GetRasterXSize(); - nBlockYSize = 1; - - // Check for chunksize and set it as the blocksize (optimizes read). - const hid_t listid = H5Dget_create_plist(poDSIn->dataset_id); - if (listid > 0) - { - if (H5Pget_layout(listid) == H5D_CHUNKED) - { - hsize_t panChunkDims[3] = {0, 0, 0}; - const int nDimSize = H5Pget_chunk(listid, 3, panChunkDims); - CPL_IGNORE_RET_VAL(nDimSize); - CPLAssert(nDimSize == poDSIn->ndims); - nBlockXSize = static_cast<int>(panChunkDims[poDSIn->GetXIndex()]); - if (poDSIn->GetYIndex() >= 0) - nBlockYSize = - static_cast<int>(panChunkDims[poDSIn->GetYIndex()]); - } - - H5Pclose(listid); - } + nBlockXSize = poDSIn->m_nBlockXSize; + nBlockYSize = poDSIn->m_nBlockYSize; // netCDF convention for nodata bNoDataSet = @@ -308,8 +323,6 @@ double HDF5ImageRasterBand::GetNoDataValue(int *pbSuccess) CPLErr HDF5ImageRasterBand::IReadBlock(int nBlockXOff, int nBlockYOff, void *pImage) { - HDF5_GLOBAL_LOCK(); - HDF5ImageDataset *poGDS = static_cast<HDF5ImageDataset *>(poDS); memset(pImage, 0, @@ -321,6 +334,29 @@ CPLErr HDF5ImageRasterBand::IReadBlock(int nBlockXOff, int nBlockYOff, return CE_None; } + const int nXOff = nBlockXOff * nBlockXSize; + const int nYOff = nBlockYOff * nBlockYSize; + const int nXSize = std::min(nBlockXSize, nRasterXSize - nXOff); + const int nYSize = std::min(nBlockYSize, nRasterYSize - nYOff); + if (poGDS->m_eWholeBandChunkOptim == HDF5ImageDataset::WBC_ENABLED) + { + const bool bIsBandInterleavedData = + poGDS->ndims == 3 && poGDS->m_nOtherDimIndex == 0 && + poGDS->GetYIndex() == 1 && poGDS->GetXIndex() == 2; + if (poGDS->nBands == 1 || bIsBandInterleavedData) + { + GDALRasterIOExtraArg sExtraArg; + INIT_RASTERIO_EXTRA_ARG(sExtraArg); + const int nDTSize = GDALGetDataTypeSizeBytes(eDataType); + return IRasterIO(GF_Read, nXOff, nYOff, nXSize, nYSize, pImage, + nXSize, nYSize, eDataType, nDTSize, + static_cast<GSpacing>(nDTSize) * nBlockXSize, + &sExtraArg); + } + } + + HDF5_GLOBAL_LOCK(); + hsize_t count[3] = {0, 0, 0}; H5OFFSET_TYPE offset[3] = {0, 0, 0}; hsize_t col_dims[3] = {0, 0, 0}; @@ -335,22 +371,14 @@ CPLErr HDF5ImageRasterBand::IReadBlock(int nBlockXOff, int nBlockYOff, } const int nYIndex = poGDS->GetYIndex(); - if (nYIndex >= 0) - offset[nYIndex] = nBlockYOff * static_cast<hsize_t>(nBlockYSize); - offset[poGDS->GetXIndex()] = nBlockXOff * 
static_cast<hsize_t>(nBlockXSize); - if (nYIndex >= 0) - count[nYIndex] = nBlockYSize; - count[poGDS->GetXIndex()] = nBlockXSize; - // Blocksize may not be a multiple of imagesize. if (nYIndex >= 0) { - count[nYIndex] = std::min(hsize_t(nBlockYSize), - poDS->GetRasterYSize() - offset[nYIndex]); + offset[nYIndex] = nYOff; + count[nYIndex] = nYSize; } - count[poGDS->GetXIndex()] = - std::min(hsize_t(nBlockXSize), - poDS->GetRasterXSize() - offset[poGDS->GetXIndex()]); + offset[poGDS->GetXIndex()] = nXOff; + count[poGDS->GetXIndex()] = nXSize; // Select block from file space. herr_t status = H5Sselect_hyperslab(poGDS->dataspace_id, H5S_SELECT_SET, @@ -402,14 +430,187 @@ CPLErr HDF5ImageRasterBand::IRasterIO(GDALRWFlag eRWFlag, int nXOff, int nYOff, { HDF5ImageDataset *poGDS = static_cast<HDF5ImageDataset *>(poDS); - const bool bIsExpectedLayout = - ((poGDS->ndims == 3 && poGDS->m_nOtherDimIndex == 0 && - poGDS->GetYIndex() == 1 && poGDS->GetXIndex() == 2) || - (poGDS->ndims == 2 && poGDS->GetYIndex() == 0 && - poGDS->GetXIndex() == 1)); + const bool bIsBandInterleavedData = + poGDS->ndims == 3 && poGDS->m_nOtherDimIndex == 0 && + poGDS->GetYIndex() == 1 && poGDS->GetXIndex() == 2; const int nDTSize = GDALGetDataTypeSizeBytes(eDataType); + // Try to detect if we read whole bands by chunks of whole lines + // If so, then read and cache whole band (or group of m_nBandChunkSize bands) + // to save HDF5 decompression. + if (m_nIRasterIORecCounter == 0) + { + bool bInvalidateWholeBandChunkOptim = false; + if (!(nXSize == nBufXSize && nYSize == nBufYSize)) + { + bInvalidateWholeBandChunkOptim = true; + } + // Is the first request on band 1, line 0 and one or several full lines? + else if (poGDS->m_eWholeBandChunkOptim != + HDF5ImageDataset::WBC_ENABLED && + nBand == 1 && nXOff == 0 && nYOff == 0 && + nXSize == nRasterXSize) + { + poGDS->m_eWholeBandChunkOptim = + HDF5ImageDataset::WBC_DETECTION_IN_PROGRESS; + poGDS->m_nLastRasterIOBand = 1; + poGDS->m_nLastRasterIOXOff = nXOff; + poGDS->m_nLastRasterIOYOff = nYOff; + poGDS->m_nLastRasterIOXSize = nXSize; + poGDS->m_nLastRasterIOYSize = nYSize; + } + else if (poGDS->m_eWholeBandChunkOptim == + HDF5ImageDataset::WBC_DETECTION_IN_PROGRESS) + { + if (poGDS->m_nLastRasterIOBand == 1 && nBand == 1) + { + // Is this request a continuation of the previous one? + if (nXOff == 0 && poGDS->m_nLastRasterIOXOff == 0 && + nYOff == poGDS->m_nLastRasterIOYOff + + poGDS->m_nLastRasterIOYSize && + poGDS->m_nLastRasterIOXSize == nRasterXSize && + nXSize == nRasterXSize) + { + poGDS->m_nLastRasterIOXOff = nXOff; + poGDS->m_nLastRasterIOYOff = nYOff; + poGDS->m_nLastRasterIOXSize = nXSize; + poGDS->m_nLastRasterIOYSize = nYSize; + } + else + { + bInvalidateWholeBandChunkOptim = true; + } + } + else if (poGDS->m_nLastRasterIOBand == 1 && nBand == 2) + { + // Are we switching to band 2 while having fully read band 1? 
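+                // The new request must start at row 0 over the full raster
+                // width, and the accumulated band-1 requests must have
+                // covered the whole band. Even then, the optimization is
+                // only enabled if a chunk of m_nBandChunkSize bands fits
+                // within a tenth of the usable physical RAM; otherwise it
+                // is disabled.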
+ if (nXOff == 0 && nYOff == 0 && nXSize == nRasterXSize && + poGDS->m_nLastRasterIOXOff == 0 && + poGDS->m_nLastRasterIOXSize == nRasterXSize && + poGDS->m_nLastRasterIOYOff + poGDS->m_nLastRasterIOYSize == + nRasterYSize) + { + if ((poGDS->m_nBandChunkSize > 1 || + nBufYSize < nRasterYSize) && + static_cast<int64_t>(poGDS->m_nBandChunkSize) * + nRasterXSize * nRasterYSize * nDTSize < + CPLGetUsablePhysicalRAM() / 10) + { + poGDS->m_eWholeBandChunkOptim = + HDF5ImageDataset::WBC_ENABLED; + } + else + { + bInvalidateWholeBandChunkOptim = true; + } + } + else + { + bInvalidateWholeBandChunkOptim = true; + } + } + else + { + bInvalidateWholeBandChunkOptim = true; + } + } + if (bInvalidateWholeBandChunkOptim) + { + poGDS->m_eWholeBandChunkOptim = HDF5ImageDataset::WBC_DISABLED; + poGDS->m_nLastRasterIOBand = -1; + poGDS->m_nLastRasterIOXOff = -1; + poGDS->m_nLastRasterIOYOff = -1; + poGDS->m_nLastRasterIOXSize = -1; + poGDS->m_nLastRasterIOYSize = -1; + } + } + + if (poGDS->m_eWholeBandChunkOptim == HDF5ImageDataset::WBC_ENABLED && + nXSize == nBufXSize && nYSize == nBufYSize) + { + if (poGDS->nBands == 1 || bIsBandInterleavedData) + { + if (poGDS->m_iCurrentBandChunk < 0) + CPLDebug("HDF5", "Using whole band chunk caching"); + const int iBandChunk = (nBand - 1) / poGDS->m_nBandChunkSize; + if (iBandChunk != poGDS->m_iCurrentBandChunk) + { + poGDS->m_abyBandChunk.resize( + static_cast<size_t>(poGDS->m_nBandChunkSize) * + nRasterXSize * nRasterYSize * nDTSize); + + HDF5_GLOBAL_LOCK(); + + hsize_t count[3] = { + static_cast<hsize_t>( + std::min(poGDS->nBands, + (iBandChunk + 1) * poGDS->m_nBandChunkSize) - + iBandChunk * poGDS->m_nBandChunkSize), + static_cast<hsize_t>(nRasterYSize), + static_cast<hsize_t>(nRasterXSize)}; + H5OFFSET_TYPE offset[3] = { + static_cast<H5OFFSET_TYPE>(iBandChunk * + poGDS->m_nBandChunkSize), + static_cast<H5OFFSET_TYPE>(0), + static_cast<H5OFFSET_TYPE>(0)}; + herr_t status = + H5Sselect_hyperslab(poGDS->dataspace_id, H5S_SELECT_SET, + offset, nullptr, count, nullptr); + if (status < 0) + return CE_Failure; + + const hid_t memspace = + H5Screate_simple(poGDS->ndims, count, nullptr); + H5OFFSET_TYPE mem_offset[3] = {0, 0, 0}; + status = + H5Sselect_hyperslab(memspace, H5S_SELECT_SET, mem_offset, + nullptr, count, nullptr); + if (status < 0) + { + H5Sclose(memspace); + return CE_Failure; + } + + status = H5Dread(poGDS->dataset_id, poGDS->native, memspace, + poGDS->dataspace_id, H5P_DEFAULT, + poGDS->m_abyBandChunk.data()); + + H5Sclose(memspace); + + if (status < 0) + { + CPLError( + CE_Failure, CPLE_AppDefined, + "HDF5ImageRasterBand::IRasterIO(): H5Dread() failed"); + return CE_Failure; + } + + poGDS->m_iCurrentBandChunk = iBandChunk; + } + + for (int iY = 0; iY < nYSize; ++iY) + { + GDALCopyWords(poGDS->m_abyBandChunk.data() + + static_cast<size_t>((nBand - 1) % + poGDS->m_nBandChunkSize) * + nRasterYSize * nRasterXSize * nDTSize + + static_cast<size_t>(nYOff + iY) * + nRasterXSize * nDTSize + + nXOff * nDTSize, + eDataType, nDTSize, + static_cast<GByte *>(pData) + + static_cast<size_t>(iY) * nLineSpace, + eBufType, static_cast<int>(nPixelSpace), nXSize); + } + return CE_None; + } + } + + const bool bIsExpectedLayout = + (bIsBandInterleavedData || + (poGDS->ndims == 2 && poGDS->GetYIndex() == 0 && + poGDS->GetXIndex() == 1)); if (eRWFlag == GF_Read && bIsExpectedLayout && nXSize == nBufXSize && nYSize == nBufYSize && eBufType == eDataType && nPixelSpace == nDTSize && nLineSpace == nXSize * nPixelSpace) @@ -474,10 +675,13 @@ CPLErr 
HDF5ImageRasterBand::IRasterIO(GDALRWFlag eRWFlag, int nXOff, int nYOff, CPLAssert(pMemData); // Read from HDF5 into the temporary MEMDataset using the // natural interleaving of the HDF5 dataset - if (IRasterIO(eRWFlag, nXOff, nYOff, nXSize, nYSize, pMemData, + ++m_nIRasterIORecCounter; + CPLErr eErr = + IRasterIO(eRWFlag, nXOff, nYOff, nXSize, nYSize, pMemData, nXSize, nYSize, eDataType, nDTSize, - static_cast<GSpacing>(nXSize) * nDTSize, - psExtraArg) != CE_None) + static_cast<GSpacing>(nXSize) * nDTSize, psExtraArg); + --m_nIRasterIORecCounter; + if (eErr != CE_None) { return CE_Failure; } @@ -930,6 +1134,58 @@ GDALDataset *HDF5ImageDataset::Open(GDALOpenInfo *poOpenInfo) } } + poDS->m_nBlockXSize = poDS->GetRasterXSize(); + poDS->m_nBlockYSize = 1; + poDS->m_nBandChunkSize = 1; + + // Check for chunksize and set it as the blocksize (optimizes read). + const hid_t listid = H5Dget_create_plist(poDS->dataset_id); + if (listid > 0) + { + if (H5Pget_layout(listid) == H5D_CHUNKED) + { + hsize_t panChunkDims[3] = {0, 0, 0}; + const int nDimSize = H5Pget_chunk(listid, 3, panChunkDims); + CPL_IGNORE_RET_VAL(nDimSize); + CPLAssert(nDimSize == poDS->ndims); + poDS->m_nBlockXSize = + static_cast<int>(panChunkDims[poDS->GetXIndex()]); + if (poDS->GetYIndex() >= 0) + poDS->m_nBlockYSize = + static_cast<int>(panChunkDims[poDS->GetYIndex()]); + if (nBands > 1) + { + poDS->m_nBandChunkSize = + static_cast<int>(panChunkDims[poDS->m_nOtherDimIndex]); + + poDS->SetMetadataItem("BAND_CHUNK_SIZE", + CPLSPrintf("%d", poDS->m_nBandChunkSize), + "IMAGE_STRUCTURE"); + } + } + + const int nFilters = H5Pget_nfilters(listid); + for (int i = 0; i < nFilters; ++i) + { + unsigned int flags = 0; + size_t cd_nelmts = 0; + char szName[64 + 1] = {0}; + const auto eFilter = H5Pget_filter(listid, i, &flags, &cd_nelmts, + nullptr, 64, szName); + if (eFilter == H5Z_FILTER_DEFLATE) + { + poDS->SetMetadataItem("COMPRESSION", "DEFLATE", + "IMAGE_STRUCTURE"); + } + else if (eFilter == H5Z_FILTER_SZIP) + { + poDS->SetMetadataItem("COMPRESSION", "SZIP", "IMAGE_STRUCTURE"); + } + } + + H5Pclose(listid); + } + for (int i = 0; i < nBands; i++) { HDF5ImageRasterBand *const poBand = new HDF5ImageRasterBand( @@ -1263,6 +1519,29 @@ CPLErr HDF5ImageDataset::CreateProjections() return CE_None; } +/************************************************************************/ +/* GetMetadataItem() */ +/************************************************************************/ + +const char *HDF5ImageDataset::GetMetadataItem(const char *pszName, + const char *pszDomain) +{ + if (pszDomain && EQUAL(pszDomain, "__DEBUG__") && + EQUAL(pszName, "WholeBandChunkOptim")) + { + switch (m_eWholeBandChunkOptim) + { + case WBC_DETECTION_IN_PROGRESS: + return "DETECTION_IN_PROGRESS"; + case WBC_DISABLED: + return "DISABLED"; + case WBC_ENABLED: + return "ENABLED"; + } + } + return GDALPamDataset::GetMetadataItem(pszName, pszDomain); +} + /************************************************************************/ /* GetSpatialRef() */ /************************************************************************/ From bbb989d7162a1c40d6fa70dca185efeaf59e3709 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 02:25:39 +0200 Subject: [PATCH 097/230] gdal_contour: fix lowest min value in polygonize mode Fixes #9705 --- alg/contour.cpp | 60 +++++++++++++++++++++++------- autotest/alg/contour.py | 81 +++++++++++++++++++++-------------------- 2 files changed, 89 insertions(+), 52 deletions(-) diff --git a/alg/contour.cpp 
b/alg/contour.cpp index 5424dcbed72d..34f29761b4f3 100644 --- a/alg/contour.cpp +++ b/alg/contour.cpp @@ -118,7 +118,7 @@ struct PolygonContourWriter CPL_DISALLOW_COPY_ASSIGN(PolygonContourWriter) explicit PolygonContourWriter(OGRContourWriterInfo *poInfo, double minLevel) - : poInfo_(poInfo), previousLevel_(minLevel) + : poInfo_(poInfo), currentLevel_(minLevel) { } @@ -175,8 +175,8 @@ struct PolygonContourWriter std::unique_ptr<OGRMultiPolygon> currentGeometry_ = {}; OGRPolygon *currentPart_ = nullptr; OGRContourWriterInfo *poInfo_ = nullptr; - double currentLevel_ = 0; - double previousLevel_; + double currentLevel_; + double previousLevel_ = 0; }; struct GDALRingAppender @@ -582,13 +582,19 @@ CPLErr GDALContourGenerateEx(GDALRasterBandH hBand, void *hLayer, opt = CSLFetchNameValue(options, "FIXED_LEVELS"); if (opt) { - char **values = CSLTokenizeStringComplex(opt, ",", FALSE, FALSE); - fixedLevels.resize(CSLCount(values)); + const CPLStringList aosLevels( + CSLTokenizeStringComplex(opt, ",", FALSE, FALSE)); + fixedLevels.resize(aosLevels.size()); for (size_t i = 0; i < fixedLevels.size(); i++) { - fixedLevels[i] = CPLAtof(values[i]); + fixedLevels[i] = CPLAtof(aosLevels[i]); + if (i > 0 && !(fixedLevels[i] >= fixedLevels[i - 1])) + { + CPLError(CE_Failure, CPLE_AppDefined, + "FIXED_LEVELS should be strictly increasing"); + return CE_Failure; + } } - CSLDestroy(values); } bool useNoData = false; @@ -661,16 +667,44 @@ CPLErr GDALContourGenerateEx(GDALRasterBandH hBand, void *hLayer, { if (polygonize) { - int bSuccess; - PolygonContourWriter w(&oCWI, - GDALGetRasterMinimum(hBand, &bSuccess)); + int bSuccessMin = FALSE; + double dfMinimum = GDALGetRasterMinimum(hBand, &bSuccessMin); + int bSuccessMax = FALSE; + double dfMaximum = GDALGetRasterMaximum(hBand, &bSuccessMax); + if ((!bSuccessMin || !bSuccessMax) && !fixedLevels.empty()) + { + double adfMinMax[2]; + if (GDALComputeRasterMinMax(hBand, false, adfMinMax) == CE_None) + { + dfMinimum = adfMinMax[0]; + dfMaximum = adfMinMax[1]; + } + } + if (!fixedLevels.empty()) + { + // If the minimum raster value is larger than the first requested + // level, select the requested level that is just below the + // minimum raster value + if (fixedLevels[0] < dfMinimum) + { + for (size_t i = 1; i < fixedLevels.size(); ++i) + { + if (fixedLevels[i] >= dfMinimum) + { + dfMinimum = fixedLevels[i - 1]; + break; + } + } + } + } + + PolygonContourWriter w(&oCWI, dfMinimum); typedef PolygonRingAppender<PolygonContourWriter> RingAppender; RingAppender appender(w); if (!fixedLevels.empty()) { - FixedLevelRangeIterator levels( - &fixedLevels[0], fixedLevels.size(), - GDALGetRasterMaximum(hBand, &bSuccess)); + FixedLevelRangeIterator levels(&fixedLevels[0], + fixedLevels.size(), dfMaximum); SegmentMerger<RingAppender, FixedLevelRangeIterator> writer( appender, levels, /* polygonize */ true); ContourGeneratorFromRaster<decltype(writer), diff --git a/autotest/alg/contour.py b/autotest/alg/contour.py index e63dc3e9d16b..b657256b764c 100755 --- a/autotest/alg/contour.py +++ b/autotest/alg/contour.py @@ -52,6 +52,8 @@ def input_tif(tmp_path): ds.SetProjection(wkt) ds.SetGeoTransform([1, precision, 0, 50, 0, -precision]) + ds.GetRasterBand(1).Fill(1) + raw_data = struct.pack("h", 10) * int(size / 2) for i in range(int(size / 2)): ds.WriteRaster( @@ -172,7 +174,7 @@ def test_contour_2(input_tif, tmp_path): 49.75 - 0.125 - 0.0625, ], ] - expected_height = [10, 20, 25, 10000] + expected_height = [10, 20, 25] lyr = ogr_ds.ExecuteSQL("select * from contour order by elev 
asc") @@ -230,7 +232,20 @@ def test_contour_real_world_case(): # Test with -p option (polygonize) -def test_contour_3(input_tif, tmp_path): +@pytest.mark.parametrize( + "fixed_levels, expected_min, expected_max", + [ + ("-10,0,10,20,25,30,40", [0, 10, 20, 25], [10, 20, 25, 30]), + ("-10,0,10,20,25,30", [0, 10, 20, 25], [10, 20, 25, 30]), + ("0,10,20,25,30", [0, 10, 20, 25], [10, 20, 25, 30]), + ("1,10,20,25,30", [1, 10, 20, 25], [10, 20, 25, 30]), + ("10,20,25,30", [1, 10, 20, 25], [10, 20, 25, 30]), + ("10,20,24", [1, 10, 20, 24], [10, 20, 24, 25]), + ("10,20,25", [1, 10, 20], [10, 20, 25]), + ("0,10,20", [0, 10, 20], [10, 20, 25]), + ], +) +def test_contour_3(input_tif, tmp_path, fixed_levels, expected_min, expected_max): output_shp = str(tmp_path / "contour.shp") @@ -244,12 +259,11 @@ def test_contour_3(input_tif, tmp_path): ogr_lyr.CreateField(field_defn) ds = gdal.Open(input_tif) - # gdal.ContourGenerateEx(ds.GetRasterBand(1), 0, 0, 0, [10, 20, 25], 0, 0, ogr_lyr, 0, 1, 1) gdal.ContourGenerateEx( ds.GetRasterBand(1), ogr_lyr, options=[ - "FIXED_LEVELS=10,20,25", + "FIXED_LEVELS=" + fixed_levels, "ID_FIELD=0", "ELEV_FIELD_MIN=1", "ELEV_FIELD_MAX=2", @@ -269,41 +283,30 @@ def test_contour_3(input_tif, tmp_path): 49.75 - 0.125 - 0.0625, ], ] - expected_height = [10, 20, 25, 10000] - - lyr = ogr_ds.ExecuteSQL("select * from contour order by elevMin asc") - - assert lyr.GetFeatureCount() == len(expected_envelopes) - - i = 0 - feat = lyr.GetNextFeature() - while feat is not None: - if i < 3 and feat.GetField("elevMax") != expected_height[i]: - pytest.fail( - "Got %f as z. Expected %f" - % (feat.GetField("elevMax"), expected_height[i]) - ) - elif i > 0 and i < 3 and feat.GetField("elevMin") != expected_height[i - 1]: - pytest.fail( - "Got %f as z. Expected %f" - % (feat.GetField("elevMin"), expected_height[i - 1]) - ) - - envelope = feat.GetGeometryRef().GetEnvelope() - for j in range(4): - if expected_envelopes[i][j] != pytest.approx( - envelope[j], abs=precision / 2 * 1.001 - ): - print("i=%d, wkt=%s" % (i, feat.GetGeometryRef().ExportToWkt())) - print(feat.GetGeometryRef().GetEnvelope()) - pytest.fail( - "%f, %f" % (expected_envelopes[i][j] - envelope[j], precision / 2) - ) - i = i + 1 - feat = lyr.GetNextFeature() - - ogr_ds.ReleaseResultSet(lyr) - ogr_ds.Destroy() + if len(expected_min) < len(expected_envelopes): + expected_envelopes = expected_envelopes[0 : len(expected_min)] + + with ogr_ds.ExecuteSQL("select * from contour order by elevMin asc") as lyr: + + assert lyr.GetFeatureCount() == len(expected_envelopes) + + i = 0 + for feat in lyr: + assert feat.GetField("elevMin") == expected_min[i], i + assert feat.GetField("elevMax") == expected_max[i], i + + envelope = feat.GetGeometryRef().GetEnvelope() + for j in range(4): + if expected_envelopes[i][j] != pytest.approx( + envelope[j], abs=precision / 2 * 1.001 + ): + print("i=%d, wkt=%s" % (i, feat.GetGeometryRef().ExportToWkt())) + print(feat.GetGeometryRef().GetEnvelope()) + pytest.fail( + "%f, %f" + % (expected_envelopes[i][j] - envelope[j], precision / 2) + ) + i = i + 1 # Check behaviour when the nodata value as a double isn't exactly the Float32 pixel value From 337a557af853ed3a147231bf2a543ceed1276026 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 03:36:38 +0200 Subject: [PATCH 098/230] GDALInfoAppOptionsGetParser(): avoid potential nullptr dereference (master only) --- apps/gdalinfo_lib.cpp | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git 
a/apps/gdalinfo_lib.cpp b/apps/gdalinfo_lib.cpp index 6e41027fab46..b67eb9e1d29c 100644 --- a/apps/gdalinfo_lib.cpp +++ b/apps/gdalinfo_lib.cpp @@ -301,11 +301,15 @@ GDALInfoAppOptionsGetParser(GDALInfoOptions *psOptions, .store_into(psOptions->osWKTFormat) .help(_("WKT format used for SRS.")); - argParser->add_argument("-sd") - .metavar("<n>") - .store_into(psOptionsForBinary->nSubdataset) - .help(_("Use subdataset of specified index (starting at 1), instead of " + if (psOptionsForBinary) + { + argParser->add_argument("-sd") + .metavar("<n>") + .store_into(psOptionsForBinary->nSubdataset) + .help(_( + "Use subdataset of specified index (starting at 1), instead of " "the source dataset itself.")); + } argParser->add_argument("-oo") .metavar("<NAME>=<VALUE>") From 86e7c03549feda33a5892e6290ba54e90c6ab4c0 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 03:51:43 +0200 Subject: [PATCH 099/230] Addresses minor cppcheck warnings --- alg/gdaltransformer.cpp | 4 ++-- alg/gdalwarper.cpp | 3 +-- frmts/georaster/georaster_rasterband.cpp | 14 +++++------ frmts/hdf5/hdf5multidim.cpp | 2 -- frmts/netcdf/netcdfdataset.cpp | 24 ------------------- ogr/ogrsf_frmts/gpx/ogrgpxdatasource.cpp | 6 ++--- ogr/ogrsf_frmts/sosi/ogr_sosi.h | 4 ++-- .../sqlite/ogrsqlitesqlfunctionscommon.cpp | 1 + port/cpl_vsil_adls.cpp | 1 - port/cpl_vsil_az.cpp | 1 - 10 files changed, 15 insertions(+), 45 deletions(-) diff --git a/alg/gdaltransformer.cpp b/alg/gdaltransformer.cpp index e8e826eb6e8e..4c619ac70d40 100644 --- a/alg/gdaltransformer.cpp +++ b/alg/gdaltransformer.cpp @@ -402,8 +402,8 @@ CPLErr CPL_STDCALL GDALSuggestedWarpOutput2(GDALDatasetH hSrcDS, if (GDALGetGeoTransform(hSrcDS, adfGeoTransform) == CE_None && adfGeoTransform[2] == 0.0 && adfGeoTransform[4] == 0.0) { - GDALGenImgProjTransformInfo *psInfo{ - static_cast<GDALGenImgProjTransformInfo *>(pTransformArg)}; + const GDALGenImgProjTransformInfo *psInfo = + static_cast<const GDALGenImgProjTransformInfo *>(pTransformArg); if (psInfo && !psInfo->pSrcTransformer && !psInfo->bHasCustomTransformationPipeline && diff --git a/alg/gdalwarper.cpp b/alg/gdalwarper.cpp index e278979462f8..cfd5e9d747c3 100644 --- a/alg/gdalwarper.cpp +++ b/alg/gdalwarper.cpp @@ -1314,8 +1314,7 @@ void CPL_STDCALL GDALDestroyWarpOptions(GDALWarpOptions *psOptions) CPLFree(psOptions->papSrcPerBandValidityMaskFuncArg); if (psOptions->hCutline != nullptr) - delete OGRGeometry::FromHandle( - static_cast<OGRGeometryH>(psOptions->hCutline)); + delete static_cast<OGRGeometry *>(psOptions->hCutline); CPLFree(psOptions); } diff --git a/frmts/georaster/georaster_rasterband.cpp b/frmts/georaster/georaster_rasterband.cpp index 689720961fdd..907760453be9 100644 --- a/frmts/georaster/georaster_rasterband.cpp +++ b/frmts/georaster/georaster_rasterband.cpp @@ -405,13 +405,13 @@ CPLErr GeoRasterRasterBand::GetStatistics(int bApproxOK, int bForce, (void)bForce; (void)bApproxOK; - char szMin[MAX_DOUBLE_STR_REP + 1]; - char szMax[MAX_DOUBLE_STR_REP + 1]; - char szMean[MAX_DOUBLE_STR_REP + 1]; - char szMedian[MAX_DOUBLE_STR_REP + 1]; - char szMode[MAX_DOUBLE_STR_REP + 1]; - char szStdDev[MAX_DOUBLE_STR_REP + 1]; - char szSampling[MAX_DOUBLE_STR_REP + 1]; + char szMin[MAX_DOUBLE_STR_REP + 1] = {0}; + char szMax[MAX_DOUBLE_STR_REP + 1] = {0}; + char szMean[MAX_DOUBLE_STR_REP + 1] = {0}; + char szMedian[MAX_DOUBLE_STR_REP + 1] = {0}; + char szMode[MAX_DOUBLE_STR_REP + 1] = {0}; + char szStdDev[MAX_DOUBLE_STR_REP + 1] = {0}; + char szSampling[MAX_DOUBLE_STR_REP + 1] = 
{0}; if (!bValidStats) { diff --git a/frmts/hdf5/hdf5multidim.cpp b/frmts/hdf5/hdf5multidim.cpp index 1bca9766cec8..fa27ff79a051 100644 --- a/frmts/hdf5/hdf5multidim.cpp +++ b/frmts/hdf5/hdf5multidim.cpp @@ -1651,8 +1651,6 @@ HDF5Array::GetCoordinateVariables() const "Geolocation Fields")); if (poLongitude && poLatitude) { - std::vector<std::shared_ptr<GDALMDArray>> - m_apoCoordinates{}; ret.push_back(poLongitude); ret.push_back(poLatitude); } diff --git a/frmts/netcdf/netcdfdataset.cpp b/frmts/netcdf/netcdfdataset.cpp index 3d9d58839f3c..69e030de649c 100644 --- a/frmts/netcdf/netcdfdataset.cpp +++ b/frmts/netcdf/netcdfdataset.cpp @@ -3260,8 +3260,6 @@ void netCDFDataset::SetProjectionFromVar( // These values from CF metadata. OGRSpatialReference oSRS; oSRS.SetAxisMappingStrategy(OAMS_TRADITIONAL_GIS_ORDER); - char szDimNameX[NC_MAX_NAME + 1]; - // char szDimNameY[NC_MAX_NAME + 1]; size_t xdim = nRasterXSize; size_t ydim = nRasterYSize; @@ -3415,28 +3413,6 @@ void netCDFDataset::SetProjectionFromVar( static_cast<int>(bIsGdalFile), static_cast<int>(bIsGdalCfFile), static_cast<int>(bSwitchedXY), static_cast<int>(bBottomUp)); - // Look for dimension: lon. - - memset(szDimNameX, '\0', sizeof(szDimNameX)); - // memset(szDimNameY, '\0', sizeof(szDimNameY)); - - if (!bReadSRSOnly) - { - for (unsigned int i = 0; - i < strlen(poDS->papszDimName[poDS->nXDimID]) && i < 3; i++) - { - szDimNameX[i] = (char)CPLTolower(static_cast<unsigned char>( - (poDS->papszDimName[poDS->nXDimID])[i])); - } - szDimNameX[3] = '\0'; - // for( unsigned int i = 0; - // (i < strlen(poDS->papszDimName[poDS->nYDimID]) - // && i < 3 ); i++ ) { - // szDimNameY[i]=(char)CPLTolower(static_cast<unsigned char>((poDS->papszDimName[poDS->nYDimID])[i])); - // } - // szDimNameY[3] = '\0'; - } - // Read projection coordinates. 
int nGroupDimXID = -1; diff --git a/ogr/ogrsf_frmts/gpx/ogrgpxdatasource.cpp b/ogr/ogrsf_frmts/gpx/ogrgpxdatasource.cpp index d4987e17a565..7c884948a4c3 100644 --- a/ogr/ogrsf_frmts/gpx/ogrgpxdatasource.cpp +++ b/ogr/ogrsf_frmts/gpx/ogrgpxdatasource.cpp @@ -293,10 +293,8 @@ void OGRGPXDataSource::startElementValidateCbk(const char *pszNameIn, } if (!osId.empty() && !osDomain.empty()) { - SetMetadataItem("AUTHOR_EMAIL", std::string(std::move(osId)) - .append("@") - .append(osDomain) - .c_str()); + SetMetadataItem("AUTHOR_EMAIL", + osId.append("@").append(osDomain).c_str()); } } else if (strcmp(pszNameIn, "link") == 0) diff --git a/ogr/ogrsf_frmts/sosi/ogr_sosi.h b/ogr/ogrsf_frmts/sosi/ogr_sosi.h index 38cb5b106399..4193cda3a2a0 100644 --- a/ogr/ogrsf_frmts/sosi/ogr_sosi.h +++ b/ogr/ogrsf_frmts/sosi/ogr_sosi.h @@ -178,8 +178,8 @@ class OGRSOSISimpleDataType class OGRSOSIDataType { - OGRSOSISimpleDataType *poElements; - int nElementCount; + OGRSOSISimpleDataType *poElements = nullptr; + int nElementCount = 0; OGRSOSIDataType &operator=(const OGRSOSIDataType &) = delete; diff --git a/ogr/ogrsf_frmts/sqlite/ogrsqlitesqlfunctionscommon.cpp b/ogr/ogrsf_frmts/sqlite/ogrsqlitesqlfunctionscommon.cpp index d6762317bc30..e4b33c697b92 100644 --- a/ogr/ogrsf_frmts/sqlite/ogrsqlitesqlfunctionscommon.cpp +++ b/ogr/ogrsf_frmts/sqlite/ogrsqlitesqlfunctionscommon.cpp @@ -48,6 +48,7 @@ class OGRSQLiteExtensionData #ifdef DEBUG void *pDummy = nullptr; /* to track memory leaks */ #endif + std::map<std::pair<int, int>, std::unique_ptr<OGRCoordinateTransformation>> oCachedTransformsMap{}; std::map<std::string, std::unique_ptr<GDALDataset>> oCachedDS{}; diff --git a/port/cpl_vsil_adls.cpp b/port/cpl_vsil_adls.cpp index e9d08a9204cd..07f32fdc6e18 100644 --- a/port/cpl_vsil_adls.cpp +++ b/port/cpl_vsil_adls.cpp @@ -106,7 +106,6 @@ class VSIADLSFSHandler; struct VSIDIRADLS : public VSIDIR { - std::string m_osRootPath{}; int m_nRecurseDepth = 0; struct Iterator diff --git a/port/cpl_vsil_az.cpp b/port/cpl_vsil_az.cpp index c575d6d9d57d..3a1330d4e320 100644 --- a/port/cpl_vsil_az.cpp +++ b/port/cpl_vsil_az.cpp @@ -72,7 +72,6 @@ const char GDAL_MARKER_FOR_DIR[] = ".gdal_marker_for_dir"; struct VSIDIRAz : public VSIDIRWithMissingDirSynthesis { - std::string osRootPath{}; int nRecurseDepth = 0; std::string osNextMarker{}; From 49497e31500a4930e5e230c8663fba2aad1efcfe Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 03:54:00 +0200 Subject: [PATCH 100/230] Add missing explicit --- port/cpl_error.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/port/cpl_error.h b/port/cpl_error.h index f9d50539c51b..8cc49228468f 100644 --- a/port/cpl_error.h +++ b/port/cpl_error.h @@ -301,7 +301,7 @@ extern "C++" /** Constructor that backs up the error state, and optionally installs * a thread-local temporary error handler (typically CPLQuietErrorHandler). */ - CPLErrorStateBackuper(CPLErrorHandler hHandler = nullptr); + explicit CPLErrorStateBackuper(CPLErrorHandler hHandler = nullptr); /** Destructor that restores the error state to its initial state * before construction. 
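For readers unfamiliar with why the added `explicit` matters: below is a minimal
stand-alone sketch (not GDAL code; `Handler`, `QuietHandler` and `Guard` are
stand-ins for CPLErrorHandler, CPLQuietErrorHandler and CPLErrorStateBackuper)
of the implicit conversion that a single-argument constructor allows when it is
not marked explicit.

    #include <iostream>

    using Handler = void (*)();      // stand-in for CPLErrorHandler
    static void QuietHandler() {}    // stand-in for CPLQuietErrorHandler

    struct Guard                     // stand-in for CPLErrorStateBackuper
    {
        explicit Guard(Handler h = nullptr) : m_h(h) {}
        Handler m_h;
    };

    static void Run(const Guard &g)
    {
        std::cout << (g.m_h ? "handler installed" : "no handler") << std::endl;
    }

    int main()
    {
        // Run(QuietHandler);        // rejected: Guard's constructor is explicit
        Run(Guard(QuietHandler));    // the conversion must now be spelled out
        return 0;
    }

With the constructor left implicit, the commented-out call would compile and
silently construct a temporary guard from a bare error handler, which is the
accidental conversion the one-line change above rules out.
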
From 431a6cc2b941fdf909f346f8800dc1da3e337b0c Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 04:44:27 +0200 Subject: [PATCH 101/230] JPEG: ReadFLIRMetadata(): stop on Start-Of-Scan marker --- frmts/jpeg/jpgdataset.cpp | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/frmts/jpeg/jpgdataset.cpp b/frmts/jpeg/jpgdataset.cpp index 486f4fa6d908..ce0a87be09c2 100644 --- a/frmts/jpeg/jpgdataset.cpp +++ b/frmts/jpeg/jpgdataset.cpp @@ -406,6 +406,14 @@ void JPGDatasetCommon::ReadFLIRMetadata() 1) break; + // Not a marker + if (abyChunkHeader[0] != 0xFF) + continue; + + // Stop on Start of Scan + if (abyChunkHeader[1] == 0xDA) + break; + int nMarkerLength = abyChunkHeader[2] * 256 + abyChunkHeader[3] - 2; nChunkLoc += 4 + nMarkerLength; From a6a5acec46372e4b7ee4ada9770926f20c2e7176 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 05:21:23 +0200 Subject: [PATCH 102/230] Update miramon.rst add versionadded --- doc/source/drivers/vector/miramon.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/source/drivers/vector/miramon.rst b/doc/source/drivers/vector/miramon.rst index 48f1427d184b..b19d1831e76d 100644 --- a/doc/source/drivers/vector/miramon.rst +++ b/doc/source/drivers/vector/miramon.rst @@ -1,7 +1,9 @@ .. _vector.miramon: MiraMon Vector -==================== +============== + +.. versionadded:: 3.9 .. shortname:: MiraMonVector From 9d338fe791900385785a5ec4d3fda6f3e6d07635 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 13:57:11 +0200 Subject: [PATCH 103/230] [Lint] Parquet: fix indentation --- ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp | 301 ++++++++++---------- 1 file changed, 148 insertions(+), 153 deletions(-) diff --git a/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp b/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp index a4aa293d8eb5..ce3a755f3ed6 100644 --- a/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp +++ b/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp @@ -206,195 +206,190 @@ bool OGRParquetLayerBase::DealWithGeometryColumn( } bool bRegularField = true; - // odd indetation to make backports to release/3.5 easier + auto oIter = m_oMapGeometryColumns.find(field->name()); + if (oIter != m_oMapGeometryColumns.end() || + STARTS_WITH(osExtensionName.c_str(), "ogc.") || + STARTS_WITH(osExtensionName.c_str(), "geoarrow.")) { - auto oIter = m_oMapGeometryColumns.find(field->name()); - if (oIter != m_oMapGeometryColumns.end() || - STARTS_WITH(osExtensionName.c_str(), "ogc.") || - STARTS_WITH(osExtensionName.c_str(), "geoarrow.")) - { - CPLJSONObject oJSONDef; - if (oIter != m_oMapGeometryColumns.end()) - oJSONDef = oIter->second; - auto osEncoding = oJSONDef.GetString("encoding"); - if (osEncoding.empty() && !osExtensionName.empty()) - osEncoding = osExtensionName; - - OGRwkbGeometryType eGeomType = wkbUnknown; - auto eGeomEncoding = OGRArrowGeomEncoding::WKB; - if (IsValidGeometryEncoding(field, osEncoding, eGeomType, - eGeomEncoding)) - { - bRegularField = false; - OGRGeomFieldDefn oField(field->name().c_str(), wkbUnknown); + CPLJSONObject oJSONDef; + if (oIter != m_oMapGeometryColumns.end()) + oJSONDef = oIter->second; + auto osEncoding = oJSONDef.GetString("encoding"); + if (osEncoding.empty() && !osExtensionName.empty()) + osEncoding = osExtensionName; + + OGRwkbGeometryType eGeomType = wkbUnknown; + auto eGeomEncoding = OGRArrowGeomEncoding::WKB; + if (IsValidGeometryEncoding(field, osEncoding, eGeomType, + eGeomEncoding)) + { 
+ bRegularField = false; + OGRGeomFieldDefn oField(field->name().c_str(), wkbUnknown); - auto oCRS = oJSONDef["crs"]; - OGRSpatialReference *poSRS = nullptr; - if (!oCRS.IsValid()) - { - if (!m_oMapGeometryColumns.empty()) - { - // WGS 84 is implied if no crs member is found. - poSRS = new OGRSpatialReference(); - poSRS->SetAxisMappingStrategy( - OAMS_TRADITIONAL_GIS_ORDER); - poSRS->importFromEPSG(4326); - } - } - else if (oCRS.GetType() == CPLJSONObject::Type::String) + auto oCRS = oJSONDef["crs"]; + OGRSpatialReference *poSRS = nullptr; + if (!oCRS.IsValid()) + { + if (!m_oMapGeometryColumns.empty()) { - const auto osWKT = oCRS.ToString(); + // WGS 84 is implied if no crs member is found. poSRS = new OGRSpatialReference(); poSRS->SetAxisMappingStrategy(OAMS_TRADITIONAL_GIS_ORDER); + poSRS->importFromEPSG(4326); + } + } + else if (oCRS.GetType() == CPLJSONObject::Type::String) + { + const auto osWKT = oCRS.ToString(); + poSRS = new OGRSpatialReference(); + poSRS->SetAxisMappingStrategy(OAMS_TRADITIONAL_GIS_ORDER); - if (poSRS->importFromWkt(osWKT.c_str()) != OGRERR_NONE) - { - poSRS->Release(); - poSRS = nullptr; - } + if (poSRS->importFromWkt(osWKT.c_str()) != OGRERR_NONE) + { + poSRS->Release(); + poSRS = nullptr; } - else if (oCRS.GetType() == CPLJSONObject::Type::Object) + } + else if (oCRS.GetType() == CPLJSONObject::Type::Object) + { + // CRS encoded as PROJJSON (extension) + const auto oType = oCRS["type"]; + if (oType.IsValid() && + oType.GetType() == CPLJSONObject::Type::String) { - // CRS encoded as PROJJSON (extension) - const auto oType = oCRS["type"]; - if (oType.IsValid() && - oType.GetType() == CPLJSONObject::Type::String) + const auto osType = oType.ToString(); + if (osType.find("CRS") != std::string::npos) { - const auto osType = oType.ToString(); - if (osType.find("CRS") != std::string::npos) - { - poSRS = new OGRSpatialReference(); - poSRS->SetAxisMappingStrategy( - OAMS_TRADITIONAL_GIS_ORDER); + poSRS = new OGRSpatialReference(); + poSRS->SetAxisMappingStrategy( + OAMS_TRADITIONAL_GIS_ORDER); - if (poSRS->SetFromUserInput( - oCRS.ToString().c_str()) != OGRERR_NONE) - { - poSRS->Release(); - poSRS = nullptr; - } + if (poSRS->SetFromUserInput(oCRS.ToString().c_str()) != + OGRERR_NONE) + { + poSRS->Release(); + poSRS = nullptr; } } } + } - if (poSRS) - { - const double dfCoordEpoch = oJSONDef.GetDouble("epoch"); - if (dfCoordEpoch > 0) - poSRS->SetCoordinateEpoch(dfCoordEpoch); + if (poSRS) + { + const double dfCoordEpoch = oJSONDef.GetDouble("epoch"); + if (dfCoordEpoch > 0) + poSRS->SetCoordinateEpoch(dfCoordEpoch); - oField.SetSpatialRef(poSRS); + oField.SetSpatialRef(poSRS); - poSRS->Release(); - } + poSRS->Release(); + } - if (!m_osCRS.empty()) + if (!m_osCRS.empty()) + { + poSRS = new OGRSpatialReference(); + poSRS->SetAxisMappingStrategy(OAMS_TRADITIONAL_GIS_ORDER); + if (poSRS->SetFromUserInput(m_osCRS.c_str()) == OGRERR_NONE) { - poSRS = new OGRSpatialReference(); - poSRS->SetAxisMappingStrategy(OAMS_TRADITIONAL_GIS_ORDER); - if (poSRS->SetFromUserInput(m_osCRS.c_str()) == OGRERR_NONE) - { - oField.SetSpatialRef(poSRS); - } - poSRS->Release(); + oField.SetSpatialRef(poSRS); } + poSRS->Release(); + } - if (oJSONDef.GetString("edges") == "spherical") + if (oJSONDef.GetString("edges") == "spherical") + { + SetMetadataItem("EDGES", "SPHERICAL"); + } + + // m_aeGeomEncoding be filled before calling + // ComputeGeometryColumnType() + m_aeGeomEncoding.push_back(eGeomEncoding); + if (eGeomType == wkbUnknown) + { + // geometry_types since 1.0.0-beta1. 
Was geometry_type + // before + auto oType = oJSONDef.GetObj("geometry_types"); + if (!oType.IsValid()) + oType = oJSONDef.GetObj("geometry_type"); + if (oType.GetType() == CPLJSONObject::Type::String) { - SetMetadataItem("EDGES", "SPHERICAL"); + // string is no longer valid since 1.0.0-beta1 + const auto osType = oType.ToString(); + if (osType != "Unknown") + eGeomType = GetGeometryTypeFromString(osType); } - - // m_aeGeomEncoding be filled before calling - // ComputeGeometryColumnType() - m_aeGeomEncoding.push_back(eGeomEncoding); - if (eGeomType == wkbUnknown) + else if (oType.GetType() == CPLJSONObject::Type::Array) { - // geometry_types since 1.0.0-beta1. Was geometry_type - // before - auto oType = oJSONDef.GetObj("geometry_types"); - if (!oType.IsValid()) - oType = oJSONDef.GetObj("geometry_type"); - if (oType.GetType() == CPLJSONObject::Type::String) + const auto oTypeArray = oType.ToArray(); + if (oTypeArray.Size() == 1) { - // string is no longer valid since 1.0.0-beta1 - const auto osType = oType.ToString(); - if (osType != "Unknown") - eGeomType = GetGeometryTypeFromString(osType); + eGeomType = + GetGeometryTypeFromString(oTypeArray[0].ToString()); } - else if (oType.GetType() == CPLJSONObject::Type::Array) + else if (oTypeArray.Size() > 1) { - const auto oTypeArray = oType.ToArray(); - if (oTypeArray.Size() == 1) + const auto PromoteToCollection = + [](OGRwkbGeometryType eType) { - eGeomType = GetGeometryTypeFromString( - oTypeArray[0].ToString()); - } - else if (oTypeArray.Size() > 1) + if (eType == wkbPoint) + return wkbMultiPoint; + if (eType == wkbLineString) + return wkbMultiLineString; + if (eType == wkbPolygon) + return wkbMultiPolygon; + return eType; + }; + bool bMixed = false; + bool bHasMulti = false; + bool bHasZ = false; + bool bHasM = false; + const auto eFirstType = + OGR_GT_Flatten(GetGeometryTypeFromString( + oTypeArray[0].ToString())); + const auto eFirstTypeCollection = + PromoteToCollection(eFirstType); + for (int i = 0; i < oTypeArray.Size(); ++i) { - const auto PromoteToCollection = - [](OGRwkbGeometryType eType) - { - if (eType == wkbPoint) - return wkbMultiPoint; - if (eType == wkbLineString) - return wkbMultiLineString; - if (eType == wkbPolygon) - return wkbMultiPolygon; - return eType; - }; - bool bMixed = false; - bool bHasMulti = false; - bool bHasZ = false; - bool bHasM = false; - const auto eFirstType = - OGR_GT_Flatten(GetGeometryTypeFromString( - oTypeArray[0].ToString())); - const auto eFirstTypeCollection = - PromoteToCollection(eFirstType); - for (int i = 0; i < oTypeArray.Size(); ++i) + const auto eThisGeom = GetGeometryTypeFromString( + oTypeArray[i].ToString()); + if (PromoteToCollection(OGR_GT_Flatten( + eThisGeom)) != eFirstTypeCollection) { - const auto eThisGeom = - GetGeometryTypeFromString( - oTypeArray[i].ToString()); - if (PromoteToCollection(OGR_GT_Flatten( - eThisGeom)) != eFirstTypeCollection) - { - bMixed = true; - break; - } - bHasZ |= OGR_GT_HasZ(eThisGeom) != FALSE; - bHasM |= OGR_GT_HasM(eThisGeom) != FALSE; - bHasMulti |= (PromoteToCollection( - OGR_GT_Flatten(eThisGeom)) == - OGR_GT_Flatten(eThisGeom)); + bMixed = true; + break; } - if (!bMixed) + bHasZ |= OGR_GT_HasZ(eThisGeom) != FALSE; + bHasM |= OGR_GT_HasM(eThisGeom) != FALSE; + bHasMulti |= + (PromoteToCollection(OGR_GT_Flatten( + eThisGeom)) == OGR_GT_Flatten(eThisGeom)); + } + if (!bMixed) + { + if (eFirstTypeCollection == wkbMultiPolygon || + eFirstTypeCollection == wkbMultiLineString) { - if (eFirstTypeCollection == wkbMultiPolygon || - eFirstTypeCollection == 
wkbMultiLineString) - { - if (bHasMulti) - eGeomType = OGR_GT_SetModifier( - eFirstTypeCollection, bHasZ, bHasM); - else - eGeomType = OGR_GT_SetModifier( - eFirstType, bHasZ, bHasM); - } + if (bHasMulti) + eGeomType = OGR_GT_SetModifier( + eFirstTypeCollection, bHasZ, bHasM); + else + eGeomType = OGR_GT_SetModifier( + eFirstType, bHasZ, bHasM); } } } - else if (CPLTestBool(CPLGetConfigOption( - "OGR_PARQUET_COMPUTE_GEOMETRY_TYPE", "YES"))) - { - eGeomType = computeGeometryTypeFun(); - } } - - oField.SetType(eGeomType); - oField.SetNullable(field->nullable()); - m_poFeatureDefn->AddGeomFieldDefn(&oField); - m_anMapGeomFieldIndexToArrowColumn.push_back(iFieldIdx); + else if (CPLTestBool(CPLGetConfigOption( + "OGR_PARQUET_COMPUTE_GEOMETRY_TYPE", "YES"))) + { + eGeomType = computeGeometryTypeFun(); + } } + + oField.SetType(eGeomType); + oField.SetNullable(field->nullable()); + m_poFeatureDefn->AddGeomFieldDefn(&oField); + m_anMapGeomFieldIndexToArrowColumn.push_back(iFieldIdx); } } From 71f88cbf6bb040a39aa9ec140e291d32f525665e Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 13:29:36 +0200 Subject: [PATCH 104/230] GTiff: enable -Wold-style-cast --- frmts/gtiff/CMakeLists.txt | 2 +- frmts/gtiff/gt_jpeg_copy.cpp | 8 ++--- frmts/gtiff/tifvsi.cpp | 4 ++- frmts/jpeg/vsidataio.cpp | 64 ++++++++++++++++++++---------------- 4 files changed, 43 insertions(+), 35 deletions(-) diff --git a/frmts/gtiff/CMakeLists.txt b/frmts/gtiff/CMakeLists.txt index 575da527ea08..bb95feeadd6c 100644 --- a/frmts/gtiff/CMakeLists.txt +++ b/frmts/gtiff/CMakeLists.txt @@ -36,7 +36,7 @@ add_gdal_driver( gt_overview.cpp gt_wkt_srs.cpp tifvsi.cpp - BUILTIN CXX_WFLAGS_EFFCXX) + BUILTIN STRONG_CXX_WFLAGS) gdal_standard_includes(gdal_GTIFF) if (GDAL_ENABLE_DRIVER_JPEG) diff --git a/frmts/gtiff/gt_jpeg_copy.cpp b/frmts/gtiff/gt_jpeg_copy.cpp index 7995860094e5..73262092f210 100644 --- a/frmts/gtiff/gt_jpeg_copy.cpp +++ b/frmts/gtiff/gt_jpeg_copy.cpp @@ -464,7 +464,7 @@ CPLErr GTIFF_CopyFromJPEG_WriteAdditionalTags(TIFF *hTIFF, GDALDataset *poSrcDS) sDInfo.client_data = &setjmp_buffer; bCallDestroyDecompress = true; - jpeg_create_decompress(&sDInfo); + jpeg_CreateDecompress(&sDInfo, JPEG_LIB_VERSION, sizeof(sDInfo)); jpeg_vsiio_src(&sDInfo, fpJPEG); jpeg_read_header(&sDInfo, TRUE); @@ -473,7 +473,7 @@ CPLErr GTIFF_CopyFromJPEG_WriteAdditionalTags(TIFF *hTIFF, GDALDataset *poSrcDS) sJErr.error_exit = GTIFF_ErrorExitJPEG; sCInfo.client_data = &setjmp_buffer; - jpeg_create_compress(&sCInfo); + jpeg_CreateCompress(&sCInfo, JPEG_LIB_VERSION, sizeof(sCInfo)); bCallDestroyCompress = true; jpeg_copy_critical_parameters(&sDInfo, &sCInfo); GTIFF_Set_TIFFTAG_JPEGTABLES(hTIFF, sDInfo, sCInfo); @@ -612,7 +612,7 @@ static CPLErr GTIFF_CopyBlockFromJPEG(GTIFF_CopyBlockFromJPEGArgs *psArgs) sCInfo.client_data = &setjmp_buffer; // Initialize destination compression parameters from source values. - jpeg_create_compress(&sCInfo); + jpeg_CreateCompress(&sCInfo, JPEG_LIB_VERSION, sizeof(sCInfo)); jpeg_copy_critical_parameters(psDInfo, &sCInfo); // Ensure libjpeg won't write any extraneous markers. @@ -837,7 +837,7 @@ CPLErr GTIFF_CopyFromJPEG(GDALDataset *poDS, GDALDataset *poSrcDS, sJErr.error_exit = GTIFF_ErrorExitJPEG; sDInfo.client_data = &setjmp_buffer; - jpeg_create_decompress(&sDInfo); + jpeg_CreateDecompress(&sDInfo, JPEG_LIB_VERSION, sizeof(sDInfo)); // This is to address bug related in ticket #1795. 
if (CPLGetConfigOption("JPEGMEM", nullptr) == nullptr) diff --git a/frmts/gtiff/tifvsi.cpp b/frmts/gtiff/tifvsi.cpp index 3db1bad94fb6..a1f106d257c0 100644 --- a/frmts/gtiff/tifvsi.cpp +++ b/frmts/gtiff/tifvsi.cpp @@ -54,6 +54,8 @@ #include "xtiffio.h" +#include <limits> + #if (TIFFLIB_VERSION > 20220520) || defined(INTERNAL_LIBTIFF) // > 4.4.0 #define SUPPORTS_LIBTIFF_OPEN_OPTIONS @@ -452,7 +454,7 @@ static void VSI_TIFFSetOpenOptions(TIFFOpenOptions *opts) else return CPLGetUsablePhysicalRAM() * 9 / 10; }(); - if (nMemLimit > 0 && nMemLimit < TIFF_TMSIZE_T_MAX) + if (nMemLimit > 0 && nMemLimit < std::numeric_limits<tmsize_t>::max()) { //CPLDebug("GTiff", "TIFFOpenOptionsSetMaxCumulatedMemAlloc(%" PRIu64 ")", // static_cast<uint64_t>(nMemLimit)); diff --git a/frmts/jpeg/vsidataio.cpp b/frmts/jpeg/vsidataio.cpp index 052fbbc497af..98bbef16d89c 100644 --- a/frmts/jpeg/vsidataio.cpp +++ b/frmts/jpeg/vsidataio.cpp @@ -60,7 +60,7 @@ constexpr size_t INPUT_BUF_SIZE = 4096; static void init_source(j_decompress_ptr cinfo) { - my_src_ptr src = (my_src_ptr)cinfo->src; + my_src_ptr src = reinterpret_cast<my_src_ptr>(cinfo->src); // We reset the empty-input-file flag for each image, // but we don't clear the input buffer. @@ -101,7 +101,7 @@ static void init_source(j_decompress_ptr cinfo) static boolean fill_input_buffer(j_decompress_ptr cinfo) { - my_src_ptr src = (my_src_ptr)cinfo->src; + my_src_ptr src = reinterpret_cast<my_src_ptr>(cinfo->src); size_t nbytes = VSIFReadL(src->buffer, 1, INPUT_BUF_SIZE, src->infile); if (nbytes == 0) @@ -110,13 +110,15 @@ static boolean fill_input_buffer(j_decompress_ptr cinfo) { // Treat empty input file as fatal error. cinfo->err->msg_code = JERR_INPUT_EMPTY; - cinfo->err->error_exit((j_common_ptr)(cinfo)); + cinfo->err->error_exit(reinterpret_cast<j_common_ptr>(cinfo)); return FALSE; // will never reach that point } - WARNMS(cinfo, JWRN_JPEG_EOF); + (cinfo)->err->msg_code = JWRN_JPEG_EOF; + (*cinfo->err->emit_message)(reinterpret_cast<j_common_ptr>(cinfo), -1); + // Insert a fake EOI marker. - src->buffer[0] = (JOCTET)0xFF; - src->buffer[1] = (JOCTET)JPEG_EOI; + src->buffer[0] = static_cast<JOCTET>(0xFF); + src->buffer[1] = static_cast<JOCTET>(JPEG_EOI); nbytes = 2; } @@ -193,22 +195,22 @@ static boolean fill_input_buffer_ipp(j_decompress_ptr cinfo) static void skip_input_data(j_decompress_ptr cinfo, long num_bytes) { - my_src_ptr src = (my_src_ptr)cinfo->src; + my_src_ptr src = reinterpret_cast<my_src_ptr>(cinfo->src); // Just a dumb implementation for now. Could use fseek() except // it doesn't work on pipes. Not clear that being smart is worth // any trouble anyway --- large skips are infrequent. if (num_bytes > 0) { - while (num_bytes > (long)src->pub.bytes_in_buffer) + while (num_bytes > static_cast<long>(src->pub.bytes_in_buffer)) { - num_bytes -= (long)src->pub.bytes_in_buffer; + num_bytes -= static_cast<long>(src->pub.bytes_in_buffer); (void)fill_input_buffer(cinfo); // note we assume that fill_input_buffer will never return FALSE, // so suspension need not be handled. } - src->pub.next_input_byte += (size_t)num_bytes; - src->pub.bytes_in_buffer -= (size_t)num_bytes; + src->pub.next_input_byte += static_cast<size_t>(num_bytes); + src->pub.bytes_in_buffer -= static_cast<size_t>(num_bytes); } } @@ -247,15 +249,16 @@ void jpeg_vsiio_src(j_decompress_ptr cinfo, VSILFILE *infile) if (cinfo->src == nullptr) { // First time for this JPEG object? 
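         // The manager struct and its work buffer are allocated from the
         // JPEG object's permanent memory pool, so they are created only
         // once and are released together with the decompression object.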
- cinfo->src = (struct jpeg_source_mgr *)(*cinfo->mem->alloc_small)( - (j_common_ptr)cinfo, JPOOL_PERMANENT, sizeof(my_source_mgr)); - src = (my_src_ptr)cinfo->src; - src->buffer = (JOCTET *)(*cinfo->mem->alloc_small)( - (j_common_ptr)cinfo, JPOOL_PERMANENT, - INPUT_BUF_SIZE * sizeof(JOCTET)); + j_common_ptr cinfo_common = reinterpret_cast<j_common_ptr>(cinfo); + cinfo->src = + static_cast<struct jpeg_source_mgr *>((*cinfo->mem->alloc_small)( + cinfo_common, JPOOL_PERMANENT, sizeof(my_source_mgr))); + src = reinterpret_cast<my_src_ptr>(cinfo->src); + src->buffer = static_cast<JOCTET *>((*cinfo->mem->alloc_small)( + cinfo_common, JPOOL_PERMANENT, INPUT_BUF_SIZE * sizeof(JOCTET))); } - src = (my_src_ptr)cinfo->src; + src = reinterpret_cast<my_src_ptr>(cinfo->src); src->pub.init_source = init_source; #ifdef IPPJ_HUFF src->pub.fill_input_buffer = fill_input_buffer_ipp; @@ -297,11 +300,12 @@ constexpr size_t OUTPUT_BUF_SIZE = 4096; static void init_destination(j_compress_ptr cinfo) { - my_dest_ptr dest = (my_dest_ptr)cinfo->dest; + my_dest_ptr dest = reinterpret_cast<my_dest_ptr>(cinfo->dest); // Allocate the output buffer --- it will be released when done with image. - dest->buffer = (JOCTET *)(*cinfo->mem->alloc_small)( - (j_common_ptr)cinfo, JPOOL_IMAGE, OUTPUT_BUF_SIZE * sizeof(JOCTET)); + dest->buffer = static_cast<JOCTET *>((*cinfo->mem->alloc_small)( + reinterpret_cast<j_common_ptr>(cinfo), JPOOL_IMAGE, + OUTPUT_BUF_SIZE * sizeof(JOCTET))); dest->pub.next_output_byte = dest->buffer; dest->pub.free_in_buffer = OUTPUT_BUF_SIZE; @@ -330,7 +334,7 @@ static void init_destination(j_compress_ptr cinfo) static boolean empty_output_buffer(j_compress_ptr cinfo) { - my_dest_ptr dest = (my_dest_ptr)cinfo->dest; + my_dest_ptr dest = reinterpret_cast<my_dest_ptr>(cinfo->dest); size_t bytes_to_write = OUTPUT_BUF_SIZE; #ifdef IPPJ_HUFF @@ -347,7 +351,7 @@ static boolean empty_output_buffer(j_compress_ptr cinfo) bytes_to_write) { cinfo->err->msg_code = JERR_FILE_WRITE; - cinfo->err->error_exit((j_common_ptr)(cinfo)); + cinfo->err->error_exit(reinterpret_cast<j_common_ptr>(cinfo)); return FALSE; // will never reach that point } @@ -365,7 +369,7 @@ static boolean empty_output_buffer(j_compress_ptr cinfo) // for error exit. static void term_destination(j_compress_ptr cinfo) { - my_dest_ptr dest = (my_dest_ptr)cinfo->dest; + my_dest_ptr dest = reinterpret_cast<my_dest_ptr>(cinfo->dest); size_t datacount = OUTPUT_BUF_SIZE - dest->pub.free_in_buffer; // Write any data remaining in the buffer. @@ -374,14 +378,14 @@ static void term_destination(j_compress_ptr cinfo) if (VSIFWriteL(dest->buffer, 1, datacount, dest->outfile) != datacount) { cinfo->err->msg_code = JERR_FILE_WRITE; - cinfo->err->error_exit((j_common_ptr)(cinfo)); + cinfo->err->error_exit(reinterpret_cast<j_common_ptr>(cinfo)); return; // will never reach that point } } if (VSIFFlushL(dest->outfile) != 0) { cinfo->err->msg_code = JERR_FILE_WRITE; - cinfo->err->error_exit((j_common_ptr)(cinfo)); + cinfo->err->error_exit(reinterpret_cast<j_common_ptr>(cinfo)); return; // will never reach that point } } @@ -402,11 +406,13 @@ void jpeg_vsiio_dest(j_compress_ptr cinfo, VSILFILE *outfile) if (cinfo->dest == nullptr) { // First time for this JPEG object? 
- cinfo->dest = (struct jpeg_destination_mgr *)(*cinfo->mem->alloc_small)( - (j_common_ptr)cinfo, JPOOL_PERMANENT, sizeof(my_destination_mgr)); + cinfo->dest = static_cast<struct jpeg_destination_mgr *>( + (*cinfo->mem->alloc_small)(reinterpret_cast<j_common_ptr>(cinfo), + JPOOL_PERMANENT, + sizeof(my_destination_mgr))); } - dest = (my_dest_ptr)cinfo->dest; + dest = reinterpret_cast<my_dest_ptr>(cinfo->dest); dest->pub.init_destination = init_destination; dest->pub.empty_output_buffer = empty_output_buffer; dest->pub.term_destination = term_destination; From 54b21b09b2796d92457c8c80a4255d4894a564d3 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 16:15:49 +0200 Subject: [PATCH 105/230] Arrow/Parquet: fix crash when reading geometries from a geometry column with a pyarrow-registered extension type --- autotest/ogr/ogr_arrow.py | 2 + .../arrow_common/ograrrowlayer.hpp | 38 +++++++++++++------ 2 files changed, 29 insertions(+), 11 deletions(-) diff --git a/autotest/ogr/ogr_arrow.py b/autotest/ogr/ogr_arrow.py index 22efac1849f5..4e7857e4b994 100755 --- a/autotest/ogr/ogr_arrow.py +++ b/autotest/ogr/ogr_arrow.py @@ -486,6 +486,8 @@ def __arrow_ext_deserialize__(cls, storage_type, serialized): ds = ogr.Open("data/arrow/from_paleolimbot_geoarrow/point-default.feather") lyr = ds.GetLayer(0) assert lyr.GetGeometryColumn() == "geometry" + f = lyr.GetNextFeature() + assert f.GetGeometryRef().ExportToIsoWkt() == "POINT (30 10)" finally: pa.unregister_extension_type(point_type.extension_name) diff --git a/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp b/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp index 47854f431c41..3e6d19c449f0 100644 --- a/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp +++ b/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp @@ -1961,6 +1961,21 @@ OGRArrowLayer::TimestampToOGR(int64_t timestamp, psField->Date.Second = static_cast<float>(dt.tm_sec + floatingPart); } +/************************************************************************/ +/* GetStorageArray() */ +/************************************************************************/ + +static const arrow::Array *GetStorageArray(const arrow::Array *array) +{ + if (array->type_id() == arrow::Type::EXTENSION) + { + auto extensionArray = + cpl::down_cast<const arrow::ExtensionArray *>(array); + array = extensionArray->storage().get(); + } + return array; +} + /************************************************************************/ /* ReadFeature() */ /************************************************************************/ @@ -2392,7 +2407,7 @@ inline OGRFeature *OGRArrowLayer::ReadFeature( iCol = m_anMapGeomFieldIndexToArrowColumn[i]; } - const auto array = poColumnArrays[iCol].get(); + const auto array = GetStorageArray(poColumnArrays[iCol].get()); auto poGeometry = ReadGeometry(i, array, nIdxInBatch); if (poGeometry) { @@ -3824,7 +3839,8 @@ OGRArrowLayer::SetBatch(const std::shared_ptr<arrow::RecordBatch> &poBatch) if (iCol >= 0 && m_aeGeomEncoding[m_iGeomFieldFilter] == OGRArrowGeomEncoding::WKB) { - const arrow::Array *poArrayWKB = m_poBatchColumns[iCol].get(); + const arrow::Array *poArrayWKB = + GetStorageArray(m_poBatchColumns[iCol].get()); if (poArrayWKB->type_id() == arrow::Type::BINARY) m_poArrayWKB = static_cast<const arrow::BinaryArray *>(poArrayWKB); @@ -4075,7 +4091,7 @@ inline OGRFeature *OGRArrowLayer::GetNextRawFeature() do { bReturnFeature = false; - auto array = m_poBatchColumns[iCol].get(); + auto array = GetStorageArray(m_poBatchColumns[iCol].get()); 
CPLAssert(array->type_id() == arrow::Type::LIST); auto listOfPartsArray = static_cast<const arrow::ListArray *>(array); @@ -4170,7 +4186,7 @@ inline OGRFeature *OGRArrowLayer::GetNextRawFeature() do { bReturnFeature = false; - auto array = m_poBatchColumns[iCol].get(); + auto array = GetStorageArray(m_poBatchColumns[iCol].get()); CPLAssert(array->type_id() == arrow::Type::STRUCT); auto pointValues = static_cast<const arrow::StructArray *>(array); @@ -4227,7 +4243,7 @@ inline OGRFeature *OGRArrowLayer::GetNextRawFeature() do { bReturnFeature = false; - auto array = m_poBatchColumns[iCol].get(); + auto array = GetStorageArray(m_poBatchColumns[iCol].get()); CPLAssert(array->type_id() == arrow::Type::LIST); const auto listArray = static_cast<const arrow::ListArray *>(array); @@ -4300,7 +4316,7 @@ inline OGRFeature *OGRArrowLayer::GetNextRawFeature() do { bReturnFeature = false; - auto array = m_poBatchColumns[iCol].get(); + auto array = GetStorageArray(m_poBatchColumns[iCol].get()); CPLAssert(array->type_id() == arrow::Type::LIST); const auto listOfRingsArray = static_cast<const arrow::ListArray *>(array); @@ -4385,7 +4401,7 @@ inline OGRFeature *OGRArrowLayer::GetNextRawFeature() do { bReturnFeature = false; - auto array = m_poBatchColumns[iCol].get(); + auto array = GetStorageArray(m_poBatchColumns[iCol].get()); CPLAssert(array->type_id() == arrow::Type::LIST); const auto listArray = static_cast<const arrow::ListArray *>(array); @@ -4463,7 +4479,7 @@ inline OGRFeature *OGRArrowLayer::GetNextRawFeature() do { bReturnFeature = false; - auto array = m_poBatchColumns[iCol].get(); + auto array = GetStorageArray(m_poBatchColumns[iCol].get()); CPLAssert(array->type_id() == arrow::Type::LIST); auto listOfPartsArray = static_cast<const arrow::ListArray *>(array); @@ -4550,7 +4566,7 @@ inline OGRFeature *OGRArrowLayer::GetNextRawFeature() do { bReturnFeature = false; - auto array = m_poBatchColumns[iCol].get(); + auto array = GetStorageArray(m_poBatchColumns[iCol].get()); CPLAssert(array->type_id() == arrow::Type::LIST); auto listOfPartsArray = static_cast<const arrow::ListArray *>(array); @@ -4645,7 +4661,7 @@ inline OGRFeature *OGRArrowLayer::GetNextRawFeature() } else if (iCol >= 0) { - auto array = m_poBatchColumns[iCol].get(); + auto array = GetStorageArray(m_poBatchColumns[iCol].get()); while (true) { bool bMatchBBOX = false; @@ -4674,7 +4690,7 @@ inline OGRFeature *OGRArrowLayer::GetNextRawFeature() m_bEOF = !ReadNextBatch(); if (m_bEOF) return nullptr; - array = m_poBatchColumns[iCol].get(); + array = GetStorageArray(m_poBatchColumns[iCol].get()); } } } From f5de9ef3fa45478ae73ed3fa792ea04a4b44c2ce Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 16:23:05 +0200 Subject: [PATCH 106/230] Arrow/Parquet: handle fields with a pyarrow-registered extension type --- autotest/generate_parquet_test_file.py | 35 +++++++++++++ .../ogr/data/arrow/extension_custom.feather | Bin 0 -> 842 bytes .../ogr/data/parquet/extension_custom.parquet | Bin 0 -> 772 bytes autotest/ogr/ogr_arrow.py | 44 ++++++++++++++++ autotest/ogr/ogr_parquet.py | 13 +++++ ogr/ogrsf_frmts/arrow/ogrfeatherlayer.cpp | 5 +- ogr/ogrsf_frmts/arrow_common/ogr_arrow.h | 1 + .../arrow_common/ograrrowlayer.hpp | 47 ++++++++++++++---- ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp | 5 +- 9 files changed, 137 insertions(+), 13 deletions(-) create mode 100644 autotest/ogr/data/arrow/extension_custom.feather create mode 100644 autotest/ogr/data/parquet/extension_custom.parquet diff --git 
a/autotest/generate_parquet_test_file.py b/autotest/generate_parquet_test_file.py index 06ae46e218e3..ec934a646165 100644 --- a/autotest/generate_parquet_test_file.py +++ b/autotest/generate_parquet_test_file.py @@ -1177,8 +1177,43 @@ def generate_nested_types(): ) +def generate_extension_custom(): + import pathlib + + import pyarrow as pa + import pyarrow.feather as feather + import pyarrow.parquet as pq + + class MyJsonType(pa.ExtensionType): + def __init__(self): + super().__init__(pa.string(), "my_json") + + def __arrow_ext_serialize__(self): + return b"" + + @classmethod + def __arrow_ext_deserialize__(cls, storage_type, serialized): + return cls() + + my_json_type = MyJsonType() + storage_array = pa.array(['{"foo":"bar"}'], pa.string()) + extension_custom = pa.ExtensionArray.from_storage(my_json_type, storage_array) + + names = ["extension_custom"] + + locals_ = locals() + table = pa.table([locals_[x] for x in names], names=names) + + HERE = pathlib.Path(__file__).parent + feather.write_feather(table, HERE / "ogr/data/arrow/extension_custom.feather") + pq.write_table( + table, HERE / "ogr/data/parquet/extension_custom.parquet", compression="NONE" + ) + + if __name__ == "__main__": generate_test_parquet() generate_all_geoms_parquet() generate_parquet_wkt_with_dict() generate_nested_types() + generate_extension_custom() diff --git a/autotest/ogr/data/arrow/extension_custom.feather b/autotest/ogr/data/arrow/extension_custom.feather new file mode 100644 index 0000000000000000000000000000000000000000..5b8d42faf2be946615d84efca88ce15053d50bc8 GIT binary patch literal 842 zcmd^7%Sr<=6uo29I!J^;y68f%1Q#ytc5q?Ag&Qe!qq`b)1cjM^GlC*Q@oT#DL;NQ_ zCy!FZKk&f4$vw$^Ce!(Rb~zFG1|$<HgxC=+1L;eS9gr#50xO4BQG<pxf{5&4&Ec`_ z?-eoxLuCCS^a?`HF@#`_>ZPli_Q5ra+vm1(HTf;b8AL(8P*3t<j4A6Z*Is2~>k2e7 ziqx-*$JRB-p?!7qqZ~q5-*4Z#>g=8EERNPt@MK=jVTF}B35^J06597BJth0#&;4mo z8cg9chdWoGd51ALqlA>Bmxw>sgUF5J${rFJYcn2s_DLvnF}gnefVrf(OGTtjJcqe_ zGxyG!(%jf5^B!)+_lLo^(Wrdjr&)xa(c205BM4{28j4uRC-%hVJOcAS{>w4u;Q!a; E2iU(~MgRZ+ literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/parquet/extension_custom.parquet b/autotest/ogr/data/parquet/extension_custom.parquet new file mode 100644 index 0000000000000000000000000000000000000000..e42cd9d377a485e95e3eef91e3974702206e7007 GIT binary patch literal 772 zcmcIi-%A2P5FQti9t=cUxr2L<gQN$uQz|g%Vb1c1e%VQ;y@XzuR@&KlXHoQT_1NFi zpVG`)kQDUPa67X*^X)g^9@9X#ER)id=7AIvvH*(!@N8ZMfw^V&x}o_ZL1tyPNac)7 z$&|4PD~odgZT?921+PRxK;bQ6$h5wZft0Rg{Kv?@n_LHX7hXJxf{}!r$h{`$E7S`M ze-rCPk_u7)xeNsWnbeCLprt0>z#nzFl3o@+8!f|uLFk;&4c+)}P5dHJSfH>3pc`5m zUHABGYbH^97i(+}Xd~hYEw+6`O7yXg*vDF5<!k~R@~(!s$C1uokxsju>aZhg@@2v~ zh#u~6hQLJD<Sm(PyWFZ4@PTljjyIo7PHX+rX~kY;IPDFqH<#@rt5>mR0M@gsq^hP* z4c4zjDo1^&UT>b>R@=v;Z#*~SxN}w>&PY58cI3CJ_g<wM^E*i=@hRO@Q&hK%b=dvR z7oE4=!MRhnI$wW|vC2;P$x0t+`EfVAkNs%X8;@7JVHiB*r+&zTmoJw~R>{&09*1q3 Ipa#wV2mD396aWAK literal 0 HcmV?d00001 diff --git a/autotest/ogr/ogr_arrow.py b/autotest/ogr/ogr_arrow.py index 4e7857e4b994..59115e397690 100755 --- a/autotest/ogr/ogr_arrow.py +++ b/autotest/ogr/ogr_arrow.py @@ -492,6 +492,50 @@ def __arrow_ext_deserialize__(cls, storage_type, serialized): pa.unregister_extension_type(point_type.extension_name) +############################################################################### +# Test reading a file with an extension on a regular field registered with +# PyArrow + + +def test_ogr_arrow_read_with_extension_registered_on_regular_field(): + pa = pytest.importorskip("pyarrow") + + class MyJsonType(pa.ExtensionType): + def __init__(self): + super().__init__(pa.string(), 
"my_json") + + def __arrow_ext_serialize__(self): + return b"" + + @classmethod + def __arrow_ext_deserialize__(cls, storage_type, serialized): + return cls() + + my_json_type = MyJsonType() + + pa.register_extension_type(my_json_type) + try: + ds = ogr.Open("data/arrow/extension_custom.feather") + lyr = ds.GetLayer(0) + f = lyr.GetNextFeature() + assert f["extension_custom"] == '{"foo":"bar"}' + finally: + pa.unregister_extension_type(my_json_type.extension_name) + + +############################################################################### +# Test reading a file with an extension on a regular field not registered with +# PyArrow + + +def test_ogr_arrow_read_with_extension_not_registered_on_regular_field(): + + ds = ogr.Open("data/arrow/extension_custom.feather") + lyr = ds.GetLayer(0) + f = lyr.GetNextFeature() + assert f["extension_custom"] == '{"foo":"bar"}' + + ############################################################################### # Test storing OGR field alternative name and comment in gdal:schema extension diff --git a/autotest/ogr/ogr_parquet.py b/autotest/ogr/ogr_parquet.py index 55fd9fb7be5b..61519fc043f3 100755 --- a/autotest/ogr/ogr_parquet.py +++ b/autotest/ogr/ogr_parquet.py @@ -3678,3 +3678,16 @@ def check(lyr): ): lyr.SetSpatialFilterRect(minx + 0.1, miny + 0.1, maxx - 0.1, maxy - 0.1) assert lyr.GetFeatureCount() != 0 + + +############################################################################### +# Test reading a file with an extension on a regular field not registered with +# PyArrow + + +def test_ogr_parquet_read_with_extension_not_registered_on_regular_field(): + + ds = ogr.Open("data/parquet/extension_custom.parquet") + lyr = ds.GetLayer(0) + f = lyr.GetNextFeature() + assert f["extension_custom"] == '{"foo":"bar"}' diff --git a/ogr/ogrsf_frmts/arrow/ogrfeatherlayer.cpp b/ogr/ogrsf_frmts/arrow/ogrfeatherlayer.cpp index f9bfb0cb61fa..a82639d33332 100644 --- a/ogr/ogrsf_frmts/arrow/ogrfeatherlayer.cpp +++ b/ogr/ogrsf_frmts/arrow/ogrfeatherlayer.cpp @@ -209,8 +209,9 @@ void OGRFeatherLayer::EstablishFeatureDefn() OGRwkbGeometryType eGeomType = wkbUnknown; auto eGeomEncoding = OGRArrowGeomEncoding::WKB; - if (IsValidGeometryEncoding(field, osEncoding, eGeomType, - eGeomEncoding)) + if (IsValidGeometryEncoding(field, osEncoding, + oIter != m_oMapGeometryColumns.end(), + eGeomType, eGeomEncoding)) { bRegularField = false; OGRGeomFieldDefn oField(fieldName.c_str(), wkbUnknown); diff --git a/ogr/ogrsf_frmts/arrow_common/ogr_arrow.h b/ogr/ogrsf_frmts/arrow_common/ogr_arrow.h index 0d0d784959c2..b47cb907d16f 100644 --- a/ogr/ogrsf_frmts/arrow_common/ogr_arrow.h +++ b/ogr/ogrsf_frmts/arrow_common/ogr_arrow.h @@ -192,6 +192,7 @@ class OGRArrowLayer CPL_NON_FINAL static bool IsValidGeometryEncoding(const std::shared_ptr<arrow::Field> &field, const std::string &osEncoding, + bool bWarnIfUnknownEncoding, OGRwkbGeometryType &eGeomTypeOut, OGRArrowGeomEncoding &eGeomEncodingOut); static OGRwkbGeometryType diff --git a/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp b/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp index 3e6d19c449f0..40d497c154c4 100644 --- a/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp +++ b/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp @@ -285,7 +285,7 @@ OGRArrowLayer::IsHandledMapType(const std::shared_ptr<arrow::MapType> &mapType) /************************************************************************/ inline bool OGRArrowLayer::MapArrowTypeToOGR( - const std::shared_ptr<arrow::DataType> &type, + const std::shared_ptr<arrow::DataType> 
&typeIn, const std::shared_ptr<arrow::Field> &field, OGRFieldDefn &oField, OGRFieldType &eType, OGRFieldSubType &eSubType, const std::vector<int> &path, @@ -293,6 +293,32 @@ inline bool OGRArrowLayer::MapArrowTypeToOGR( &oMapFieldNameToGDALSchemaFieldDefn) { bool bTypeOK = false; + + std::string osExtensionName; + std::shared_ptr<arrow::DataType> type(typeIn); + if (type->id() == arrow::Type::EXTENSION) + { + auto extensionType = cpl::down_cast<arrow::ExtensionType *>(type.get()); + osExtensionName = extensionType->extension_name(); + type = extensionType->storage_type(); + } + else if (const auto &field_kv_metadata = field->metadata()) + { + auto extension_name = field_kv_metadata->Get("ARROW:extension:name"); + if (extension_name.ok()) + { + osExtensionName = *extension_name; + } + } + + if (!osExtensionName.empty()) + { + CPLDebug(GetDriverUCName().c_str(), + "Dealing with field %s of extension type %s as %s", + field->name().c_str(), osExtensionName.c_str(), + type->ToString().c_str()); + } + switch (type->id()) { case arrow::Type::NA: @@ -909,7 +935,7 @@ IsListOfPointStructType(const std::shared_ptr<arrow::DataType> &type, inline bool OGRArrowLayer::IsValidGeometryEncoding( const std::shared_ptr<arrow::Field> &field, const std::string &osEncoding, - OGRwkbGeometryType &eGeomTypeOut, + bool bWarnIfUnknownEncoding, OGRwkbGeometryType &eGeomTypeOut, OGRArrowGeomEncoding &eOGRArrowGeomEncodingOut) { const auto &fieldName = field->name(); @@ -1133,10 +1159,13 @@ inline bool OGRArrowLayer::IsValidGeometryEncoding( return true; } - CPLError(CE_Warning, CPLE_AppDefined, - "Geometry column %s uses a unhandled encoding: %s. " - "Handling it as a regular field", - fieldName.c_str(), osEncoding.c_str()); + if (bWarnIfUnknownEncoding) + { + CPLError(CE_Warning, CPLE_AppDefined, + "Geometry column %s uses a unhandled encoding: %s. 
" + "Handling it as a regular field", + fieldName.c_str(), osEncoding.c_str()); + } return false; } @@ -2024,7 +2053,7 @@ inline OGRFeature *OGRArrowLayer::ReadFeature( iCol = m_anMapFieldIndexToArrowColumn[i][0]; } - const arrow::Array *array = poColumnArrays[iCol].get(); + const arrow::Array *array = GetStorageArray(poColumnArrays[iCol].get()); if (array->IsNull(nIdxInBatch)) { poFeature->SetFieldNull(i); @@ -2043,7 +2072,7 @@ inline OGRFeature *OGRArrowLayer::ReadFeature( const int iArrowSubcol = m_anMapFieldIndexToArrowColumn[i][j]; j++; CPLAssert(iArrowSubcol < static_cast<int>(subArrays.size())); - array = subArrays[iArrowSubcol].get(); + array = GetStorageArray(subArrays[iArrowSubcol].get()); if (array->IsNull(nIdxInBatch)) { poFeature->SetFieldNull(i); @@ -2060,7 +2089,7 @@ inline OGRFeature *OGRArrowLayer::ReadFeature( static_cast<const arrow::DictionaryArray *>(array); m_poReadFeatureTmpArray = castArray->indices(); // does not return a const reference - array = m_poReadFeatureTmpArray.get(); + array = GetStorageArray(m_poReadFeatureTmpArray.get()); if (array->IsNull(nIdxInBatch)) { poFeature->SetFieldNull(i); diff --git a/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp b/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp index ce3a755f3ed6..114cf1410de4 100644 --- a/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp +++ b/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp @@ -220,8 +220,9 @@ bool OGRParquetLayerBase::DealWithGeometryColumn( OGRwkbGeometryType eGeomType = wkbUnknown; auto eGeomEncoding = OGRArrowGeomEncoding::WKB; - if (IsValidGeometryEncoding(field, osEncoding, eGeomType, - eGeomEncoding)) + if (IsValidGeometryEncoding(field, osEncoding, + oIter != m_oMapGeometryColumns.end(), + eGeomType, eGeomEncoding)) { bRegularField = false; OGRGeomFieldDefn oField(field->name().c_str(), wkbUnknown); From 21c06b9d486171e45b20df2006ab09666d3a8ba9 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 16:38:22 +0200 Subject: [PATCH 107/230] Arrow/Parquet: preliminary/in-advance read support for future JSON Canonical Extension Cf https://github.com/apache/arrow/pull/41257 and https://github.com/apache/arrow/pull/13901 --- autotest/generate_parquet_test_file.py | 35 ++++++++++++++++++ .../ogr/data/arrow/extension_json.feather | Bin 0 -> 842 bytes .../ogr/data/parquet/extension_json.parquet | Bin 0 -> 766 bytes autotest/ogr/ogr_arrow.py | 27 ++++++++++++++ autotest/ogr/ogr_parquet.py | 13 +++++++ .../arrow_common/ograrrowlayer.hpp | 8 +++- ogr/ogrsf_frmts/generic/ogrlayerarrow.cpp | 5 +++ ogr/ogrsf_frmts/generic/ogrlayerarrow.h | 1 + 8 files changed, 88 insertions(+), 1 deletion(-) create mode 100644 autotest/ogr/data/arrow/extension_json.feather create mode 100644 autotest/ogr/data/parquet/extension_json.parquet diff --git a/autotest/generate_parquet_test_file.py b/autotest/generate_parquet_test_file.py index ec934a646165..71e226d4ed9e 100644 --- a/autotest/generate_parquet_test_file.py +++ b/autotest/generate_parquet_test_file.py @@ -1211,9 +1211,44 @@ def __arrow_ext_deserialize__(cls, storage_type, serialized): ) +def generate_extension_json(): + import pathlib + + import pyarrow as pa + import pyarrow.feather as feather + import pyarrow.parquet as pq + + class JsonType(pa.ExtensionType): + def __init__(self): + super().__init__(pa.string(), "arrow.json") + + def __arrow_ext_serialize__(self): + return b"" + + @classmethod + def __arrow_ext_deserialize__(cls, storage_type, serialized): + return cls() + + json_type = JsonType() + storage_array = 
pa.array(['{"foo":"bar"}'], pa.string()) + extension_json = pa.ExtensionArray.from_storage(json_type, storage_array) + + names = ["extension_json"] + + locals_ = locals() + table = pa.table([locals_[x] for x in names], names=names) + + HERE = pathlib.Path(__file__).parent + feather.write_feather(table, HERE / "ogr/data/arrow/extension_json.feather") + pq.write_table( + table, HERE / "ogr/data/parquet/extension_json.parquet", compression="NONE" + ) + + if __name__ == "__main__": generate_test_parquet() generate_all_geoms_parquet() generate_parquet_wkt_with_dict() generate_nested_types() generate_extension_custom() + generate_extension_json() diff --git a/autotest/ogr/data/arrow/extension_json.feather b/autotest/ogr/data/arrow/extension_json.feather new file mode 100644 index 0000000000000000000000000000000000000000..a902cb2ddafb5e51f82ef41caa3dd052fabdd1e7 GIT binary patch literal 842 zcmd^7Jxc>Y5S=rdc*rS-q(~7+SS+l>w1-8~r4b=b>~w=CDC8C{2MQtte@#k%M*fw) zw;v${|A7zQ?#|np*_pYU%w~7<k;o4qiAXNQiDc4|wxl=#iGnS#QmiVhkYV*ek#p=R zB95J3E?p=@((Yp2fQ;;e#2Hw=H)XYYvDL%t%2pyr(59dQx2T2wLy++k)>!N!ipnfY z$gVb1*Y@qo=ScB7t79yA0kXcs_+{CcC({^Q?Vu3Jw>gImcD_qw$lxcDx!$CuTpv?^ z9enzj8(;Q5_fXj@b8tox@r_m#{@4dW8-|@dL@?KWKI-g~Q2Ms_IR1jUrg_SRh=*_w zbN!*8t<{BIm|A~&ukih0@cU<|yyLpbeb4Ca2Kpt)8L@^u6!MKTu|E&5{Ez?T8gp>} G*XS2s<X@%$ literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/parquet/extension_json.parquet b/autotest/ogr/data/parquet/extension_json.parquet new file mode 100644 index 0000000000000000000000000000000000000000..c331e835492f37a5250151c42c0eac14286cc211 GIT binary patch literal 766 zcmcIiK~LgP5FTC(@qmVqeWXo#AWbA5PyxlH-UhIZyBMS>AQw}*T~H}41;ll;e`}BX zTlS|o^N5M=WluZlo9WDaGw++1xk9%hx1=e}mJ||l06PHi!~7OS=9xM267#17nUlE^ zrL!_E)5a<6?|cAgA6=>!d{QX^#osAIX7pMX(z>=AJTHUU;y#+)JuIS`guKZAO3_!e zi`)NhtCxr<NCBj>WB}xrUaEnXUigzB^f;5*6MtW`2Q7oJd7&G+@qhX7G@`go<`aN! 
zXc=_fcK>WcPx~W|Sk_oa#10*nFNhQvqm4Mm##qJb1i0kC2I2)<^4~0Z=N!6aMbhPB z!a0aOHaS9IYS!gnl6u|cs~QnIgrmrswUQ>^=k!j#I=z)&oj#4G*4(dJD~c-UrU}<K zFPORtDup-Ppws)@y{`DD!@;CH?AKnrzO`zHwaKWj(wli4`@0`n$E`4a8C6fHYRo)o z2*q3Zq#o(;5R|dIH;>k!9lqoB5HE)VcQSPB^6lHpF;+>3M^=vlS|Rq5`6^f*_;GyT dB}ud{JOv4_T%l4qEFYG2gO|ag?YW}Oe+9rWyTSkf literal 0 HcmV?d00001 diff --git a/autotest/ogr/ogr_arrow.py b/autotest/ogr/ogr_arrow.py index 59115e397690..8e33f05b87a7 100755 --- a/autotest/ogr/ogr_arrow.py +++ b/autotest/ogr/ogr_arrow.py @@ -536,6 +536,33 @@ def test_ogr_arrow_read_with_extension_not_registered_on_regular_field(): assert f["extension_custom"] == '{"foo":"bar"}' +############################################################################### +# Test reading a file with the arrow.json extension + + +def test_ogr_arrow_read_arrow_json_extension(): + + ds = ogr.Open("data/arrow/extension_json.feather") + lyr = ds.GetLayer(0) + assert lyr.GetLayerDefn().GetFieldDefn(0).GetSubType() == ogr.OFSTJSON + f = lyr.GetNextFeature() + assert f["extension_json"] == '{"foo":"bar"}' + + stream = lyr.GetArrowStream() + schema = stream.GetSchema() + + dst_ds = gdal.GetDriverByName("Memory").Create("", 0, 0, 0, gdal.GDT_Unknown) + dst_lyr = dst_ds.CreateLayer("test") + success, error_msg = dst_lyr.IsArrowSchemaSupported(schema) + assert success + + for i in range(schema.GetChildrenCount()): + if schema.GetChild(i).GetName() not in ("wkb_geometry", "OGC_FID"): + dst_lyr.CreateFieldFromArrowSchema(schema.GetChild(i)) + + assert dst_lyr.GetLayerDefn().GetFieldDefn(0).GetSubType() == ogr.OFSTJSON + + ############################################################################### # Test storing OGR field alternative name and comment in gdal:schema extension diff --git a/autotest/ogr/ogr_parquet.py b/autotest/ogr/ogr_parquet.py index 61519fc043f3..ebfb3ca3b889 100755 --- a/autotest/ogr/ogr_parquet.py +++ b/autotest/ogr/ogr_parquet.py @@ -3691,3 +3691,16 @@ def test_ogr_parquet_read_with_extension_not_registered_on_regular_field(): lyr = ds.GetLayer(0) f = lyr.GetNextFeature() assert f["extension_custom"] == '{"foo":"bar"}' + + +############################################################################### +# Test reading a file with the arrow.json extension + + +def test_ogr_parquet_read_arrow_json_extension(): + + ds = ogr.Open("data/parquet/extension_json.parquet") + lyr = ds.GetLayer(0) + assert lyr.GetLayerDefn().GetFieldDefn(0).GetSubType() == ogr.OFSTJSON + f = lyr.GetNextFeature() + assert f["extension_json"] == '{"foo":"bar"}' diff --git a/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp b/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp index 40d497c154c4..b7a4fc847f09 100644 --- a/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp +++ b/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp @@ -311,7 +311,11 @@ inline bool OGRArrowLayer::MapArrowTypeToOGR( } } - if (!osExtensionName.empty()) + // Preliminary/in-advance read support for future JSON Canonical Extension + // Cf https://github.com/apache/arrow/pull/41257 and + // https://github.com/apache/arrow/pull/13901 + if (!osExtensionName.empty() && + osExtensionName != EXTENSION_NAME_ARROW_JSON) { CPLDebug(GetDriverUCName().c_str(), "Dealing with field %s of extension type %s as %s", @@ -370,6 +374,8 @@ inline bool OGRArrowLayer::MapArrowTypeToOGR( case arrow::Type::LARGE_STRING: bTypeOK = true; eType = OFTString; + if (osExtensionName == EXTENSION_NAME_ARROW_JSON) + eSubType = OFSTJSON; break; case arrow::Type::BINARY: case arrow::Type::LARGE_BINARY: diff --git 
a/ogr/ogrsf_frmts/generic/ogrlayerarrow.cpp b/ogr/ogrsf_frmts/generic/ogrlayerarrow.cpp index 1c88d044b258..4ab02935f5ba 100644 --- a/ogr/ogrsf_frmts/generic/ogrlayerarrow.cpp +++ b/ogr/ogrsf_frmts/generic/ogrlayerarrow.cpp @@ -5948,6 +5948,11 @@ bool OGRLayer::CreateFieldFromArrowSchemaInternal( if (poDS && poDS->GetFieldDomain(oIter.second)) oFieldDefn.SetDomainName(oIter.second); } + else if (oIter.first == ARROW_EXTENSION_NAME_KEY && + oIter.second == EXTENSION_NAME_ARROW_JSON) + { + oFieldDefn.SetSubType(OFSTJSON); + } else { CPLDebug("OGR", "Unknown field metadata: %s", diff --git a/ogr/ogrsf_frmts/generic/ogrlayerarrow.h b/ogr/ogrsf_frmts/generic/ogrlayerarrow.h index 0e3c759ede6a..dc9ed4e726ef 100644 --- a/ogr/ogrsf_frmts/generic/ogrlayerarrow.h +++ b/ogr/ogrsf_frmts/generic/ogrlayerarrow.h @@ -38,6 +38,7 @@ constexpr const char *ARROW_EXTENSION_NAME_KEY = "ARROW:extension:name"; constexpr const char *ARROW_EXTENSION_METADATA_KEY = "ARROW:extension:metadata"; constexpr const char *EXTENSION_NAME_OGC_WKB = "ogc.wkb"; constexpr const char *EXTENSION_NAME_GEOARROW_WKB = "geoarrow.wkb"; +constexpr const char *EXTENSION_NAME_ARROW_JSON = "arrow.json"; std::map<std::string, std::string> CPL_DLL OGRParseArrowMetadata(const char *pabyMetadata); From a113437c25036009ee7e86329e6fbdde1ac9ee04 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 17:47:20 +0200 Subject: [PATCH 108/230] Update gdal-bash-completion.sh --- scripts/gdal-bash-completion.sh | 49 ++++++++++++++------------------- 1 file changed, 21 insertions(+), 28 deletions(-) diff --git a/scripts/gdal-bash-completion.sh b/scripts/gdal-bash-completion.sh index e48af70e6bb9..90a99211f3fc 100644 --- a/scripts/gdal-bash-completion.sh +++ b/scripts/gdal-bash-completion.sh @@ -46,7 +46,7 @@ _gdaladdo() _get_comp_words_by_ref cur prev case "$cur" in -*) - key_list="--help --help-general -r -ro -clean -q -oo -minsize --partial-refresh-from-source-timestamp --partial-refresh-from-projwin --partial-refresh-from-source-extent --version --build --license --formats --format --optfile --config --debug --pause --locale " + key_list="--help --long-usage --help-general -r -ro --quiet -b -oo -minsize -clean --partial-refresh-from-source-timestamp --partial-refresh-from-projwin --partial-refresh-from-source-extent --version --build --license --formats --format --optfile --config --debug --pause --locale " mapfile -t COMPREPLY < <(compgen -W "$key_list" -- "$cur") return 0 ;; @@ -68,7 +68,7 @@ _gdalbuildvrt() _get_comp_words_by_ref cur prev case "$cur" in -*) - key_list="--help --help-general -tileindex -resolution -te -tr -tap -separate -b -sd -allow_projection_difference -q -addalpha -hidenodata -srcnodata -vrtnodata -ignore_srcmaskband -a_srs -r -oo -input_file_list -overwrite -strict -non_strict --version --build --license --formats --format --optfile --config --debug --pause --locale " + key_list="--help --long-usage --help-general --quiet -strict -non_strict -tile_index -resolution -tr -input_file_list -separate -allow_projection_difference -sd -tap -te -addalpha -b -hidenodata -overwrite -srcnodata -vrtnodata -a_srs -r -oo -ignore_srcmaskband -nodata_max_mask_threshold --version --build --license --formats --format --optfile --config --debug --pause --locale " mapfile -t COMPREPLY < <(compgen -W "$key_list" -- "$cur") return 0 ;; @@ -194,7 +194,7 @@ _gdal_edit.py() _get_comp_words_by_ref cur prev case "$cur" in -*) - key_list="--help --help-general -ro -a_srs -a_ullr -a_ulurll -tr -unsetgt -unsetrpc 
-a_nodata -unsetnodata -offset -scale -units -colorinterp_<X> -unsetstats -stats -approx_stats -setstats -gcp -unsetmd -oo -mo --version --build --license --formats --format --optfile --config --debug --pause --locale " + key_list="--help --help-general -ro -a_srs -a_ullr -a_ulurll -tr -unsetgt -unsetrpc -a_nodata -unsetnodata -offset -scale -units -colorinterp_<X> -a_coord_epoch -unsetepoch -unsetstats -stats -approx_stats -setstats -gcp -unsetmd -oo -mo --version --build --license --formats --format --optfile --config --debug --pause --locale " mapfile -t COMPREPLY < <(compgen -W "$key_list" -- "$cur") return 0 ;; @@ -246,7 +246,7 @@ _gdal_fillnodata.py() _get_comp_words_by_ref cur prev case "$cur" in -*) - key_list="--help --help-general -q -md -si -o -mask -b -of -co --version --build --license --formats --format --optfile --config --debug --pause --locale " + key_list="--help --help-general -q -md -si -o -mask -interp -b -of -co --version --build --license --formats --format --optfile --config --debug --pause --locale " mapfile -t COMPREPLY < <(compgen -W "$key_list" -- "$cur") return 0 ;; @@ -272,7 +272,7 @@ _gdal_grid() _get_comp_words_by_ref cur prev case "$cur" in -*) - key_list="--help --help-general -oo -ot -of -co -zfield -z_increase -z_multiply -a_srs -spat -clipsrc -clipsrcsql -clipsrclayer -clipsrcwhere -l -where -sql -txe -tye -tr -outsize -a -q --version --build --license --formats --format --optfile --config --debug --pause --locale " + key_list="--help --long-usage --help-general --quiet -of -ot -txe -tye -outsize -tr -co -zfield -z_increase -z_multiply -where -l -sql -spat -clipsrc -clipsrcsql -clipsrclayer -clipsrcwhere -a_srs -a -oo --version --build --license --formats --format --optfile --config --debug --pause --locale " mapfile -t COMPREPLY < <(compgen -W "$key_list" -- "$cur") return 0 ;; @@ -302,7 +302,7 @@ _gdalinfo() _get_comp_words_by_ref cur prev case "$cur" in -*) - key_list="--help --help-general -json -mm -stats -approx_stats -hist -nogcp -nomd -norat -noct -nofl -checksum -listmdd -mdd -proj4 -wkt_format -sd -oo -if --version --build --license --formats --format --optfile --config --debug --pause --locale " + key_list="--help --long-usage --help-general -json -mm -stats -approx_stats -hist -nogcp -nomd -norat -noct -nofl -checksum -listmdd -proj4 -wkt_format -sd -oo -if -mdd --version --build --license --formats --format --optfile --config --debug --pause --locale " mapfile -t COMPREPLY < <(compgen -W "$key_list" -- "$cur") return 0 ;; @@ -324,7 +324,7 @@ _gdallocationinfo() _get_comp_words_by_ref cur prev case "$cur" in -*) - key_list="--help --help-general -xml -lifonly -valonly -b -overview -l_srs -geoloc -wgs84 -oo --version --build --license --formats --format --optfile --config --debug --pause --locale " + key_list="--help --help-general -xml -lifonly -valonly -E -field_sep -ignore_extra_input -b -overview -l_srs -geoloc -wgs84 -oo --version --build --license --formats --format --optfile --config --debug --pause --locale " mapfile -t COMPREPLY < <(compgen -W "$key_list" -- "$cur") return 0 ;; @@ -570,13 +570,17 @@ _gdaltindex() _get_comp_words_by_ref cur prev case "$cur" in -*) - key_list="--help --help-general -f -tileindex -write_absolute_path -skip_different_projection -t_srs -src_srs_name -src_srs_format -lyr_name --version --build --license --formats --format --optfile --config --debug --pause --locale " + key_list="--help --help-general -overwrite -recursive -filename_filter -min_pixel_size -max_pixel_size -f -tileindex -write_absolute_path 
-skip_different_projection -t_srs -src_srs_name -src_srs_format -lyr_name -lco -gti_filename -tr -te -ot -bandcount -nodata -colorinterp -mask -mo -fetch_md --version --build --license --formats --format --optfile --config --debug --pause --locale " mapfile -t COMPREPLY < <(compgen -W "$key_list" -- "$cur") return 0 ;; esac tool=${COMP_WORDS[0]} case "$prev" in + -ot) + key_list="Byte Int16 UInt16 UInt32 Int32 Float32 Float64 CInt16 CInt32 CFloat32 CFloat64" + mapfile -t COMPREPLY < <(compgen -W "$key_list" -- "$cur") + ;; -f) key_list="$( $tool --formats | tail -n +2 | cut -f 3 -d ' ')" mapfile -t COMPREPLY < <(compgen -W "$key_list" -- "$cur") @@ -596,7 +600,7 @@ _gdaltransform() _get_comp_words_by_ref cur prev case "$cur" in -*) - key_list="--help --help-general -i -s_srs -t_srs -to -s_coord_epoch -t_coord_epoch -ct -order -tps -rpc -geoloc -gcp -output_xy --version --build --license --formats --format --optfile --config --debug --pause --locale " + key_list="--help --help-general -i -s_srs -t_srs -to -s_coord_epoch -t_coord_epoch -ct -order -tps -rpc -geoloc -gcp -output_xy -E -field_sep -ignore_extra_input --version --build --license --formats --format --optfile --config --debug --pause --locale " mapfile -t COMPREPLY < <(compgen -W "$key_list" -- "$cur") return 0 ;; @@ -618,7 +622,7 @@ _gdal_translate() _get_comp_words_by_ref cur prev case "$cur" in -*) - key_list="--help --help-general --long-usage -ot -strict -if -of -b -mask -expand -outsize -tr -ovr -r -unscale -scale -exponent -srcwin -epo -eco -projwin -projwin_srs -a_srs -a_coord_epoch -a_ullr -a_nodata -a_gt -a_scale -a_offset -nogcp -gcp -colorinterp{_bn} -colorinterp -mo -q -sds -co -stats -norat -noxmp -oo --version --build --license --formats --format --optfile --config --debug --pause --locale " + key_list="--help --long-usage --help-general -ot -if -of --quiet -b -mask -expand -strict -not_strict -outsize -tr -ovr -sds -r -scale -scale_X -unscale -exponent -exponent_X -srcwin -projwin -projwin_srs -epo -eco -a_srs -a_coord_epoch -a_ullr -a_nodata -a_gt -a_scale -a_offset -nogcp -gcp -colorinterp -colorinterp_X -stats -approx_stats -norat -noxmp -co -mo -dmo -oo --version --build --license --formats --format --optfile --config --debug --pause --locale " mapfile -t COMPREPLY < <(compgen -W "$key_list" -- "$cur") return 0 ;; @@ -648,7 +652,7 @@ _gdalwarp() _get_comp_words_by_ref cur prev case "$cur" in -*) - key_list="--help --help-general --formats -b -srcband -dstband -s_srs -t_srs -ct -to -vshift -novshift -s_coord_epoch -t_coord_epoch -order -tps -rpc -geoloc -et -refine_gcps -te -te_srs -tr -tr -tap -ts -ovr -wo -ot -wt -srcnodata -dstnodata -srcalpha -nosrcalpha -dstalpha -r -wm -multi -q -cutline -cl -cwhere -csql -cblend -crop_to_cutline -if -of -co -overwrite -nomd -cvmd -setci -oo -doo --version --build --license --format --optfile --config --debug --pause --locale " + key_list="--help --long-usage --help-general --quiet -overwrite -of -co -s_srs -t_srs -srcalpha -nosrcalpha -dstalpha -tr -ts -te -te_srs -r -ot --version --build --license --formats --format --optfile --config --debug --pause --locale " mapfile -t COMPREPLY < <(compgen -W "$key_list" -- "$cur") return 0 ;; @@ -678,14 +682,14 @@ _gdal_viewshed() _get_comp_words_by_ref cur prev case "$cur" in -*) - key_list="--help --help-general -b -a_nodata -f -oz -tz -md -ox -oy -vv -iv -ov -cc -co -q -om --version --build --license --formats --format --optfile --config --debug --pause --locale " + key_list="--help --long-usage --help-general -of -ox -oy -oz -vv 
-iv -ov -co -a_nodata -tz -md -cc -b -om --quiet --version --build --license --formats --format --optfile --config --debug --pause --locale " mapfile -t COMPREPLY < <(compgen -W "$key_list" -- "$cur") return 0 ;; esac tool=${COMP_WORDS[0]} case "$prev" in - -f) + -of) key_list="$( $tool --formats | tail -n +2 | cut -f 3 -d ' ')" mapfile -t COMPREPLY < <(compgen -W "$key_list" -- "$cur") ;; @@ -734,7 +738,7 @@ _sozip() _get_comp_words_by_ref cur prev case "$cur" in -*) - key_list="--help --help-general --quiet --verbose -g --grow --overwrite -r --recurse-paths -j --junk -l --list --validate --optimize-from=input.zip --enable-sozip={auto --sozip-chunk-size=<value> --sozip-min-file-size=<value> --content-type=<value> --version --build --license --formats --format --optfile --config --debug --pause --locale " + key_list="--help --long-usage --help-general --recurse-paths --grow --overwrite --list --validate --optimize-from --version --build --license --formats --format --optfile --config --debug --pause --locale " mapfile -t COMPREPLY < <(compgen -W "$key_list" -- "$cur") return 0 ;; @@ -756,7 +760,7 @@ _gdal_footprint() _get_comp_words_by_ref cur prev case "$cur" in -*) - key_list="--help --help-general -b -combine_bands -oo -ovr -srcnodata -t_cs -t_srs -split_polys -convex_hull -densify -simplify -min_ring_area -max_points -of -lyr_name -dsco -lco -overwrite -q --version --build --license --formats --format --optfile --config --debug --pause --locale " + key_list="--help --help-general -b -combine_bands -oo -ovr -srcnodata -t_cs -t_srs -split_polys -convex_hull -densify -simplify -min_ring_area -max_points -of -lyr_name -location_field_name -no_location -write_absolute_path -dsco -lco -overwrite -q --version --build --license --formats --format --optfile --config --debug --pause --locale " mapfile -t COMPREPLY < <(compgen -W "$key_list" -- "$cur") return 0 ;; @@ -782,22 +786,11 @@ _ogr2ogr() _get_comp_words_by_ref cur prev case "$cur" in -*) - key_list="--help --help-general -skipfailures -append -upsert -update -select -where -progress -sql -dialect -preserve_fid -fid -limit -spat -spat_srs -geomfield -a_srs -t_srs -s_srs -ct -f -overwrite -dsco -lco -nln -nlt -dim --version --build --license --formats --format --optfile --config --debug --pause --locale " + key_list="--help --long-usage --help-general -of -dsco -lco -append -upsert -overwrite -update -sql -dialect -spat -where -select -nln -nlt -s_srs -a_srs -t_srs --version --build --license --formats --format --optfile --config --debug --pause --locale " mapfile -t COMPREPLY < <(compgen -W "$key_list" -- "$cur") return 0 ;; esac - tool=${COMP_WORDS[0]/ogrtindex/ogr2ogr} - case "$prev" in - -f) - key_list="$( $tool --formats | tail -n +2 | grep -o -E '"[^"]+"' | sed 's/\ /__/')" - for iter in $key_list; do - if [[ $iter =~ ^$cur ]]; then - COMPREPLY+=( "${iter//__/ }" ) - fi - done - ;; - esac return 0 } complete -o default -F _ogr2ogr ogr2ogr @@ -808,7 +801,7 @@ _ogrinfo() _get_comp_words_by_ref cur prev case "$cur" in -*) - key_list="--help --help-general -json -ro -q -where -spat -geomfield -fid -sql -dialect -al -rl -so -features -fields={YES -geom={YES -oo -nomd -listmdd -mdd -nocount -noextent -nogeomtype -wkt_format -fielddomain --version --build --license --formats --format --optfile --config --debug --pause --locale " + key_list="--help --long-usage --help-general -json -ro -update --quiet -fid -spat -geomfield -where -sql -rl -dialect -al -summary -features -limit -fields -geom -oo -nomd -listmdd -mdd -nocount -noextent 
-extent3D -nogeomtype -wkt_format -fielddomain -if --version --build --license --formats --format --optfile --config --debug --pause --locale " mapfile -t COMPREPLY < <(compgen -W "$key_list" -- "$cur") return 0 ;; From 8fd81616be39f5f39aecb97c4749265e55ab389c Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 17:49:42 +0200 Subject: [PATCH 109/230] scripts/typos_allowlist.txt: update --- scripts/typos_allowlist.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/typos_allowlist.txt b/scripts/typos_allowlist.txt index be5ac1e0ee82..d6e0ee34aa44 100644 --- a/scripts/typos_allowlist.txt +++ b/scripts/typos_allowlist.txt @@ -316,7 +316,7 @@ either 2 or 4 comma separated values. The same rules apply for the source and de CPLStrlcpy(szPerimeterOfThePolygonCat, "Perimetre del poligon", CPLStrlcpy(szAreaOfThePolygonCat, "Area del poligon", CPLStrlcpy(szNumberOfElementaryPolygonsCat, "Nombre de poligons elementals", - VSIFPrintfL(MMMap.fMMMap, "[VERSIO]\n"); + VSIFPrintfL(m_MMMap.fMMMap, "[VERSIO]\n"); #define MM_IsDoubleInfinite(x) EsDoubleInfinit((x)) https://www.miramon.cat/help/eng/GeMPlus/ClausREL.htm fprintf_function(pF, "[%s]" LineReturn, SECTION_VERSIO); From fab3860c2ac8aab496f63863f7366052441c595a Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 18:24:23 +0200 Subject: [PATCH 110/230] Internal libtiff: resync with upstream --- .../libtiff/gdal_libtiff_symbol_rename.h | 11 +++--- frmts/gtiff/libtiff/tif_dir.h | 6 +-- frmts/gtiff/libtiff/tif_getimage.c | 38 +++++++++++++++++++ 3 files changed, 47 insertions(+), 8 deletions(-) diff --git a/frmts/gtiff/libtiff/gdal_libtiff_symbol_rename.h b/frmts/gtiff/libtiff/gdal_libtiff_symbol_rename.h index cba60f569e09..49545474f4ed 100644 --- a/frmts/gtiff/libtiff/gdal_libtiff_symbol_rename.h +++ b/frmts/gtiff/libtiff/gdal_libtiff_symbol_rename.h @@ -7,11 +7,13 @@ #define BuildMapBitdepth16To8 gdal_BuildMapBitdepth16To8 #define BuildMapUaToAa gdal_BuildMapUaToAa #define ByteCountLooksBad gdal_ByteCountLooksBad +#define CalcFinalIFDdatasizeReading gdal_CalcFinalIFDdatasizeReading #define checkcmap gdal_checkcmap #define CheckDirCount gdal_CheckDirCount #define ChopUpSingleUncompressedStrip gdal_ChopUpSingleUncompressedStrip #define CLAMPw gdal_CLAMPw #define cl_hash gdal_cl_hash +#define cmpTIFFEntryOffsetAndLength gdal_cmpTIFFEntryOffsetAndLength #define countInkNamesString gdal_countInkNamesString #define cvtcmap gdal_cvtcmap #define DoubleToRational gdal_DoubleToRational @@ -23,6 +25,8 @@ #define equalFuncNumberToOffset gdal_equalFuncNumberToOffset #define equalFuncOffsetToNumber gdal_equalFuncOffsetToNumber #define EstimateStripByteCounts gdal_EstimateStripByteCounts +#define EvaluateIFDdatasizeReading gdal_EvaluateIFDdatasizeReading +#define EvaluateIFDdatasizeWrite gdal_EvaluateIFDdatasizeWrite #define Fax3BadLength gdal_Fax3BadLength #define Fax3Cleanup gdal_Fax3Cleanup #define _Fax3Close gdal__Fax3Close @@ -305,8 +309,8 @@ #define setDoubleArrayOneValue gdal_setDoubleArrayOneValue #define setExtraSamples gdal_setExtraSamples #define setorientation gdal_setorientation +#define SetupBuffers gdal_SetupBuffers #define setupMap gdal_setupMap -#define SetupUncompressedBuffer gdal_SetupUncompressedBuffer #define swabHorAcc16 gdal_swabHorAcc16 #define swabHorAcc32 gdal_swabHorAcc32 #define swabHorAcc64 gdal_swabHorAcc64 @@ -633,6 +637,7 @@ #define _TIFFReserveLargeEnoughWriteBuffer gdal__TIFFReserveLargeEnoughWriteBuffer #define 
TIFFReverseBits gdal_TIFFReverseBits #define TIFFRewriteDirectory gdal_TIFFRewriteDirectory +#define TIFFRewriteDirectorySec gdal_TIFFRewriteDirectorySec #define _TIFFRewriteField gdal__TIFFRewriteField #define TIFFRGBAImageBegin gdal_TIFFRGBAImageBegin #define TIFFRGBAImageEnd gdal_TIFFRGBAImageEnd @@ -699,8 +704,6 @@ #define TIFFTileSize gdal_TIFFTileSize #define TIFFTileSize64 gdal_TIFFTileSize64 #define _TIFFtrue gdal__TIFFtrue -#define _TIFFUInt64ToDouble gdal__TIFFUInt64ToDouble -#define _TIFFUInt64ToFloat gdal__TIFFUInt64ToFloat #define TIFFUnlinkDirectory gdal_TIFFUnlinkDirectory #define _tiffUnmapProc gdal__tiffUnmapProc #define TIFFUnRegisterCODEC gdal_TIFFUnRegisterCODEC @@ -802,8 +805,6 @@ #define unixWarningHandler gdal_unixWarningHandler #define uv_decode gdal_uv_decode #define uv_encode gdal_uv_encode -#define WebPGetFeatures gdal_WebPGetFeatures -#define WebPInitDecoderConfig gdal_WebPInitDecoderConfig #define WriteAsLong4 gdal_WriteAsLong4 #define WriteAsLong8 gdal_WriteAsLong8 #define _WriteAsType gdal__WriteAsType diff --git a/frmts/gtiff/libtiff/tif_dir.h b/frmts/gtiff/libtiff/tif_dir.h index 17242eda558b..f9558b623703 100644 --- a/frmts/gtiff/libtiff/tif_dir.h +++ b/frmts/gtiff/libtiff/tif_dir.h @@ -146,9 +146,9 @@ typedef struct td_deferstrilearraywriting; /* see TIFFDeferStrileArrayWriting() */ /* LibTIFF writes all data that does not fit into the IFD entries directly - * after the IFD tag entry part. When reading, only the IFD data directly and - * continuously behind the IFD tags is taken into account for the IFD data - * size.*/ + * after the IFD tag entry part. When reading, only the IFD data directly + * and continuously behind the IFD tags is taken into account for the IFD + * data size.*/ uint64_t td_dirdatasize_write; /* auxiliary for evaluating size of IFD data to be written */ uint64_t td_dirdatasize_read; /* auxiliary for evaluating size of IFD data diff --git a/frmts/gtiff/libtiff/tif_getimage.c b/frmts/gtiff/libtiff/tif_getimage.c index 6fee35db28ed..0ada2697259c 100644 --- a/frmts/gtiff/libtiff/tif_getimage.c +++ b/frmts/gtiff/libtiff/tif_getimage.c @@ -760,6 +760,13 @@ static int gtTileContig(TIFFRGBAImage *img, uint32_t *raster, uint32_t w, toskew = -(int32_t)(tw - w); } + if (tw == 0 || th == 0) + { + TIFFErrorExtR(tif, TIFFFileName(tif), "tile width or height is zero"); + return (0); + } + + /* * Leftmost tile is clipped on left side if col_offset > 0. */ @@ -916,6 +923,12 @@ static int gtTileSeparate(TIFFRGBAImage *img, uint32_t *raster, uint32_t w, break; } + if (tw == 0 || th == 0) + { + TIFFErrorExtR(tif, TIFFFileName(tif), "tile width or height is zero"); + return (0); + } + /* * Leftmost tile is clipped on left side if col_offset > 0. */ @@ -1092,6 +1105,12 @@ static int gtStripContig(TIFFRGBAImage *img, uint32_t *raster, uint32_t w, } TIFFGetFieldDefaulted(tif, TIFFTAG_ROWSPERSTRIP, &rowsperstrip); + if (rowsperstrip == 0) + { + TIFFErrorExtR(tif, TIFFFileName(tif), "rowsperstrip is zero"); + return (0); + } + scanline = TIFFScanlineSize(tif); fromskew = (w < imagewidth ? imagewidth - w : 0); @@ -1216,6 +1235,12 @@ static int gtStripSeparate(TIFFRGBAImage *img, uint32_t *raster, uint32_t w, } TIFFGetFieldDefaulted(tif, TIFFTAG_ROWSPERSTRIP, &rowsperstrip); + if (rowsperstrip == 0) + { + TIFFErrorExtR(tif, TIFFFileName(tif), "rowsperstrip is zero"); + return (0); + } + scanline = TIFFScanlineSize(tif); fromskew = (w < imagewidth ? 
imagewidth - w : 0); for (row = 0; row < h; row += nrow) @@ -3213,6 +3238,13 @@ int TIFFReadRGBAStripExt(TIFF *tif, uint32_t row, uint32_t *raster, } TIFFGetFieldDefaulted(tif, TIFFTAG_ROWSPERSTRIP, &rowsperstrip); + + if (rowsperstrip == 0) + { + TIFFErrorExtR(tif, TIFFFileName(tif), "rowsperstrip is zero"); + return (0); + } + if ((row % rowsperstrip) != 0) { TIFFErrorExtR( @@ -3289,6 +3321,12 @@ int TIFFReadRGBATileExt(TIFF *tif, uint32_t col, uint32_t row, uint32_t *raster, TIFFGetFieldDefaulted(tif, TIFFTAG_TILEWIDTH, &tile_xsize); TIFFGetFieldDefaulted(tif, TIFFTAG_TILELENGTH, &tile_ysize); + if (tile_xsize == 0 || tile_ysize == 0) + { + TIFFErrorExtR(tif, TIFFFileName(tif), "tile_xsize or tile_ysize is zero"); + return (0); + } + if ((col % tile_xsize) != 0 || (row % tile_ysize) != 0) { TIFFErrorExtR(tif, TIFFFileName(tif), From 580bb41a369c866e1e30987e547652ee9cf9bf24 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 18:58:53 +0200 Subject: [PATCH 111/230] GDALMDReaderALOS::LoadRPCTxtFile(): prevent read index out of bounds on corrupted RPCTxt file (https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=68182) --- autotest/gcore/tiff_read.py | 33 ++++++++++ gcore/mdreader/reader_alos.cpp | 113 ++++++++++++++++----------------- 2 files changed, 89 insertions(+), 57 deletions(-) diff --git a/autotest/gcore/tiff_read.py b/autotest/gcore/tiff_read.py index 4dcb667c1b24..a21b335ebaf8 100755 --- a/autotest/gcore/tiff_read.py +++ b/autotest/gcore/tiff_read.py @@ -2439,6 +2439,39 @@ def test_tiff_read_md9(): md["ACQUISITIONDATETIME"] == "2010-07-01 00:00:00" ), "bad value for IMAGERY[ACQUISITIONDATETIME]" + assert ds.GetMetadata("RPC") == { + "HEIGHT_OFF": "+3000", + "HEIGHT_SCALE": "+3158", + "LAT_OFF": "+55.8151", + "LAT_SCALE": "+00.4400", + "LINE_DEN_COEFF": "+1.000000E+0 -2.395249E-3 +4.910170E-3 -1.979234E-3 " + "-4.434428E-5 -1.095954E-5 -1.797271E-5 +2.114926E-6 " + "-3.908516E-6 -8.265047E-6 +0.000000E+0 +0.000000E+0 " + "+0.000000E+0 +0.000000E+0 +0.000000E+0 +0.000000E+0 " + "+0.000000E+0 +0.000000E+0 +0.000000E+0 +0.000000E+0", + "LINE_NUM_COEFF": "-3.910052E-4 -3.183540E-1 -1.136076E+0 -2.764965E-4 " + "+4.820507E-3 +6.314381E-4 +2.248402E-3 -4.827423E-3 " + "-5.628790E-3 +7.063636E-7 +7.387494E-6 +1.621526E-5 " + "+7.714024E-5 +2.645431E-6 +9.095926E-6 +5.103928E-6 " + "+9.402414E-6 +1.317677E-5 +2.180787E-5 +6.331507E-9", + "LINE_OFF": "004000", + "LINE_SCALE": "004129", + "LONG_OFF": "+032.0758", + "LONG_SCALE": "+000.7304", + "SAMP_DEN_COEFF": "+1.000000E+0 -2.395249E-3 +4.910170E-3 -1.979234E-3 " + "-4.434428E-5 -1.095954E-5 -1.797271E-5 +2.114926E-6 " + "-3.908516E-6 -8.265047E-6 +0.000000E+0 +0.000000E+0 " + "+0.000000E+0 +0.000000E+0 +0.000000E+0 +0.000000E+0 " + "+0.000000E+0 +0.000000E+0 +0.000000E+0 +0.000000E+0", + "SAMP_NUM_COEFF": "+7.854784E-3 +1.190927E+0 -3.819688E-1 +4.903193E-5 " + "-6.649807E-3 +3.604242E-3 -1.156816E-3 -4.774021E-3 " + "-1.842626E-3 +6.588797E-8 -2.311049E-5 -3.063846E-6 " + "-8.987769E-5 +5.254359E-6 -1.970831E-5 +9.643964E-7 " + "-1.697456E-6 -3.331775E-5 +1.815988E-7 -8.016227E-9", + "SAMP_OFF": "03639", + "SAMP_SCALE": "03699", + } + ds = None assert not os.path.exists("data/alos/IMG-md_alos.tif.aux.xml") diff --git a/gcore/mdreader/reader_alos.cpp b/gcore/mdreader/reader_alos.cpp index 56b7f5e39ef9..2dfcc6e05315 100644 --- a/gcore/mdreader/reader_alos.cpp +++ b/gcore/mdreader/reader_alos.cpp @@ -299,10 +299,6 @@ void GDALMDReaderALOS::LoadMetadata() } } -static const char *const 
apszRPCTXT20ValItems[] = { - RPC_LINE_NUM_COEFF, RPC_LINE_DEN_COEFF, RPC_SAMP_NUM_COEFF, - RPC_SAMP_DEN_COEFF, nullptr}; - /** * LoadRPCTxtFile */ @@ -311,74 +307,77 @@ char **GDALMDReaderALOS::LoadRPCTxtFile() if (m_osRPBSourceFilename.empty()) return nullptr; - char **papszLines = CSLLoad(m_osRPBSourceFilename); - if (nullptr == papszLines) + const CPLStringList aosLines(CSLLoad(m_osRPBSourceFilename)); + if (aosLines.empty()) return nullptr; - const char *pszFirstRow = papszLines[0]; - char **papszRPB = nullptr; + const char *pszFirstRow = aosLines[0]; + CPLStringList aosRPB; if (nullptr != pszFirstRow) { - char buff[50] = {0}; - int nOffset = 0; - CPLStrlcpy(buff, pszFirstRow + nOffset, 7); - nOffset += 6; - papszRPB = CSLAddNameValue(papszRPB, RPC_LINE_OFF, buff); - - CPLStrlcpy(buff, pszFirstRow + nOffset, 6); - nOffset += 5; - papszRPB = CSLAddNameValue(papszRPB, RPC_SAMP_OFF, buff); - - CPLStrlcpy(buff, pszFirstRow + nOffset, 9); - nOffset += 8; - papszRPB = CSLAddNameValue(papszRPB, RPC_LAT_OFF, buff); - - CPLStrlcpy(buff, pszFirstRow + nOffset, 10); - nOffset += 9; - papszRPB = CSLAddNameValue(papszRPB, RPC_LONG_OFF, buff); - - CPLStrlcpy(buff, pszFirstRow + nOffset, 6); - nOffset += 5; - papszRPB = CSLAddNameValue(papszRPB, RPC_HEIGHT_OFF, buff); - - CPLStrlcpy(buff, pszFirstRow + nOffset, 7); - nOffset += 6; - papszRPB = CSLAddNameValue(papszRPB, RPC_LINE_SCALE, buff); - - CPLStrlcpy(buff, pszFirstRow + nOffset, 6); - nOffset += 5; - papszRPB = CSLAddNameValue(papszRPB, RPC_SAMP_SCALE, buff); + static const struct + { + const char *pszName; + int nSize; + } apsFieldDescriptors[] = { + {RPC_LINE_OFF, 6}, {RPC_SAMP_OFF, 5}, {RPC_LAT_OFF, 8}, + {RPC_LONG_OFF, 9}, {RPC_HEIGHT_OFF, 5}, {RPC_LINE_SCALE, 6}, + {RPC_SAMP_SCALE, 5}, {RPC_LAT_SCALE, 8}, {RPC_LONG_SCALE, 9}, + {RPC_HEIGHT_SCALE, 5}, + }; + + int nRequiredSize = 0; + for (const auto &sFieldDescriptor : apsFieldDescriptors) + { + nRequiredSize += sFieldDescriptor.nSize; + } - CPLStrlcpy(buff, pszFirstRow + nOffset, 9); - nOffset += 8; - papszRPB = CSLAddNameValue(papszRPB, RPC_LAT_SCALE, buff); + static const char *const apszRPCTXT20ValItems[] = { + RPC_LINE_NUM_COEFF, RPC_LINE_DEN_COEFF, RPC_SAMP_NUM_COEFF, + RPC_SAMP_DEN_COEFF}; - CPLStrlcpy(buff, pszFirstRow + nOffset, 10); - nOffset += 9; - papszRPB = CSLAddNameValue(papszRPB, RPC_LONG_SCALE, buff); + constexpr int RPC_COEFF_COUNT1 = CPL_ARRAYSIZE(apszRPCTXT20ValItems); + constexpr int RPC_COEFF_COUNT2 = 20; + constexpr int RPC_COEFF_SIZE = 12; + nRequiredSize += RPC_COEFF_COUNT1 * RPC_COEFF_COUNT2 * RPC_COEFF_SIZE; + if (strlen(pszFirstRow) < nRequiredSize) + { + CPLError(CE_Failure, CPLE_AppDefined, + "%s has only %d bytes wherea %d are required", + m_osRPBSourceFilename.c_str(), int(strlen(pszFirstRow)), + nRequiredSize); + return nullptr; + } - CPLStrlcpy(buff, pszFirstRow + nOffset, 6); - nOffset += 5; - papszRPB = CSLAddNameValue(papszRPB, RPC_HEIGHT_SCALE, buff); + int nOffset = 0; + char buff[16] = {0}; + for (const auto &sFieldDescriptor : apsFieldDescriptors) + { + CPLAssert(sFieldDescriptor.nSize < int(sizeof(buff))); + memcpy(buff, pszFirstRow + nOffset, sFieldDescriptor.nSize); + buff[sFieldDescriptor.nSize] = 0; + aosRPB.SetNameValue(sFieldDescriptor.pszName, buff); + nOffset += sFieldDescriptor.nSize; + } - int i, j; - for (i = 0; apszRPCTXT20ValItems[i] != nullptr; i++) + for (const char *pszItem : apszRPCTXT20ValItems) { - CPLString value; - for (j = 1; j < 21; j++) + std::string osValue; + for (int j = 0; j < RPC_COEFF_COUNT2; j++) { - CPLStrlcpy(buff, 
pszFirstRow + nOffset, 13); - nOffset += 12; + memcpy(buff, pszFirstRow + nOffset, RPC_COEFF_SIZE); + buff[RPC_COEFF_SIZE] = 0; + nOffset += RPC_COEFF_SIZE; - value = value + " " + CPLString(buff); + if (!osValue.empty()) + osValue += " "; + osValue += buff; } - papszRPB = - CSLAddNameValue(papszRPB, apszRPCTXT20ValItems[i], value); + aosRPB.SetNameValue(pszItem, osValue.c_str()); } } - CSLDestroy(papszLines); - return papszRPB; + return aosRPB.StealList(); } /** From 483bdb485983ee3a815368f68c2d15a8a8adcb6b Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 19:03:04 +0200 Subject: [PATCH 112/230] [Lint] HDF5: make Coverity Scan happy --- frmts/hdf5/hdf5imagedataset.cpp | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/frmts/hdf5/hdf5imagedataset.cpp b/frmts/hdf5/hdf5imagedataset.cpp index e192ab79c24d..bd1defebab5e 100644 --- a/frmts/hdf5/hdf5imagedataset.cpp +++ b/frmts/hdf5/hdf5imagedataset.cpp @@ -543,15 +543,16 @@ CPLErr HDF5ImageRasterBand::IRasterIO(GDALRWFlag eRWFlag, int nXOff, int nYOff, HDF5_GLOBAL_LOCK(); hsize_t count[3] = { - static_cast<hsize_t>( - std::min(poGDS->nBands, - (iBandChunk + 1) * poGDS->m_nBandChunkSize) - - iBandChunk * poGDS->m_nBandChunkSize), + std::min(static_cast<hsize_t>(poGDS->nBands), + static_cast<hsize_t>(iBandChunk + 1) * + poGDS->m_nBandChunkSize) - + static_cast<hsize_t>(iBandChunk) * + poGDS->m_nBandChunkSize, static_cast<hsize_t>(nRasterYSize), static_cast<hsize_t>(nRasterXSize)}; H5OFFSET_TYPE offset[3] = { - static_cast<H5OFFSET_TYPE>(iBandChunk * - poGDS->m_nBandChunkSize), + static_cast<H5OFFSET_TYPE>(iBandChunk) * + poGDS->m_nBandChunkSize, static_cast<H5OFFSET_TYPE>(0), static_cast<H5OFFSET_TYPE>(0)}; herr_t status = From 5bbfa9bda4c3ac606f2c585591a5c9a414424bae Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 19:09:07 +0200 Subject: [PATCH 113/230] CPLZLibInflateEx(): make Coverity Scan happy --- port/cpl_vsil_gzip.cpp | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/port/cpl_vsil_gzip.cpp b/port/cpl_vsil_gzip.cpp index 96cd950cfb75..9a93c0717169 100644 --- a/port/cpl_vsil_gzip.cpp +++ b/port/cpl_vsil_gzip.cpp @@ -5084,13 +5084,18 @@ void *CPLZLibInflateEx(const void *ptr, size_t nBytes, void *outptr, outptr = nullptr; nOutAvailableBytes = nOutBufSize; } + else if (res != LIBDEFLATE_SUCCESS) + { + if (bAllowResizeOutptr) + VSIFree(outptr); + return nullptr; + } else { - if (res != LIBDEFLATE_SUCCESS) + // Nul-terminate if possible. 
+ if (*pnOutBytes < nOutAvailableBytes) { - if (bAllowResizeOutptr) - VSIFree(outptr); - return nullptr; + static_cast<char *>(outptr)[*pnOutBytes] = '\0'; } return outptr; } @@ -5142,8 +5147,8 @@ void *CPLZLibInflateEx(const void *ptr, size_t nBytes, void *outptr, inflateEnd(&strm); return nullptr; } - nOutBufSize = 2 * nBytes; - pszOutBuf = static_cast<char *>(VSI_MALLOC_VERBOSE(nOutBufSize + 1)); + nOutBufSize = 2 * nBytes + 1; + pszOutBuf = static_cast<char *>(VSI_MALLOC_VERBOSE(nOutBufSize)); if (pszOutBuf == nullptr) { inflateEnd(&strm); @@ -5199,9 +5204,9 @@ void *CPLZLibInflateEx(const void *ptr, size_t nBytes, void *outptr, inflateEnd(&strm); return nullptr; } - nOutBufSize = nOutBufSize * 2; + nOutBufSize = nOutBufSize * 2 + 1; char *pszNew = static_cast<char *>( - VSI_REALLOC_VERBOSE(pszReallocatableBuf, nOutBufSize + 1)); + VSI_REALLOC_VERBOSE(pszReallocatableBuf, nOutBufSize)); if (!pszNew) { VSIFree(pszReallocatableBuf); @@ -5222,7 +5227,7 @@ void *CPLZLibInflateEx(const void *ptr, size_t nBytes, void *outptr, { size_t nOutBytes = nOutBufSize - nOutBytesRemaining; // Nul-terminate if possible. - if (outptr != pszOutBuf || nOutBytes < nOutBufSize) + if (nOutBytes < nOutBufSize) { pszOutBuf[nOutBytes] = '\0'; } From 4bad4c5b5537c5f11aa5a93b312578f3c810bcb8 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 19:14:06 +0200 Subject: [PATCH 114/230] test_ogr.cpp: try to make Coverity Scan happy --- autotest/cpp/test_ogr.cpp | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/autotest/cpp/test_ogr.cpp b/autotest/cpp/test_ogr.cpp index 4c5435104ed3..518b11e43547 100644 --- a/autotest/cpp/test_ogr.cpp +++ b/autotest/cpp/test_ogr.cpp @@ -3879,7 +3879,9 @@ TEST_F(test_ogr, OGRFeature_SerializeToBinary) OGRFeature oFeatSrc(&oFDefn); std::vector<GByte> abyBuffer; - oFeatSrc.SetFieldNull(oFDefn.GetFieldIndex("int")); + const int iFieldInt = oFDefn.GetFieldIndex("int"); + ASSERT_TRUE(iFieldInt >= 0); + oFeatSrc.SetFieldNull(iFieldInt); EXPECT_TRUE(oFeatSrc.SerializeToBinary(abyBuffer)); EXPECT_EQ(abyBuffer.size(), 5); @@ -3904,7 +3906,9 @@ TEST_F(test_ogr, OGRFeature_SerializeToBinary) oFeatSrc.SetField("int64", static_cast<GIntBig>(-12345678901234)); oFeatSrc.SetField("real", 1.25); oFeatSrc.SetField("str", "foo"); - oFeatSrc.SetField(oFDefn.GetFieldIndex("binary"), 3, + const int iFieldBinary = oFDefn.GetFieldIndex("binary"); + ASSERT_TRUE(iFieldBinary >= 0); + oFeatSrc.SetField(iFieldBinary, 3, static_cast<const void *>("abc")); oFeatSrc.SetField("intlist", 2, std::vector<int>{1, -123456}.data()); From df53495b9ba8b013c7aee5e99f87cc931990ecef Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 19:25:23 +0200 Subject: [PATCH 115/230] CPLZLibInflateEx(): fix 5bbfa9bda4c3ac606f2c585591a5c9a414424bae breakage --- port/cpl_vsil_gzip.cpp | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/port/cpl_vsil_gzip.cpp b/port/cpl_vsil_gzip.cpp index 9a93c0717169..22c7418ec09a 100644 --- a/port/cpl_vsil_gzip.cpp +++ b/port/cpl_vsil_gzip.cpp @@ -5053,17 +5053,20 @@ void *CPLZLibInflateEx(const void *ptr, size_t nBytes, void *outptr, return nullptr; } enum libdeflate_result res; + size_t nOutBytes = 0; if (nBytes > 2 && static_cast<const GByte *>(ptr)[0] == 0x1F && static_cast<const GByte *>(ptr)[1] == 0x8B) { res = libdeflate_gzip_decompress(dec, ptr, nBytes, outptr, - nOutAvailableBytes, pnOutBytes); + nOutAvailableBytes, &nOutBytes); } else { res = 
libdeflate_zlib_decompress(dec, ptr, nBytes, outptr, - nOutAvailableBytes, pnOutBytes); + nOutAvailableBytes, &nOutBytes); } + if (pnOutBytes) + *pnOutBytes = nOutBytes; libdeflate_free_decompressor(dec); if (res == LIBDEFLATE_INSUFFICIENT_SPACE && bAllowResizeOutptr) { @@ -5093,9 +5096,9 @@ void *CPLZLibInflateEx(const void *ptr, size_t nBytes, void *outptr, else { // Nul-terminate if possible. - if (*pnOutBytes < nOutAvailableBytes) + if (nOutBytes < nOutAvailableBytes) { - static_cast<char *>(outptr)[*pnOutBytes] = '\0'; + static_cast<char *>(outptr)[nOutBytes] = '\0'; } return outptr; } From 086f596753ea2ebc1c20b422d6d7ddc7a923a4c2 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 20 Apr 2024 19:37:16 +0200 Subject: [PATCH 116/230] reader_alos.cpp: fix compiler warning (master only) --- gcore/mdreader/reader_alos.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gcore/mdreader/reader_alos.cpp b/gcore/mdreader/reader_alos.cpp index 2dfcc6e05315..b3194605cf16 100644 --- a/gcore/mdreader/reader_alos.cpp +++ b/gcore/mdreader/reader_alos.cpp @@ -340,7 +340,7 @@ char **GDALMDReaderALOS::LoadRPCTxtFile() constexpr int RPC_COEFF_COUNT2 = 20; constexpr int RPC_COEFF_SIZE = 12; nRequiredSize += RPC_COEFF_COUNT1 * RPC_COEFF_COUNT2 * RPC_COEFF_SIZE; - if (strlen(pszFirstRow) < nRequiredSize) + if (strlen(pszFirstRow) < static_cast<size_t>(nRequiredSize)) { CPLError(CE_Failure, CPLE_AppDefined, "%s has only %d bytes wherea %d are required", From 40a16941e52c407531325c9ca15ffb31016b254d Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 21 Apr 2024 00:20:27 +0200 Subject: [PATCH 117/230] NEWS.md: update for 3.9.0beta1 (up to commit 5eeba207b21130efad3d2f9acb56963e4b19fd21) --- NEWS.md | 531 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 531 insertions(+) diff --git a/NEWS.md b/NEWS.md index 1d2ae1ccaf73..780ce09a1c09 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,3 +1,534 @@ +# GDAL/OGR 3.9.0 Releases Notes + +GDAL/OGR 3.9.0 is a feature release. +Those notes include changes since GDAL 3.8.0, but not already included in a +GDAL 3.8.x bugfix release. + +## In a nutshell... + +* [RFC 96](https://gdal.org/development/rfc/rfc96_deferred_plugin_loading.html): + Deferred C++ plugin loading +* [RFC 97](https://gdal.org/development/rfc/rfc97_feature_and_fielddefn_sealing.html): + OGRFeatureDefn, OGRFieldDefn and OGRGeomFieldDefn "sealing" +* [RFC 98](https://gdal.org/development/rfc/rfc98_build_requirements_gdal_3_9.html): + Build requirements for GDAL 3.9 +* [RFC 99](https://gdal.org/development/rfc/rfc99_geometry_coordinate_precision.html): + Geometry coordinate precision +* Add [S104](https://gdal.org/drivers/raster/s104.html) (Water Level Information + for Surface Navigation Product) and + [S111](https://gdal.org/drivers/raster/s111.html) (Surface Currents Product) + raster read-only drivers (required libhdf5) +* Add raster [GTI](https://gdal.org/drivers/raster/gti.html) (GDAL Raster Tile + Index) driver to support catalogs with huge number of sources. 
+* Add vector [MiraMonVector](https://gdal.org/drivers/vector/miramon.html) + read/creation driver (#9688) +* Deprecated ARG driver has been removed (#7920) +* Code linting + +## Build + +* CMake: add ``[GDAL|OGR]_REGISTER_DRIVER_<driver_name>_FOR_LATER_PLUGIN`` + variables (RFC 96) +* CMake: Bump max compatible version to 3.28 +* CMake: add a way of defining an external deferred driver by setting one or + several ADD_EXTERNAL_DEFERRED_PLUGIN_XXX CMake variables (RFC 96) +* CMake: error out if a driver has been asked as a plugin, but conditions are not met +* CMake: rework PROJ detection +* CMAKE_UNITY_BUILD=YES builds are possible, but not recommended for production +* gdal.cmake: set -DDEBUG for CMAKE_BUILD_TYPE=Debug for Windows builds as well +* CMake: add GDAL_FIND_PACKAGE_OpenJPEG_MODE and GDAL_FIND_PACKAGE_PROJ_MODE + variables +* FindSQLite3.cmake: avoid repeating finding `SQLite3_INCLUDE_DIR`/ + `SQLite3_LIBRARY` if existed. +* Add compatibility for Intel Compiler 2024.0.2.29 + +## Backward compatibility issues + +See [MIGRATION_GUIDE.TXT](https://github.com/OSGeo/gdal/blob/release/3.8/MIGRATION_GUIDE.TXT) + +## Changes in installed files + +* data/MM_m_idofic.csv: new +* data/gdalvrtti.xsd: new +* data/pci_datum.txt and data/pci_ellips.txt: updated (#8034) +* include/cpl_minizip_ioapi.h, cpl_minizip_unzip.h, cpl_minizip_zip.h: + removed. They use since 3.5 an header that is not installed, so were unusable + +## GDAL 3.9.0 - Overview of Changes + +### Port + +* /vsicurl: add ANYSAFE & BEARER to auth methods (#8683) +* /vsicurl/: re-emit HTTP error code next times we try opening a resource that + failed the first time (#8922) +* /vsicurl/: add a VSICURL_PC_URL_SIGNING path-specific option to enable + Planetary Computer URL signing only on some URLs +* /vsicurl/: Read(): emit error message when receiving HTTP 416 Range Not + Satisfiable error +* Add VSIVirtualHandle::GetAdviseReadTotalBytesLimit() +* cpl_http: retry "Connection reset by peer" +* Add a VSICURLMultiCleanup() that runs with SIGPIPE ignored (#9677) +* /vsiaz/: fix RmdirRecursive() on an empty directory with just the + .gdal_marker_for_dir special marker file +* /vsis3/: include region to build s3.{region}.amazonaws.com host name (#9449) +* Add VSIVirtualHandle::Printf() +* Add VSIRemovePluginHandler() to enable removal of virtual filesystems (#8772) +* No longer alias CPLMutex, CPLCond and CPLJoinableThread to void in non-DEBUG + builds (ABI change) +* Win32 extended filenames (starting with "\\?\"): various fixes; add support + for network UNC paths +* Add VSIGetDirectorySeparator() to return the directory separator for the + specified path +* Add CPLXMLNodeGetRAMUsageEstimate() +* CPLCreateOrAcquireMutexEx(): fix warning about lock-order inversion (#1108) +* Win32 Stat(VSI_STAT_EXISTS_FLAG): improve performance (#3139) +* Add CPLDebugProgress() to display a debugging message indicating a progression +* CSLConstList/CPLStringList: add iterating facilities and better + ``std::vector<std::string>`` operability +* Add CPLStringList::front(), back() and clear() +* Add CPLStrtodM() +* CPLStrtod(): set errno=0 when no value has been parsed to conform to POSIX +* Add CPLUTF8ForceToASCII() +* VSICACHE: avoid EOF read (#9669) + +### Core + +* GDALRATValuesIOAsString(): fix wrong type for papszStrList argument +* Add GDALDataset::DropCache() (#8938) +* Add GDALDataset::UnMarkSuppressOnClose() and IsMarkSuppressOnClose() (#8980) +* Add GDALGetOutputDriversForDatasetName() +* Modify the logic of selection of overviews for 
non-nearest resampling; add a
+  GDAL_OVERVIEW_OVERSAMPLING_THRESHOLD config option (#9040)
+* GDALMDArray::AsClassicDataset(): make it possible to use overviews
+* GDALOpen(): change error message when a dataset isn't recognized (#9009)
+* GDALDeserializeGCPListFromXML(): validate value of GCP Pixel,Line,X,Y,Z
+  attributes
+* PAM: only unset GPF_DIRTY flag
+* GDALGetCacheMax64(): fix warning about lock-order inversion (#1837)
+* Add gdal::GCP class
+* QuietDeleteForCreateCopy(): forward source dataset open options (#9422)
+
+### Multidimensional API
+
+* Add GDALCreateRasterAttributeTableFromMDArrays() to return a virtual Raster
+  Attribute Table from several GDALMDArray's
+* fix wrong DataAxisToSRSAxisMapping
+* GDALMDArray::AsClassicDataset(): make it possible to use overviews
+
+### Algorithms
+
+* Implement basic Line-of-sight algorithm (#9506, #9050)
+* Warper: fix artifacts when reprojecting from long/lat to ortho (#9056)
+* GDALSuggestedWarpOutput2(): ortho->long/lat: limit extent to
+  [lon_0-90,lon_0+90] even when poles are included
+* Warper: limit artifacts when doing ortho->long/lat (provided that srcNodata is
+  set)
+* GDALSuggestWarpOutput(): make it return original potential non-square pixel
+  shape for a south-up oriented dataset (#9336)
+* Warper: add a EXCLUDED_VALUES warping option to specify pixel values to be
+  ignored as contributing source pixels during average resampling
+* GDALFillNodata(): add a INTERPOLATION=NEAREST option
+* GDALChecksumImage(): read by multiple of blocks for floating-point bands to
+  improve performance
+
+### Utilities
+
+* Add GDALArgumentParser class to extend p-ranav/argparse framework
+* Use GDALArgumentParser for: gdaladdo, gdalinfo, gdal_translate, gdalwarp,
+  gdal_grid, gdal_viewshed, gdalbuildvrt, nearblack, ogrinfo, ogr2ogr, sozip
+* Add support for ``--config <key>=<value>`` syntax
+* gdaladdo: reuse previous resampling method (from GTiff RESAMPLING metadata
+  item) if not specifying -r and overview levels if not specifying them
+* gdaladdo: make --partial-refresh-from-source-timestamp work on GTI datasets
+* gdal_contour: fix lowest min value in polygonize mode (#9710)
+* gdalbuildvrt: add a -nodata_max_mask_threshold option
+* gdal_create: copy GCPs present in the input file
+* gdal_edit: add -a_coord_epoch option
+* gdal_footprint: fix -split_polys and -convex_hull to not consume the argument
+  specified just after them
+* gdal_footprint: write source dataset path in a 'location' field (#8795)
+* gdal_grid: error out on invalid layer name, SQL statement or failed where
+  condition (#9406)
+* gdallocationinfo: make it output extra content at end of input line, and
+  add -E echo mode, and -field_sep option (#9411)
+* gdalmdimtranslate: add -arrayoptions
+* gdalinfo: do not call GDALGetFileList() if -nofl is specified (#9243)
+* gdal_translate: add -dmo option, for domain-metadata items (#8935)
+* gdal_translate -expand rgb[a]: automatically select Byte output data type if
+  not specified and color table compatible of it (#9402)
+* gdaltransform: make it output extra content at end of input line, and add
+  -E echo mode, and -field_sep option (#9411)
+* gdalmanage: make --utility_version work
+* gdal_polygonize.py: handle error if creation of destination fails
+* gdal_viewshed: add support for south-up source datasets (#9432)
+* gdalwarp: progress bar tunings
+* gdalwarp: emit error message when transformation isn't inversible (#9149)
+* gdalwarp: fix performance issue when warping to COG (#9416)
+* gdalwarp: make target resolution 
independent from source extent when -te is + specified (#9573) +* allow passing a WKT geometry as -cutline value, and add -cutline_srs (#7658) +* sozip: support generic command line options +* --formats option: append list of extensions (#8958) +* Make gdaltindex a C callable function: GDALTileIndex() +* gdaltindex: add -overwrite, -vrtti_filename, -tr, -te, -ot, -bandcount, + -nodata, -colorinterp, -mask, -mo, -recursive, -filename_filter, + -min_pixel_size, -max_pixel_size, -fetch_md, -lco options +* gdal2tiles: added support for JPEG output +* gdal2tiles: Fix case where --exclude still writes fully transparent tiles + (#9532) +* gdal2tiles: add --excluded-values and --excluded-values-pct-threshold switches +* gdal2xyz: Change -srcwin parameter type to integer. +* Python sample scripts: add gdalbuildvrtofvrt.py (#9451) +* Python utilities: do not display full traceback on OpenDS failures (#9534) +* gdalinfo: suggest trying ogrinfo if appropriate, and vice-versa + +### Raster drivers + +Updates affecting multiple drivers: + * All drivers depending on external libraries that can be built as plugins + have been updated to implement RFC 96 deferred plugin loading + * Drivers that can now be built as plugins if using external libraries and + not vendored/internal libraries: GIF, JPEG, MRF, NITF, RAW drivers (as a + single plugin), PDS related drivers (as a single plugin) + * do not export GMLJP2 if the SRS isn't compatible (#9223): JP2KAK, JP2ECW, + JP2Lura, JP2OPENJPEG + +AAIGRID driver: + * fix reading file whose first value is nan (#9666) + +BAG driver: + * make sure to report a subdataset for the bathymetry coverage when we expose + one(s) for the georeferenced metadata + +GeoPackage driver: + * support maximum level up to 29 or 30; add a ZOOM_LEVEL creation option + +GTiff driver: + * Internal libtiff: resync with upstream + * Use (future) libtiff 4.6.1 TIFFOpenOptionsSetMaxSingleMemAlloc() + * add JXL_LOSSLESS_OVERVIEW, JXL_EFFORT_OVERVIEW, JXL_DISTANCE_OVERVIEW and + JXL_ALPHA_DISTANCE_OVERVIEW configuration options (#8973) + * overviews: generalize saving resampling method in a RESAMPLING metadata item + * friendlier error message when attempting to create JXL compressed file with + unsupported type/bits_per_sample + * deal with issues with multi-band PlanarConfig=Contig, LERC and NaN values + (#9530) + * make BuildOverviews() in update mode work when there is an external .msk file + * no longer condition CreateInternalMaskOverviews() behavior to + GDAL_TIFF_INTERNAL_MASK being unset or set to YES + * change default value of GDAL_TIFF_INTERNAL_MASK config option to YES + * multi-threaded decoding: fix potential mutex deadlock + * MultiThreadedRead(): make it take into account AdviseRead() limit to reduce + the number of I/O requests (#9682) + +HDF5 driver: + * multidim: fix crash on reading compound data type with fixed-length strings + * multidim: implement GDALMDArray::GetBlockSize() and GetStructuralInfo() + * improve performance of band IRasterIO() on hyperspectral products + +JP2KAK driver: + * make result of RasterIO() consistent depending if it is called directly or + through VRT, with non-nearest upsampling (#8911) + * refactor how overviews work + +JP2OpenJPEG driver: + * CreateCopy(): limit number of resolutions taking into account minimum block + width/height + +JPEG driver: + * ReadXMPMetadata(): only stop looking for XMP marker at Start Of Scan + * ReadFLIRMetadata(): stop on Start-Of-Scan marker (speeds up opening large + remote JPEG files) + * CreateCopy(): emit 
warning/error message before progress bar (#9441)
+
+MRF driver:
+ * BuildOverviews: bail out when no overviews are needed
+ * emit warning when attempting to clean internal overviews (#9145)
+ * Use ZSTD streaming API for compression: faster and better compression (#9230)
+
+netCDF driver:
+ * use the SRS (its geographic part) if found in the file, instead of the
+   hardcoded WGS84 string, for the GEOLOCATION.SRS metadata item (#9526)
+ * Add BAND_NAMES creation option
+ * fix writing of metadata items whose value has an equal sign (#9702)
+
+NITF driver:
+ * in xml:DES metadata domain field, expand content of XML_DATA_CONTENT DES as
+   XML in a <xml_content> sub-node of <field name=DESDATA> instead as a Base64
+   value
+ * fix undefined behavior when using NITFGetField() several times in the same
+   statement
+
+OGCAPI driver:
+ * OGCAPI Maps: support image formats beyond PNG/JPG as well as enables content
+   negotiation. (#9231, #9420)
+
+PDF driver:
+ * correctly initialize PAM when opening a subdataset (specific page e.g.)
+ * PDFium backend: update to support (and require) PDFium/6309
+ * deal with the situation where a multipage PDF has layers with same name but
+   specific to some page(s)
+ * Fix build with Poppler 24.05
+
+S102 driver:
+ * add support for spatial metadata of the QualityOfSurvey group
+ * read nodata value from dataset metadata
+
+Sentinel2 driver:
+ * include 10m AOT and WVP bands in 10m subdataset (#9066)
+
+TileDB driver:
+ * Added tiledb metadata fields to easily tag array type
+ * be able to read datasets converted with 'tiledb-cf netcdf-convert'
+ * make its identify() method more restrictive by not identifying /vsi file
+   systems it doesn't handle
+
+VRT driver:
+ * add a VRTProcessedDataset new mode to apply chained processing steps that
+   apply to several bands at the same time. Currently available algorithms:
+   LocalScaleOffset (for dehazing), BandAffineCombination, Trimming, LUT
+ * vrt:// connection: add a_nodata, sd_name, sd options
+ * add a NoDataFromMaskSource source: replaces the value of the source with
+   the value of the NODATA child element when the value of the mask band of
+   the source is less or equal to the MaskValueThreshold child element.
+ * VRT serialization: emit a warning if RAM usage of XML serialization reaches
+   80% of RAM (#9212)
+ * VRTWarpedDataset: add an optimized IRasterIO() implementation
+
+WMS driver:
+ * change logic to set gdalwmscache directory to honor in particular
+   XDG_CACHE_HOME (#8987)
+ * Use GDAL_HTTP_TIMEOUT
+
+Zarr driver:
+ * Add capability to read CRS from CF1 conventions
+
+ZMap driver:
+ * support reading variant of format where there is no newline character at
+   end of column
+
+## OGR 3.9.0 - Overview of Changes
+
+### Core
+
+* OGRSQL: Support SELECT * EXCLUDE(...) 
+* OGRSQL: add UTF-8 support for LIKE/ILIKE (for layers declaring + OLCStringsAsUTF8) (#8835) +* Add OGRLayer::GetExtent3D() (#8806) +* OGRLayer: Have CreateField/CreateGeomField take const OGRFieldDefn + /OGRGeomFieldDefn* argument (#8741) +* OGRFeatureDefn, OGRFieldDefn, OGRGeomFieldDefn: add Seal() and Unseal() + methods +* Fix swq_select::Unparse() +* ExecuteSQL(): add a warning if the dialect name isn't recognized (#8843) +* ExecuteSQL() with OGRSQL and SQLITE dialects: get OLCStringsAsUTF8 from + underlying layer (#9648) +* ExecuteSQL() OGRSQL and SQLITE dialects: error out if SetSpatialFilter() fails + (#9623) +* OGRGeometryFactory::forceTo(): make it honour dimensionality of eTargetType + (in particular fix POLYGON -> LINESTRING or POLYGON -> LINESTRINGZ) (#9080) +* OGRGeometryCollection::importFromWkb(): fix reading corrupted wkb with mixed + 2D and 3D geoms (#9236) +* RFC99: Add C++ OGRGeomCoordinatePrecision class and corresponding C / SWIG API +* Change prototype of GDALDataset::ICreateLayer() to take a + ``const OGRGeomCoordinatePrecision*`` +* Add OGRGeometry::SetPrecision() / OGR_G_SetPrecision(), as wrapper of + GEOSGeom_setPrecision_r() +* Add a OGRGeometry::roundCoordinates() method +* OGRFeature: add SerializeToBinary() / DeserializeFromBinary() +* Add OGR_G_GeodesicArea() / OGRSurface::get_GeodesicArea() +* SQLite SQL dialect: implement ST_Area(geom, use_ellipsoid) +* Add OGR_L_GetDataset() and implement GetDataset() in all drivers with creation + support +* Arrow array: fix decoding of ``date32[days]`` values before Epoch + (Arrow->OGRFeature), and fix rounding when encoding such values + (OGRFeature->Arrow) (#9636) +* OGRLayer::SetIgnoredFields(): make it take a CSLConstList argument instead of + const char* + +### OGRSpatialReference + +* Add OGRSpatialReference::exportToCF1() and importFromCF1() +* OGRCoordinateTransformation::Transform(): change nCount parameter to size_t + (C++ API only for now) (#9074) +* OGRProjCT::TransformWithErrorCodes(): Improve performance of axis swapping + (#9073) +* OSR_CT: fix ``SetDataAxisToSRSAxisMapping([-2, 1])``` on target SRS +* exportToXML(): error out on unsupported projection method (#9223) +* Add OSRSetFromUserInputEx() and map it to SWIG (#9358) +* Add std::string OGRSpatialReference::exportToWkt( + const char* const* papszOptions = nullptr) const + +### Utilities + +* ogrinfo: add a -extent3D switch +* ogrinfo: output coordinate resolution in -json mode +* ogrinfo: add -limit option (#3413) +* ogr2ogr: do not copy private layers (would for example break SQLite -> + SQLite/GPKG) +* ogr2ogr: force -preserve_fid when doing GPX to GPKG (#9225) +* ogr2ogr: propagate input coordinate precision +* ogr2ogr: add options -xyRes, -zRes, -mRes and -unsetCoordPrecision +* ogr2ogr: make -select tokenize only on comma (and not on space), and honour + double-quoting (#9613) +* Remove ogr2ogr.py sample utilities (gdal.VectorTranslate() is a more powerful + replacement) + +### Vector drivers + +Updates affecting multiple drivers: + * Drivers updated for RFC 97 feature definition sealing: GPKG, Shape, + OpenFileGDB, MITAB, MEM, GeoJSON, JSONFG, TopoJSON, ESRIJSON, ODS, XLSX, PG + * Drivers updated for RFC99 geometry coordinate precision: GML, GeoJSON, + GeoJSONSeq, JSONFG, GPKG, CSV, OpenFileGDB, FileGDB, OGR VRT + * All drivers depending on external libraries that can be built as plugins + have been updated to implement RFC 96 deferred plugin loading + * GetExtent3D() implemented in Shapefile, Arrow, Parquet, PostgreSQL, GeoJSON, + GeoPackage 
drivers + +OGR SQLite/SQLite/GPKG: add UTF-8 support for case-insensitive LIKE (#8835) + +Arrow/Parquet drivers: + * GetArrowSchema(): potential fix when there are ignored fields and the FID + column is not the first one + * Read/write support for GeoArrow (struct-based) encoding + * silently ignore column of type null in GetArrowSchema/GetArrowArray + * fix crash when reading geometries from a geometry column with a + pyarrow-registered extension type + * handle fields with a pyarrow-registered extension type + * preliminary/in-advance read support for future JSON Canonical Extension + +CSV driver: + * parse header with line breaks (#9172) + +DGN (v7) driver: + * emit explicit error when attempting to open a DGNv8 file and the DGNv8 + driver is not available (#9004) + +FileGDB/OpenFileGDB drivers: + * fix co-operation between the 2 drivers to make sure .cdf is opened with + FileGDB + +FileGDB driver: + * remove warning 'Empty Spatial Reference' + +FlatGeobuf driver: + * add support for reading and writing layer title, description and metadata + * CreateFeature(): error out if a string context is not valid UTF-8 (#7458) + +GeoJSON driver: + * OGRGeoJSONDriverIdentify: return -1 when unsure + * writer: add FOREIGN_MEMBERS_FEATURE and FOREIGN_MEMBERS_COLLECTION layer + creation options + * reader: accept a {"type": "Polygon","coordinates": []} as a representation + of POLYGON EMPTY + +GeoPackage driver: + * fixes to make most operations compatible with PRAGMA foreign_keys=1 (#9135) + * writer: set Z/M columns of gpkg_geometry_columns when there are Z/M + geometries in a 2D declared layer + * Read relationships defined using foreign key constraints + * CreateFeature(): allow creating a feature with FID=0 (#9225) + * Add DISCARD_COORD_LSB layer creation option + * map Null CRS to a new srs_id=99999, and add a SRID layer creation option + * add a LAUNDER=YES/NO layer creation option (default: NO) + * fix random error in threaded RTree creation, particularly on Mac M1 ARM64 + +GeoRaster driver: + * Added GENSTATS options, security fixes, and prevent failing when password is + near expiration (#9290) + +GML driver: + * writer: honour geometry field name and isnullable passed to ICreateLayer() + +GMLAS driver: + * faster retrieval of GML and ISO schemas by using zip archives + * use CPLHTTPFetch() instead of /vsicurl_streaming/ to allow alternate HTTP + downloader (for QGIS 3.36 enhanced WFS provider) + +LIBKML driver: + * improve generation of internal ids for layer names starting with digits + (#9538) + +netCDF driver: + * writer: make it more robust to empty geometries + +OpenFileGDB driver: + * add support for Integer64, Date, Time and DateTimeWithOffset data types + added in ArcGIS Pro 3.2 (#8862) + * writer: set xml shape type to "esriGeometryPolyline" for LineString/ + MultiLineString (#9033) + +OSM driver: + * properly escape special characters with TAGS_FORMAT=JSON open option (#9673) + +Parquet driver: + * support reading and writing layer metadata + * Read/write support for covering.bbox struct columns for spatial + filtering, add SORT_BY_BBOX=YES/NO layer creation option (GeoParquet 1.1) + * Read/write support for GeoArrow (struct-based) encoding (GeoParquet 1.1) + * add more configuration options for Parquet dataset case, and default to using + custom I/O layer (#9497) + * ogr2ogr/Parquet: add hack to avoid super lengthy processing when using -limit + with a Parquet dataset source (#9497) + * make it recognize bbox field from Overture Maps 2024-01-17-alpha.0 and + 2024-04-16-beta.0 
releases + +PGDUMP driver: + * add a LAUNDER_ASCII=YES/NO (default NO) layer creation option + +PMTiles driver: + * handle decompressing data with a compression ratio > 16 (#9646) + +PostgreSQL driver: + * serialize GDAL multidomain metadata of layer as XML in a + ogr_system_tables.metadata table + * be robust to inexact case for schema when creating a layer + 'schema_name.layer_name' (#9125) + * remove support for PostgreSQL < 9 and PostGIS < 2 (#8937) + * add a LAUNDER_ASCII=YES/NO (default NO) layer creation option + +Shapefile driver: + * Shapelib: resync with upstream + * GetNextArrowArray(): add specialized implementation restricted to situations + where only retrieving of FID values is asked + * make it recognize /vsizip/foo.shp.zip directories + * add read/write support for DBF Logical field type mapped to OGR OFSTBoolean + +VFK driver: + * Fix solving circle parcel geometry (#8993) + +## SWIG Language Bindings + +All bindings: + * add osr.SpatialReference.ImportFromCF1(), ExportToCF1(), ExportToCF1Units() + * expose Geometry.UnaryUnion() + * expose OLCFastWriteArrowBatch + * add gdal.IsLineOfSightVisible() + +CSharp bindings: + * Exposes additional Dataset methods (#918, #9398) + +Java bindings: + * bump minimum version to Java 8 + +Python bindings: + * lots of improvements to documentation + * add a pyproject.toml with numpy as a build requirement (#8926, #8069) + * bump setuptools requirement to >= 67.0 + * define entry_points.console_scripts (#8811) + * add RasterAttributeTable::ReadValuesIOAsString, ReadValuesIOAsInteger, + ReadValuesIOAsDouble, RemoveStatistics() + * implement ``__arrow_c_stream__()`` interface for ogr.Layer + * add a ogr.Layer.GetArrowArrayStreamInterface() method + * add a ogr.Layer.WriteArrow() method consuming ``__arrow_c_stream__`` or + ``__arrow_c_array__`` interfaces (#9132) + * Invalidate layer ref when Dataset closes + * Accept str arg in FeatureDefn.GetFieldDefn + * Add Band.ReadAsMaskedArray + * Make gdal/ogr.GeneralCmdLineProcessor accept int, os.PathLike + * Avoid crash when accessing closed file + # GDAL/OGR 3.8.0 Releases Notes GDAL/OGR 3.8.0 is a feature release. From 9265a80cab57e63863a0743a2a84a183e2d2db0c Mon Sep 17 00:00:00 2001 From: Ryan <ryanfriedman5410+github@gmail.com> Date: Sat, 20 Apr 2024 16:26:08 -0600 Subject: [PATCH 118/230] Doc: add recommendation for finding older GDAL using CMake (#9714) This works on Ubuntu 22 which is distributed with GDAL 3.4 --- doc/source/development/cmake.rst | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/doc/source/development/cmake.rst b/doc/source/development/cmake.rst index c6ba812e4bb3..ce1a2fe136bf 100644 --- a/doc/source/development/cmake.rst +++ b/doc/source/development/cmake.rst @@ -6,7 +6,7 @@ Using GDAL in CMake projects .. versionadded:: 3.5 -The recommended way to use the GDAL library in a CMake project is to +The recommended way to use the GDAL library 3.5 or higher in a CMake project is to link to the imported library target ``GDAL::GDAL`` provided by the CMake configuration which comes with the library. Typical usage is: @@ -25,3 +25,23 @@ number of places. The lookup can be adjusted for all packages by setting the cache variable or environment variable ``CMAKE_PREFIX_PATH``. In particular, CMake will consult (and set) the cache variable ``GDAL_DIR``. + +Before GDAL 3.5, you can use the following to create the imported library target ``GDAL::GDAL``: + +.. 
code:: + + find_package(GDAL CONFIG QUIET) + if(NOT TARGET GDAL::GDAL) + find_package(GDAL REQUIRED) + if(NOT TARGET GDAL::GDAL) + add_library(GDAL IMPORTED) + if(DEFINED GDAL_LIBRARIES) + target_link_libraries(GDAL INTERFACE "${GDAL_LIBRARIES}") + add_library(GDAL::GDAL ALIAS GDAL) + else() + message(FATAL_ERROR "Missing GDAL_LIBRARIES") + endif() + endif() + endif() + + target_link_libraries(MyApp PRIVATE GDAL::GDAL) From 4131ab1023b8f6d3c1d530d378368073b78c13ba Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 21 Apr 2024 01:18:25 +0200 Subject: [PATCH 119/230] Parquet: recognize 1.1.0 as a valid GeoParquet version --- ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp b/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp index 114cf1410de4..ce09378b2c2a 100644 --- a/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp +++ b/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp @@ -91,7 +91,7 @@ void OGRParquetLayerBase::LoadGeoMetadata( if (osVersion != "0.1.0" && osVersion != "0.2.0" && osVersion != "0.3.0" && osVersion != "0.4.0" && osVersion != "1.0.0-beta.1" && osVersion != "1.0.0-rc.1" && - osVersion != "1.0.0") + osVersion != "1.0.0" && osVersion != "1.1.0") { CPLDebug( "PARQUET", From 6c176229a05c7c5623cfdfe7345a0796891841f6 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 21 Apr 2024 17:31:46 +0200 Subject: [PATCH 120/230] Internal libtiff: avoid potential unsigned integer overflow on corrupted tag Fixes https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=67674&q=gdal --- frmts/gtiff/libtiff/tif_dirread.c | 307 +++++++++++++++++++++++++----- 1 file changed, 260 insertions(+), 47 deletions(-) diff --git a/frmts/gtiff/libtiff/tif_dirread.c b/frmts/gtiff/libtiff/tif_dirread.c index 8f57e1b97052..390a9104d299 100644 --- a/frmts/gtiff/libtiff/tif_dirread.c +++ b/frmts/gtiff/libtiff/tif_dirread.c @@ -4081,9 +4081,16 @@ static int ByteCountLooksBad(TIFF *tif) * To evaluate the IFD data size when reading, save the offset and data size of * all data that does not fit into the IFD entries themselves. */ -static void EvaluateIFDdatasizeReading(TIFF *tif, TIFFDirEntry *dp) +static bool EvaluateIFDdatasizeReading(TIFF *tif, TIFFDirEntry *dp) { - uint64_t datalength = dp->tdir_count * TIFFDataWidth(dp->tdir_type); + const int data_width = TIFFDataWidth(dp->tdir_type); + if (data_width != 0 && dp->tdir_count > UINT64_MAX / data_width) + { + TIFFErrorExtR(tif, "EvaluateIFDdatasizeReading", + "Too large IFD data size"); + return false; + } + const uint64_t datalength = dp->tdir_count * data_width; if (datalength > ((tif->tif_flags & TIFF_BIGTIFF) ? 
0x8U : 0x4U)) { tif->tif_dir.td_dirdatasize_read += datalength; @@ -4115,6 +4122,7 @@ static void EvaluateIFDdatasizeReading(TIFF *tif, TIFFDirEntry *dp) .length = datalength; tif->tif_dir.td_dirdatasize_Noffsets++; } + return true; } /* @@ -4509,7 +4517,10 @@ int TIFFReadDirectory(TIFF *tif) uint16_t value; enum TIFFReadDirEntryErr err; err = TIFFReadDirEntryShort(tif, dp, &value); - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + goto bad; + } if (err == TIFFReadDirEntryErrCount) err = TIFFReadDirEntryPersampleShort(tif, dp, &value); @@ -4540,7 +4551,10 @@ int TIFFReadDirectory(TIFF *tif) err = TIFFReadDirEntryErrCount; else err = TIFFReadDirEntryDoubleArray(tif, dp, &data); - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + goto bad; + } if (err != TIFFReadDirEntryErrOk) { fip = TIFFFieldWithTag(tif, dp->tdir_tag); @@ -4583,7 +4597,10 @@ int TIFFReadDirectory(TIFF *tif) } _TIFFmemcpy(&(tif->tif_dir.td_stripoffset_entry), dp, sizeof(TIFFDirEntry)); - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + goto bad; + } } break; case TIFFTAG_STRIPBYTECOUNTS: @@ -4611,7 +4628,10 @@ int TIFFReadDirectory(TIFF *tif) } _TIFFmemcpy(&(tif->tif_dir.td_stripbytecount_entry), dp, sizeof(TIFFDirEntry)); - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + goto bad; + } } break; case TIFFTAG_COLORMAP: @@ -4667,7 +4687,10 @@ int TIFFReadDirectory(TIFF *tif) err = TIFFReadDirEntryErrCount; else err = TIFFReadDirEntryShortArray(tif, dp, &value); - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + goto bad; + } if (err != TIFFReadDirEntryErrOk) { fip = TIFFFieldWithTag(tif, dp->tdir_tag); @@ -6266,7 +6289,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) } } } - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != NULL) + _TIFFfreeExt(tif, data); + return (0); + } if (mb + 1 < (uint32_t)dp->tdir_count) TIFFWarningExtR( tif, module, @@ -6394,7 +6422,10 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryLong8(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + return 0; + } if (!TIFFSetField(tif, dp->tdir_tag, data)) return (0); } @@ -6408,7 +6439,10 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntrySlong8(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + return 0; + } if (!TIFFSetField(tif, dp->tdir_tag, data)) return (0); } @@ -6422,7 +6456,10 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryFloat(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + return 0; + } if (!TIFFSetField(tif, dp->tdir_tag, data)) return (0); } @@ -6436,7 +6473,10 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryDouble(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + return 0; + } if (!TIFFSetField(tif, dp->tdir_tag, data)) return (0); } @@ -6450,7 +6490,10 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err 
= TIFFReadDirEntryIfd8(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + return 0; + } if (!TIFFSetField(tif, dp->tdir_tag, data)) return (0); } @@ -6500,7 +6543,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryByteArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, data); if (data != 0) @@ -6530,7 +6578,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntrySbyteArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, data); if (data != 0) @@ -6560,7 +6613,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryShortArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, data); if (data != 0) @@ -6590,7 +6648,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntrySshortArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, data); if (data != 0) @@ -6620,7 +6683,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryLongArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, data); if (data != 0) @@ -6650,7 +6718,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntrySlongArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, data); if (data != 0) @@ -6680,7 +6753,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryLong8Array(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, data); if (data != 0) @@ -6710,7 +6788,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntrySlong8Array(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, data); if (data != 0) @@ -6740,7 +6823,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryFloatArray(tif, dp, &data); if (err == 
TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, data); if (data != 0) @@ -6772,7 +6860,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryDoubleArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, data); if (data != 0) @@ -6795,7 +6888,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryByteArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; if (data != 0 && dp->tdir_count > 0 && data[dp->tdir_count - 1] != '\0') @@ -6846,7 +6944,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryByteArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint16_t)(dp->tdir_count), data); @@ -6870,7 +6973,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntrySbyteArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint16_t)(dp->tdir_count), data); @@ -6894,7 +7002,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryShortArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint16_t)(dp->tdir_count), data); @@ -6918,7 +7031,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntrySshortArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint16_t)(dp->tdir_count), data); @@ -6942,7 +7060,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryLongArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint16_t)(dp->tdir_count), data); @@ -6966,7 +7089,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntrySlongArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint16_t)(dp->tdir_count), data); @@ -6990,7 +7118,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, 
int recover) err = TIFFReadDirEntryLong8Array(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint16_t)(dp->tdir_count), data); @@ -7014,7 +7147,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntrySlong8Array(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint16_t)(dp->tdir_count), data); @@ -7038,7 +7176,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryFloatArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint16_t)(dp->tdir_count), data); @@ -7062,7 +7205,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryDoubleArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint16_t)(dp->tdir_count), data); @@ -7086,7 +7234,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryIfd8Array(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint16_t)(dp->tdir_count), data); @@ -7106,7 +7259,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryByteArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; if (data != 0 && dp->tdir_count > 0 && data[dp->tdir_count - 1] != '\0') @@ -7177,7 +7335,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) } if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, count, data); if (data != 0) @@ -7195,7 +7358,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntrySbyteArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint32_t)(dp->tdir_count), data); @@ -7214,7 +7382,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryShortArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint32_t)(dp->tdir_count), data); @@ -7233,7 +7406,12 @@ static int 
TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntrySshortArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint32_t)(dp->tdir_count), data); @@ -7252,7 +7430,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryLongArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint32_t)(dp->tdir_count), data); @@ -7271,7 +7454,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntrySlongArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint32_t)(dp->tdir_count), data); @@ -7290,7 +7478,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryLong8Array(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint32_t)(dp->tdir_count), data); @@ -7309,7 +7502,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntrySlong8Array(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint32_t)(dp->tdir_count), data); @@ -7328,7 +7526,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryFloatArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint32_t)(dp->tdir_count), data); @@ -7347,7 +7550,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryDoubleArray(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint32_t)(dp->tdir_count), data); @@ -7366,7 +7574,12 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) err = TIFFReadDirEntryIfd8Array(tif, dp, &data); if (err == TIFFReadDirEntryErrOk) { - EvaluateIFDdatasizeReading(tif, dp); + if (!EvaluateIFDdatasizeReading(tif, dp)) + { + if (data != 0) + _TIFFfreeExt(tif, data); + return 0; + } int m; m = TIFFSetField(tif, dp->tdir_tag, (uint32_t)(dp->tdir_count), data); From 24f3cd87e720c59dd80e8f5916c8f7ddd5f06351 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 21 Apr 2024 21:48:20 +0200 Subject: [PATCH 121/230] tiff_read.py: adapt for latest internal libtiff refresh --- autotest/gcore/tiff_read.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git 
a/autotest/gcore/tiff_read.py b/autotest/gcore/tiff_read.py index a21b335ebaf8..3fd3354ee95f 100755 --- a/autotest/gcore/tiff_read.py +++ b/autotest/gcore/tiff_read.py @@ -3687,10 +3687,8 @@ def test_tiff_read_huge_number_strips(): if md["LIBTIFF"] != "INTERNAL": pytest.skip("Test for internal libtiff") - with gdal.quiet_errors(): - ds = gdal.Open("data/huge-number-strips.tif") - with pytest.raises(Exception): - ds.GetRasterBand(1).Checksum() + with pytest.raises(Exception): + gdal.Open("data/huge-number-strips.tif") ############################################################################### From 7129f621d39d9fc82c2f205cdeb44505f6854118 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 21 Apr 2024 18:14:25 +0200 Subject: [PATCH 122/230] OpenFileGDB: writer: fix memleak in multipatch writing when there's an error Fixes https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=67831 --- ogr/ogrpgeogeometry.cpp | 182 ++++++++---------- ogr/ogrpgeogeometry.h | 9 +- .../openfilegdb/filegdbtable_write.cpp | 30 ++- ogr/ogrsf_frmts/shape/shape2ogr.cpp | 45 ++--- 4 files changed, 115 insertions(+), 151 deletions(-) diff --git a/ogr/ogrpgeogeometry.cpp b/ogr/ogrpgeogeometry.cpp index 616b56a90285..ccb5387b7a7f 100644 --- a/ogr/ogrpgeogeometry.cpp +++ b/ogr/ogrpgeogeometry.cpp @@ -1350,8 +1350,10 @@ id,WKT OGRErr OGRCreateMultiPatch(const OGRGeometry *poGeomConst, int bAllowSHPTTriangle, int &nParts, - int *&panPartStart, int *&panPartType, int &nPoints, - OGRRawPoint *&poPoints, double *&padfZ) + std::vector<int> &anPartStart, + std::vector<int> &anPartType, int &nPoints, + std::vector<OGRRawPoint> &aoPoints, + std::vector<double> &adfZ) { const OGRwkbGeometryType eType = wkbFlatten(poGeomConst->getGeometryType()); if (eType != wkbPolygon && eType != wkbTriangle && @@ -1385,11 +1387,11 @@ OGRErr OGRCreateMultiPatch(const OGRGeometry *poGeomConst, } nParts = 0; - panPartStart = nullptr; - panPartType = nullptr; + anPartStart.clear(); + anPartType.clear(); nPoints = 0; - poPoints = nullptr; - padfZ = nullptr; + aoPoints.clear(); + adfZ.clear(); int nBeginLastPart = 0; for (const auto poPoly : *poMPoly) { @@ -1402,124 +1404,109 @@ OGRErr OGRCreateMultiPatch(const OGRGeometry *poGeomConst, if (nRings == 1 && poRing->getNumPoints() == 4) { int nCorrectedPoints = nPoints; - if (nParts > 0 && poPoints != nullptr && - panPartType[nParts - 1] == SHPP_OUTERRING && - nPoints - panPartStart[nParts - 1] == 4) + if (nParts > 0 && anPartType[nParts - 1] == SHPP_OUTERRING && + nPoints - anPartStart[nParts - 1] == 4) { nCorrectedPoints--; } - if (nParts > 0 && poPoints != nullptr && - ((panPartType[nParts - 1] == SHPP_TRIANGLES && - nPoints - panPartStart[nParts - 1] == 3) || - (panPartType[nParts - 1] == SHPP_OUTERRING && - nPoints - panPartStart[nParts - 1] == 4) || - panPartType[nParts - 1] == SHPP_TRIFAN) && - poRing->getX(0) == poPoints[nBeginLastPart].x && - poRing->getY(0) == poPoints[nBeginLastPart].y && - poRing->getZ(0) == padfZ[nBeginLastPart] && - poRing->getX(1) == poPoints[nCorrectedPoints - 1].x && - poRing->getY(1) == poPoints[nCorrectedPoints - 1].y && - poRing->getZ(1) == padfZ[nCorrectedPoints - 1]) + if (nParts > 0 && + ((anPartType[nParts - 1] == SHPP_TRIANGLES && + nPoints - anPartStart[nParts - 1] == 3) || + (anPartType[nParts - 1] == SHPP_OUTERRING && + nPoints - anPartStart[nParts - 1] == 4) || + anPartType[nParts - 1] == SHPP_TRIFAN) && + poRing->getX(0) == aoPoints[nBeginLastPart].x && + poRing->getY(0) == aoPoints[nBeginLastPart].y && + poRing->getZ(0) == 
adfZ[nBeginLastPart] && + poRing->getX(1) == aoPoints[nCorrectedPoints - 1].x && + poRing->getY(1) == aoPoints[nCorrectedPoints - 1].y && + poRing->getZ(1) == adfZ[nCorrectedPoints - 1]) { nPoints = nCorrectedPoints; - panPartType[nParts - 1] = SHPP_TRIFAN; - - poPoints = static_cast<OGRRawPoint *>( - CPLRealloc(poPoints, (nPoints + 1) * sizeof(OGRRawPoint))); - padfZ = static_cast<double *>( - CPLRealloc(padfZ, (nPoints + 1) * sizeof(double))); - poPoints[nPoints].x = poRing->getX(2); - poPoints[nPoints].y = poRing->getY(2); - padfZ[nPoints] = poRing->getZ(2); + anPartType[nParts - 1] = SHPP_TRIFAN; + + aoPoints.resize(nPoints + 1); + adfZ.resize(nPoints + 1); + aoPoints[nPoints].x = poRing->getX(2); + aoPoints[nPoints].y = poRing->getY(2); + adfZ[nPoints] = poRing->getZ(2); nPoints++; } - else if (nParts > 0 && poPoints != nullptr && - ((panPartType[nParts - 1] == SHPP_TRIANGLES && - nPoints - panPartStart[nParts - 1] == 3) || - (panPartType[nParts - 1] == SHPP_OUTERRING && - nPoints - panPartStart[nParts - 1] == 4) || - panPartType[nParts - 1] == SHPP_TRISTRIP) && - poRing->getX(0) == poPoints[nCorrectedPoints - 2].x && - poRing->getY(0) == poPoints[nCorrectedPoints - 2].y && - poRing->getZ(0) == padfZ[nCorrectedPoints - 2] && - poRing->getX(1) == poPoints[nCorrectedPoints - 1].x && - poRing->getY(1) == poPoints[nCorrectedPoints - 1].y && - poRing->getZ(1) == padfZ[nCorrectedPoints - 1]) + else if (nParts > 0 && + ((anPartType[nParts - 1] == SHPP_TRIANGLES && + nPoints - anPartStart[nParts - 1] == 3) || + (anPartType[nParts - 1] == SHPP_OUTERRING && + nPoints - anPartStart[nParts - 1] == 4) || + anPartType[nParts - 1] == SHPP_TRISTRIP) && + poRing->getX(0) == aoPoints[nCorrectedPoints - 2].x && + poRing->getY(0) == aoPoints[nCorrectedPoints - 2].y && + poRing->getZ(0) == adfZ[nCorrectedPoints - 2] && + poRing->getX(1) == aoPoints[nCorrectedPoints - 1].x && + poRing->getY(1) == aoPoints[nCorrectedPoints - 1].y && + poRing->getZ(1) == adfZ[nCorrectedPoints - 1]) { nPoints = nCorrectedPoints; - panPartType[nParts - 1] = SHPP_TRISTRIP; - - poPoints = static_cast<OGRRawPoint *>( - CPLRealloc(poPoints, (nPoints + 1) * sizeof(OGRRawPoint))); - padfZ = static_cast<double *>( - CPLRealloc(padfZ, (nPoints + 1) * sizeof(double))); - poPoints[nPoints].x = poRing->getX(2); - poPoints[nPoints].y = poRing->getY(2); - padfZ[nPoints] = poRing->getZ(2); + anPartType[nParts - 1] = SHPP_TRISTRIP; + + aoPoints.resize(nPoints + 1); + adfZ.resize(nPoints + 1); + aoPoints[nPoints].x = poRing->getX(2); + aoPoints[nPoints].y = poRing->getY(2); + adfZ[nPoints] = poRing->getZ(2); nPoints++; } else { - if (nParts == 0 || panPartType[nParts - 1] != SHPP_TRIANGLES || + if (nParts == 0 || anPartType[nParts - 1] != SHPP_TRIANGLES || !bAllowSHPTTriangle) { nBeginLastPart = nPoints; - panPartStart = static_cast<int *>( - CPLRealloc(panPartStart, (nParts + 1) * sizeof(int))); - panPartType = static_cast<int *>( - CPLRealloc(panPartType, (nParts + 1) * sizeof(int))); - panPartStart[nParts] = nPoints; - panPartType[nParts] = + anPartStart.resize(nParts + 1); + anPartType.resize(nParts + 1); + anPartStart[nParts] = nPoints; + anPartType[nParts] = bAllowSHPTTriangle ? 
SHPP_TRIANGLES : SHPP_OUTERRING; nParts++; } - poPoints = static_cast<OGRRawPoint *>( - CPLRealloc(poPoints, (nPoints + 4) * sizeof(OGRRawPoint))); - padfZ = static_cast<double *>( - CPLRealloc(padfZ, (nPoints + 4) * sizeof(double))); + aoPoints.resize(nPoints + 4); + adfZ.resize(nPoints + 4); for (int i = 0; i < 4; i++) { - poPoints[nPoints + i].x = poRing->getX(i); - poPoints[nPoints + i].y = poRing->getY(i); - padfZ[nPoints + i] = poRing->getZ(i); + aoPoints[nPoints + i].x = poRing->getX(i); + aoPoints[nPoints + i].y = poRing->getY(i); + adfZ[nPoints + i] = poRing->getZ(i); } nPoints += bAllowSHPTTriangle ? 3 : 4; } } else { - panPartStart = static_cast<int *>( - CPLRealloc(panPartStart, (nParts + nRings) * sizeof(int))); - panPartType = static_cast<int *>( - CPLRealloc(panPartType, (nParts + nRings) * sizeof(int))); + anPartStart.resize(nParts + nRings); + anPartType.resize(nParts + nRings); for (int i = 0; i < nRings; i++) { - panPartStart[nParts + i] = nPoints; + anPartStart[nParts + i] = nPoints; if (i == 0) { poRing = poPoly->getExteriorRing(); - panPartType[nParts + i] = SHPP_OUTERRING; + anPartType[nParts + i] = SHPP_OUTERRING; } else { poRing = poPoly->getInteriorRing(i - 1); - panPartType[nParts + i] = SHPP_INNERRING; + anPartType[nParts + i] = SHPP_INNERRING; } - poPoints = static_cast<OGRRawPoint *>( - CPLRealloc(poPoints, (nPoints + poRing->getNumPoints()) * - sizeof(OGRRawPoint))); - padfZ = static_cast<double *>( - CPLRealloc(padfZ, (nPoints + poRing->getNumPoints()) * - sizeof(double))); + aoPoints.resize(nPoints + poRing->getNumPoints()); + adfZ.resize(nPoints + poRing->getNumPoints()); for (int k = 0; k < poRing->getNumPoints(); k++) { - poPoints[nPoints + k].x = poRing->getX(k); - poPoints[nPoints + k].y = poRing->getY(k); - padfZ[nPoints + k] = poRing->getZ(k); + aoPoints[nPoints + k].x = poRing->getX(k); + aoPoints[nPoints + k].y = poRing->getY(k); + adfZ[nPoints + k] = poRing->getZ(k); } nPoints += poRing->getNumPoints(); } @@ -1528,9 +1515,9 @@ OGRErr OGRCreateMultiPatch(const OGRGeometry *poGeomConst, } } - if (nParts == 1 && panPartType[0] == SHPP_OUTERRING && nPoints == 4) + if (nParts == 1 && anPartType[0] == SHPP_OUTERRING && nPoints == 4) { - panPartType[0] = SHPP_TRIFAN; + anPartType[0] = SHPP_TRIFAN; nPoints = 3; } @@ -1545,13 +1532,13 @@ OGRErr OGRWriteMultiPatchToShapeBin(const OGRGeometry *poGeom, GByte **ppabyShape, int *pnBytes) { int nParts = 0; - int *panPartStart = nullptr; - int *panPartType = nullptr; + std::vector<int> anPartStart; + std::vector<int> anPartType; int nPoints = 0; - OGRRawPoint *poPoints = nullptr; - double *padfZ = nullptr; - OGRErr eErr = OGRCreateMultiPatch(poGeom, TRUE, nParts, panPartStart, - panPartType, nPoints, poPoints, padfZ); + std::vector<OGRRawPoint> aoPoints; + std::vector<double> adfZ; + OGRErr eErr = OGRCreateMultiPatch(poGeom, TRUE, nParts, anPartStart, + anPartType, nPoints, aoPoints, adfZ); if (eErr != OGRERR_NONE) return eErr; @@ -1610,19 +1597,19 @@ OGRErr OGRWriteMultiPatchToShapeBin(const OGRGeometry *poGeom, for (int i = 0; i < nParts; i++) { - int nPartStart = CPL_LSBWORD32(panPartStart[i]); + int nPartStart = CPL_LSBWORD32(anPartStart[i]); memcpy(pabyPtr, &nPartStart, 4); pabyPtr += 4; } for (int i = 0; i < nParts; i++) { - int nPartType = CPL_LSBWORD32(panPartType[i]); + int nPartType = CPL_LSBWORD32(anPartType[i]); memcpy(pabyPtr, &nPartType, 4); pabyPtr += 4; } - if (poPoints != nullptr) - memcpy(pabyPtr, poPoints, 2 * 8 * nPoints); + if (!aoPoints.empty()) + memcpy(pabyPtr, aoPoints.data(), 2 * 8 * 
nPoints); // Swap box if needed. Shape doubles are always LSB. if (OGR_SWAP(wkbNDR)) @@ -1643,8 +1630,8 @@ OGRErr OGRWriteMultiPatchToShapeBin(const OGRGeometry *poGeom, } pabyPtr += 16; - if (padfZ != nullptr) - memcpy(pabyPtr, padfZ, 8 * nPoints); + if (!adfZ.empty()) + memcpy(pabyPtr, adfZ.data(), 8 * nPoints); // Swap box if needed. Shape doubles are always LSB. if (OGR_SWAP(wkbNDR)) { @@ -1654,11 +1641,6 @@ OGRErr OGRWriteMultiPatchToShapeBin(const OGRGeometry *poGeom, // pabyPtr += 8 * nPoints; } - CPLFree(panPartStart); - CPLFree(panPartType); - CPLFree(poPoints); - CPLFree(padfZ); - return OGRERR_NONE; } diff --git a/ogr/ogrpgeogeometry.h b/ogr/ogrpgeogeometry.h index d46cafe39652..bb047cd0193b 100644 --- a/ogr/ogrpgeogeometry.h +++ b/ogr/ogrpgeogeometry.h @@ -33,6 +33,8 @@ #include "ogr_geometry.h" +#include <vector> + #define SHPT_NULL 0 #ifndef SHPT_POINT @@ -87,9 +89,10 @@ OGRErr CPL_DLL OGRWriteToShapeBin(const OGRGeometry *poGeom, GByte **ppabyShape, OGRErr CPL_DLL OGRCreateMultiPatch(const OGRGeometry *poGeom, int bAllowSHPTTriangle, int &nParts, - int *&panPartStart, int *&panPartType, - int &nPoints, OGRRawPoint *&poPoints, - double *&padfZ); + std::vector<int> &anPartStart, + std::vector<int> &anPartType, int &nPoints, + std::vector<OGRRawPoint> &aoPoints, + std::vector<double> &adfZ); OGRErr CPL_DLL OGRWriteMultiPatchToShapeBin(const OGRGeometry *poGeom, GByte **ppabyShape, int *pnBytes); diff --git a/ogr/ogrsf_frmts/openfilegdb/filegdbtable_write.cpp b/ogr/ogrsf_frmts/openfilegdb/filegdbtable_write.cpp index b97fb2c258ba..aed5fe2a9b3b 100644 --- a/ogr/ogrsf_frmts/openfilegdb/filegdbtable_write.cpp +++ b/ogr/ogrsf_frmts/openfilegdb/filegdbtable_write.cpp @@ -1195,14 +1195,14 @@ bool FileGDBTable::EncodeGeometry(const FileGDBGeomField *poGeomField, case wkbGeometryCollection: { int nParts = 0; - int *panPartStart = nullptr; - int *panPartType = nullptr; + std::vector<int> anPartStart; + std::vector<int> anPartType; int nPoints = 0; - OGRRawPoint *poPoints = nullptr; - double *padfZ = nullptr; + std::vector<OGRRawPoint> aoPoints; + std::vector<double> adfZ; OGRErr eErr = - OGRCreateMultiPatch(poGeom, TRUE, nParts, panPartStart, - panPartType, nPoints, poPoints, padfZ); + OGRCreateMultiPatch(poGeom, TRUE, nParts, anPartStart, + anPartType, nPoints, aoPoints, adfZ); if (eErr != OGRERR_NONE) return false; @@ -1229,22 +1229,18 @@ bool FileGDBTable::EncodeGeometry(const FileGDBGeomField *poGeomField, if (!EncodeEnvelope(m_abyGeomBuffer, poGeomField, poGeom)) { - CPLFree(panPartStart); - CPLFree(panPartType); - CPLFree(poPoints); - CPLFree(padfZ); return false; } for (int i = 0; i < nParts - 1; i++) { WriteVarUInt(m_abyGeomBuffer, - panPartStart[i + 1] - panPartStart[i]); + anPartStart[i + 1] - anPartStart[i]); } for (int i = 0; i < nParts; i++) { - WriteVarUInt(m_abyGeomBuffer, panPartType[i]); + WriteVarUInt(m_abyGeomBuffer, anPartType[i]); } { @@ -1253,7 +1249,7 @@ bool FileGDBTable::EncodeGeometry(const FileGDBGeomField *poGeomField, for (int i = 0; i < nPoints; ++i) { double dfVal = std::round( - (poPoints[i].x - poGeomField->GetXOrigin()) * + (aoPoints[i].x - poGeomField->GetXOrigin()) * poGeomField->GetXYScale()); CHECK_CAN_BE_ENCODED_ON_VARINT(dfVal, nLastX, "Cannot encode value"); @@ -1261,7 +1257,7 @@ bool FileGDBTable::EncodeGeometry(const FileGDBGeomField *poGeomField, WriteVarInt(m_abyGeomBuffer, nX - nLastX); dfVal = std::round( - (poPoints[i].y - poGeomField->GetYOrigin()) * + (aoPoints[i].y - poGeomField->GetYOrigin()) * poGeomField->GetXYScale()); 
CHECK_CAN_BE_ENCODED_ON_VARINT(dfVal, nLastY, "Cannot encode Y value"); @@ -1278,7 +1274,7 @@ bool FileGDBTable::EncodeGeometry(const FileGDBGeomField *poGeomField, for (int i = 0; i < nPoints; ++i) { double dfVal = - std::round((padfZ[i] - poGeomField->GetZOrigin()) * + std::round((adfZ[i] - poGeomField->GetZOrigin()) * poGeomField->GetZScale()); CHECK_CAN_BE_ENCODED_ON_VARINT(dfVal, nLastZ, "Bad Z value"); @@ -1289,10 +1285,6 @@ bool FileGDBTable::EncodeGeometry(const FileGDBGeomField *poGeomField, } } } - CPLFree(panPartStart); - CPLFree(panPartType); - CPLFree(poPoints); - CPLFree(padfZ); return true; } diff --git a/ogr/ogrsf_frmts/shape/shape2ogr.cpp b/ogr/ogrsf_frmts/shape/shape2ogr.cpp index d2577b63fbae..457ce1711fc9 100644 --- a/ogr/ogrsf_frmts/shape/shape2ogr.cpp +++ b/ogr/ogrsf_frmts/shape/shape2ogr.cpp @@ -1050,55 +1050,42 @@ static OGRErr SHPWriteOGRObject(SHPHandle hSHP, int iShape, else if (hSHP->nShapeType == SHPT_MULTIPATCH) { int nParts = 0; - int *panPartStart = nullptr; - int *panPartType = nullptr; + std::vector<int> anPartStart; + std::vector<int> anPartType; int nPoints = 0; - OGRRawPoint *poPoints = nullptr; - double *padfZ = nullptr; + std::vector<OGRRawPoint> aoPoints; + std::vector<double> adfZ; OGRErr eErr = OGRCreateMultiPatch(poGeom, FALSE, // no SHPP_TRIANGLES - nParts, panPartStart, panPartType, - nPoints, poPoints, padfZ); + nParts, anPartStart, anPartType, + nPoints, aoPoints, adfZ); if (eErr != OGRERR_NONE) return OGRERR_UNSUPPORTED_GEOMETRY_TYPE; - double *padfX = - static_cast<double *>(CPLMalloc(sizeof(double) * nPoints)); - double *padfY = - static_cast<double *>(CPLMalloc(sizeof(double) * nPoints)); + std::vector<double> adfX(nPoints); + std::vector<double> adfY(nPoints); for (int i = 0; i < nPoints; ++i) { - padfX[i] = poPoints[i].x; - padfY[i] = poPoints[i].y; + adfX[i] = aoPoints[i].x; + adfY[i] = aoPoints[i].y; } - CPLFree(poPoints); - if (!CheckNonFiniteCoordinates(padfX, nPoints) || - !CheckNonFiniteCoordinates(padfY, nPoints) || - !CheckNonFiniteCoordinates(padfZ, nPoints)) + if (!CheckNonFiniteCoordinates(adfX.data(), nPoints) || + !CheckNonFiniteCoordinates(adfY.data(), nPoints) || + !CheckNonFiniteCoordinates(adfZ.data(), nPoints)) { - CPLFree(panPartStart); - CPLFree(panPartType); - CPLFree(padfX); - CPLFree(padfY); - CPLFree(padfZ); return OGRERR_FAILURE; } SHPObject *psShape = - SHPCreateObject(hSHP->nShapeType, iShape, nParts, panPartStart, - panPartType, nPoints, padfX, padfY, padfZ, nullptr); + SHPCreateObject(hSHP->nShapeType, iShape, nParts, + anPartStart.data(), anPartType.data(), nPoints, + adfX.data(), adfY.data(), adfZ.data(), nullptr); if (bRewind) SHPRewindObject(hSHP, psShape); const int nReturnedShapeID = SHPWriteObject(hSHP, iShape, psShape); SHPDestroyObject(psShape); - CPLFree(panPartStart); - CPLFree(panPartType); - CPLFree(padfX); - CPLFree(padfY); - CPLFree(padfZ); - if (nReturnedShapeID == -1) return OGRERR_FAILURE; } From 735e1229556f0bacae52a482451dc928c4e51b6c Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 22 Apr 2024 00:16:34 +0200 Subject: [PATCH 123/230] Miramon: avoid integer overflow (fixes https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=68205) --- ogr/ogrsf_frmts/miramon/mm_gdal_functions.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c b/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c index 4ce989685b9c..e84b96a583d1 100644 --- a/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c +++ 
b/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c @@ -1306,7 +1306,7 @@ int MM_ReadExtendedDBFHeaderFromFile(const char *szFileName, if (some_problems_when_reading > 0) { - if ((offset_fals - 1) - 32 < 0) + if (offset_fals < 1 + 32) pMMBDXP->nFields = 0; else pMMBDXP->nFields = From 3adfef0fa110fee76cc8d5e549ca83a957c4e9c4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Apr 2024 02:29:11 +0000 Subject: [PATCH 124/230] Bump actions/checkout from 4.1.2 to 4.1.3 Bumps [actions/checkout](https://github.com/actions/checkout) from 4.1.2 to 4.1.3. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/9bb56186c3b09b4f86b1c65136769dd318469633...1d96c772d19495a3b5c517cd2bc0cb401ea0529f) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] <support@github.com> --- .github/workflows/android_cmake.yml | 2 +- .github/workflows/clang_static_analyzer.yml | 2 +- .github/workflows/cmake_builds.yml | 12 ++++++------ .github/workflows/code_checks.yml | 14 +++++++------- .github/workflows/codeql.yml | 2 +- .github/workflows/conda.yml | 2 +- .github/workflows/coverity_scan.yml | 2 +- .github/workflows/doc_build.yml | 2 +- .github/workflows/linux_build.yml | 2 +- .github/workflows/macos.yml | 2 +- .github/workflows/scorecard.yml | 2 +- .github/workflows/slow_tests.yml | 2 +- .github/workflows/windows_build.yml | 2 +- 13 files changed, 24 insertions(+), 24 deletions(-) diff --git a/.github/workflows/android_cmake.yml b/.github/workflows/android_cmake.yml index 82806d841d0a..55a556debb9a 100644 --- a/.github/workflows/android_cmake.yml +++ b/.github/workflows/android_cmake.yml @@ -24,7 +24,7 @@ jobs: if: "!contains(github.event.head_commit.message, '[ci skip]') && !contains(github.event.head_commit.message, '[skip ci]')" steps: - name: Checkout - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - name: Cache uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 diff --git a/.github/workflows/clang_static_analyzer.yml b/.github/workflows/clang_static_analyzer.yml index de666dda1414..07965bbb2675 100644 --- a/.github/workflows/clang_static_analyzer.yml +++ b/.github/workflows/clang_static_analyzer.yml @@ -24,7 +24,7 @@ jobs: if: "!contains(github.event.head_commit.message, '[ci skip]') && !contains(github.event.head_commit.message, '[skip ci]')" steps: - name: Checkout - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - name: Run run: docker run --rm -v $PWD:$PWD ubuntu:22.04 sh -c "cd $PWD && apt update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends sudo software-properties-common && DEBIAN_FRONTEND=noninteractive sh ./ci/travis/csa_common/before_install.sh && sh ./ci/travis/csa_common/install.sh && sh ./ci/travis/csa_common/script.sh" diff --git a/.github/workflows/cmake_builds.yml b/.github/workflows/cmake_builds.yml index 5c3525b7079b..3ae5e67a55e6 100644 --- a/.github/workflows/cmake_builds.yml +++ b/.github/workflows/cmake_builds.yml @@ -31,7 +31,7 @@ jobs: cache-name: cmake-ubuntu-focal steps: - name: Checkout GDAL - uses: 
actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - name: Setup cache uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 id: cache @@ -311,7 +311,7 @@ jobs: run: | git config --global core.autocrlf false - name: Checkout GDAL - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - name: Install development packages uses: msys2/setup-msys2@cc11e9188b693c2b100158c3322424c4cc1dadea # v2.22.0 with: @@ -404,7 +404,7 @@ jobs: run: | git config --global core.autocrlf false - name: Checkout GDAL - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - uses: ilammy/msvc-dev-cmd@0b201ec74fa43914dc39ae48a89fd1d8cb592756 # v1.13.0 - name: populate JAVA_HOME shell: pwsh @@ -506,7 +506,7 @@ jobs: run: | git config --global core.autocrlf false - name: Checkout GDAL - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 # v3.0.3 with: activate-environment: gdalenv @@ -573,7 +573,7 @@ jobs: with: xcode-version: 14.3 - name: Checkout GDAL - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - name: Setup cache uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 id: cache @@ -653,7 +653,7 @@ jobs: run: | git config --global core.autocrlf false - name: Checkout GDAL - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 # v3.0.3 with: activate-environment: gdalenv diff --git a/.github/workflows/code_checks.yml b/.github/workflows/code_checks.yml index 3bb185c09ea1..b11e0ceee661 100644 --- a/.github/workflows/code_checks.yml +++ b/.github/workflows/code_checks.yml @@ -24,7 +24,7 @@ jobs: if: "!contains(github.event.head_commit.message, '[ci skip]') && !contains(github.event.head_commit.message, '[skip ci]')" steps: - name: Checkout - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - name: Install Requirements run: | @@ -46,7 +46,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - name: Detect tabulations run: ./scripts/detect_tabulations.sh @@ -81,7 +81,7 @@ jobs: linting: runs-on: ubuntu-latest steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 - uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1 @@ -89,7 +89,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - name: Install Requirements run: | @@ -106,7 +106,7 @@ jobs: runs-on: 
ubuntu-latest steps: - name: Checkout - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - name: Install Requirements run: | @@ -125,7 +125,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - name: Set up Python uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: @@ -142,7 +142,7 @@ jobs: if: "!contains(github.event.head_commit.message, '[ci skip]') && !contains(github.event.head_commit.message, '[skip ci]')" steps: - name: Checkout - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - name: Install requirements run: | diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 1eb762f33787..ad9192552620 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -41,7 +41,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - name: Install dependencies run: | diff --git a/.github/workflows/conda.yml b/.github/workflows/conda.yml index 21ae565d564e..c0a3018aa73d 100644 --- a/.github/workflows/conda.yml +++ b/.github/workflows/conda.yml @@ -35,7 +35,7 @@ jobs: CACHE_NUMBER: 0 steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - name: Support longpaths run: git config --system core.longpaths true diff --git a/.github/workflows/coverity_scan.yml b/.github/workflows/coverity_scan.yml index 7f3442e7a22e..e46b38d9a4ea 100644 --- a/.github/workflows/coverity_scan.yml +++ b/.github/workflows/coverity_scan.yml @@ -43,7 +43,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - name: Login to GHCR if: env.CONTAINER_REGISTRY == 'ghcr.io' diff --git a/.github/workflows/doc_build.yml b/.github/workflows/doc_build.yml index 0de4bdd85688..812e057da942 100644 --- a/.github/workflows/doc_build.yml +++ b/.github/workflows/doc_build.yml @@ -23,7 +23,7 @@ jobs: container: ghcr.io/osgeo/proj-docs steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - name: Setup environment shell: bash -l {0} run: | diff --git a/.github/workflows/linux_build.yml b/.github/workflows/linux_build.yml index 23c409842f56..525b18d2f8a6 100644 --- a/.github/workflows/linux_build.yml +++ b/.github/workflows/linux_build.yml @@ -153,7 +153,7 @@ jobs: echo "CONTAINER_NAME_FULL=${CONTAINER_REGISTRY}/${CONTAINER_REGISTRY_USER,,}/${CONTAINER_NAME}:${CONTAINER_TAG_CLEAN}" >>${GITHUB_ENV} - name: Checkout - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - name: Login to Docker Hub if: env.CONTAINER_REGISTRY == 'docker.io' diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml index 9c7a5b8b74f7..d4b5e2087ec9 100644 --- a/.github/workflows/macos.yml +++ b/.github/workflows/macos.yml @@ -25,7 +25,7 @@ jobs: if: 
"!contains(github.event.head_commit.message, '[ci skip]') && !contains(github.event.head_commit.message, '[skip ci]')" steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 # v3.0.3 with: diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index ad15ac8d115c..c52bc1995c14 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -36,7 +36,7 @@ jobs: steps: - name: "Checkout code" - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 with: persist-credentials: false diff --git a/.github/workflows/slow_tests.yml b/.github/workflows/slow_tests.yml index 37cbb46ab79b..2e0a82fb9989 100644 --- a/.github/workflows/slow_tests.yml +++ b/.github/workflows/slow_tests.yml @@ -47,7 +47,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - name: Login to GHCR if: env.CONTAINER_REGISTRY == 'ghcr.io' diff --git a/.github/workflows/windows_build.yml b/.github/workflows/windows_build.yml index 1327c2c26bd1..bba1e21e9b07 100644 --- a/.github/workflows/windows_build.yml +++ b/.github/workflows/windows_build.yml @@ -56,7 +56,7 @@ jobs: git config --global core.autocrlf false - name: Checkout - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - name: Set environment shell: pwsh From 790e59a12ca751c77e029a325e27f89e7d5b9a42 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Apr 2024 02:29:15 +0000 Subject: [PATCH 125/230] Bump actions/upload-artifact from 4.3.1 to 4.3.2 Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 4.3.1 to 4.3.2. - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/5d5d22a31266ced268874388b861e4b58bb5c2f3...1746f4ab65b179e0ea60a494b83293b640dd5bba) --- updated-dependencies: - dependency-name: actions/upload-artifact dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] <support@github.com> --- .github/workflows/cifuzz.yml | 2 +- .github/workflows/conda.yml | 2 +- .github/workflows/doc_build.yml | 6 +++--- .github/workflows/linux_build.yml | 4 ++-- .github/workflows/scorecard.yml | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/cifuzz.yml b/.github/workflows/cifuzz.yml index 4b0b8afcd758..54ca94a51caf 100644 --- a/.github/workflows/cifuzz.yml +++ b/.github/workflows/cifuzz.yml @@ -31,7 +31,7 @@ jobs: fuzz-seconds: 600 dry-run: false - name: Upload Crash - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 + uses: actions/upload-artifact@1746f4ab65b179e0ea60a494b83293b640dd5bba # v4.3.2 if: failure() && steps.build.outcome == 'success' with: name: artifacts diff --git a/.github/workflows/conda.yml b/.github/workflows/conda.yml index 21ae565d564e..a15af5b71597 100644 --- a/.github/workflows/conda.yml +++ b/.github/workflows/conda.yml @@ -72,7 +72,7 @@ jobs: source ../ci/travis/conda/compile.sh working-directory: ./gdal-feedstock - - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 + - uses: actions/upload-artifact@1746f4ab65b179e0ea60a494b83293b640dd5bba # v4.3.2 with: name: ${{ matrix.platform }}-conda-package path: ./gdal-feedstock/packages/ diff --git a/.github/workflows/doc_build.yml b/.github/workflows/doc_build.yml index 0de4bdd85688..0817f99ddacd 100644 --- a/.github/workflows/doc_build.yml +++ b/.github/workflows/doc_build.yml @@ -81,15 +81,15 @@ jobs: # run: | # make spelling # working-directory: ./doc - - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 + - uses: actions/upload-artifact@1746f4ab65b179e0ea60a494b83293b640dd5bba # v4.3.2 with: name: PDF path: doc/build/latex/gdal.pdf - - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 + - uses: actions/upload-artifact@1746f4ab65b179e0ea60a494b83293b640dd5bba # v4.3.2 with: name: HTML path: doc/build/html/* - #- uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 + #- uses: actions/upload-artifact@1746f4ab65b179e0ea60a494b83293b640dd5bba # v4.3.2 # with: # name: Misspelled # path: doc/build/spelling/output.txt diff --git a/.github/workflows/linux_build.yml b/.github/workflows/linux_build.yml index 23c409842f56..8d0940fc4875 100644 --- a/.github/workflows/linux_build.yml +++ b/.github/workflows/linux_build.yml @@ -332,14 +332,14 @@ jobs: docker push ${CONTAINER_NAME_FULL} - name: Upload coverage artifacts - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 + uses: actions/upload-artifact@1746f4ab65b179e0ea60a494b83293b640dd5bba # v4.3.2 if: ${{ matrix.id == 'coverage' }} with: name: coverage_index.html path: build-${{ matrix.id }}/coverage_html/index.html - name: Upload coverage artifacts - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 + uses: actions/upload-artifact@1746f4ab65b179e0ea60a494b83293b640dd5bba # v4.3.2 if: ${{ matrix.id == 'coverage' }} with: name: HTML diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index ad15ac8d115c..f6ede9e3ca82 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -63,7 +63,7 @@ jobs: # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF # format to the repository Actions tab. 
- name: "Upload artifact" - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 + uses: actions/upload-artifact@1746f4ab65b179e0ea60a494b83293b640dd5bba # v4.3.2 with: name: SARIF file path: results.sarif From 9dc588eabb75cbedd36e7335683157edcc985e3e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Apr 2024 02:29:24 +0000 Subject: [PATCH 126/230] Bump github/codeql-action from 3.24.10 to 3.25.1 Bumps [github/codeql-action](https://github.com/github/codeql-action) from 3.24.10 to 3.25.1. - [Release notes](https://github.com/github/codeql-action/releases) - [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/github/codeql-action/compare/4355270be187e1b672a7a1c7c7bae5afdc1ab94a...c7f9125735019aa87cfc361530512d50ea439c71) --- updated-dependencies: - dependency-name: github/codeql-action dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] <support@github.com> --- .github/workflows/codeql.yml | 4 ++-- .github/workflows/scorecard.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 1eb762f33787..f0eb89e6cce7 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -98,7 +98,7 @@ jobs: # We do that after running CMake to avoid CodeQL to trigger during CMake time, # in particular during HDF5 detection which is terribly slow (https://github.com/OSGeo/gdal/issues/9549) - name: Initialize CodeQL - uses: github/codeql-action/init@4355270be187e1b672a7a1c7c7bae5afdc1ab94a # v3.24.10 + uses: github/codeql-action/init@c7f9125735019aa87cfc361530512d50ea439c71 # v3.25.1 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -118,6 +118,6 @@ jobs: (cd build && make -j$(nproc)) - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@4355270be187e1b672a7a1c7c7bae5afdc1ab94a # v3.24.10 + uses: github/codeql-action/analyze@c7f9125735019aa87cfc361530512d50ea439c71 # v3.25.1 with: category: "/language:${{matrix.language}}" diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index ad15ac8d115c..b5eeb4805a16 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -71,6 +71,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. 
- name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@4355270be187e1b672a7a1c7c7bae5afdc1ab94a # v3.24.10 + uses: github/codeql-action/upload-sarif@c7f9125735019aa87cfc361530512d50ea439c71 # v3.25.1 with: sarif_file: results.sarif From 36efc7fdb0de572d49009e56d72bf7084c42ad93 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 22 Apr 2024 13:05:39 +0200 Subject: [PATCH 127/230] Miramon: fix Heap-buffer-overflow in OGRMiraMonLayer::GetFeature (https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=68223) --- ogr/ogrsf_frmts/miramon/mm_gdal_functions.c | 12 ++++++++++++ ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c b/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c index e84b96a583d1..e1d6239950b4 100644 --- a/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c +++ b/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c @@ -1405,6 +1405,18 @@ int MM_ReadExtendedDBFHeaderFromFile(const char *szFileName, szMMNomCampIdGraficDefecte)) pMMBDXP->IdGraficField = nIField; + // Limit BytesPerField to avoid later integer overflows + // We could potentially limit further... + if (pMMBDXP->pField[nIField].BytesPerField > (uint32_t)(INT32_MAX - 1)) + { + free_function(pMMBDXP->pField); + pMMBDXP->pField = nullptr; + pMMBDXP->nFields = 0; + fclose_function(pf); + pMMBDXP->pfDataBase = nullptr; + return 1; + } + if (pMMBDXP->pField[nIField].BytesPerField == 0) { if (!MM_ES_DBF_ESTESA(pMMBDXP->dbf_version)) diff --git a/ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp b/ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp index ad04304d48c5..8ca8c95d2cbe 100644 --- a/ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp +++ b/ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp @@ -990,7 +990,7 @@ OGRFeature *OGRMiraMonLayer::GetFeature(GIntBig nFeatureId) { if (MMResizeStringToOperateIfNeeded( phMiraMonLayer, - phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField)) + phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField + 1)) { return nullptr; } From 5cc7be72f63c533c59c36ad33e350ec7e78d97d5 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 22 Apr 2024 13:22:41 +0200 Subject: [PATCH 128/230] Prepare for GDAL 3.9.0beta1 --- gdal.cmake | 4 ++-- swig/python/README.rst | 6 +++--- swig/python/gdal-utils/osgeo_utils/__init__.py | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/gdal.cmake b/gdal.cmake index 577cb0c11edd..54cfbe13aff5 100644 --- a/gdal.cmake +++ b/gdal.cmake @@ -6,8 +6,8 @@ # a new member or virtual function in a public C++ class, etc. 
# This will typically happen for each GDAL feature release (change of X or Y in # a X.Y.Z numbering scheme), but should not happen for a bugfix release (change of Z) -# Previous value: 34 for GDAL 3.8 -set(GDAL_SOVERSION 34) +# Previous value: 35 for GDAL 3.9 +set(GDAL_SOVERSION 35) # Switches to control build targets(cached) option(ENABLE_GNM "Build GNM (Geography Network Model) component" ON) diff --git a/swig/python/README.rst b/swig/python/README.rst index 58dff252ba80..c7286e95c433 100644 --- a/swig/python/README.rst +++ b/swig/python/README.rst @@ -13,7 +13,7 @@ reference documentation, but the https://gdal.org/api/python_bindings.html#tutor Dependencies ------------ - * libgdal (3.8.0 or greater) and header files (gdal-devel) + * libgdal (3.9.0 or greater) and header files (gdal-devel) * numpy (1.0.0 or greater) and header files (numpy-devel) (not explicitly required, but many examples and utilities will not work without it) @@ -61,14 +61,14 @@ To install the Python dependencies and build numpy-based raster support: Users can verify that numpy-based raster support has been installed with: :: - + python3 -c 'from osgeo import gdal_array' If this command raises an ImportError, numpy-based raster support has not been properly installed: :: - + Traceback (most recent call last): File "<string>", line 1, in <module> File "/usr/local/lib/python3.12/dist-packages/osgeo/gdal_array.py", line 10, in <module> diff --git a/swig/python/gdal-utils/osgeo_utils/__init__.py b/swig/python/gdal-utils/osgeo_utils/__init__.py index bef1858bf9e9..ed0443cbe2b3 100644 --- a/swig/python/gdal-utils/osgeo_utils/__init__.py +++ b/swig/python/gdal-utils/osgeo_utils/__init__.py @@ -1,5 +1,5 @@ __package_name__ = "gdal-utils" -gdal_utils_version = (3, 8, 99, 0) +gdal_utils_version = (3, 9, 0, 0) __version__ = ".".join(str(i) for i in gdal_utils_version) __author__ = "Frank Warmerdam" __author_email__ = "warmerdam@pobox.com" From 27b5611353ae0cfe6d4e0244ef3272723845bd14 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 22 Apr 2024 13:29:26 +0200 Subject: [PATCH 129/230] Update master to 3.10.0dev --- VERSION | 2 +- gcore/gdal_version.h.in | 6 +++--- swig/python/gdal-utils/osgeo_utils/__init__.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/VERSION b/VERSION index a5c4c763394f..30291cba2230 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -3.9.0 +3.10.0 diff --git a/gcore/gdal_version.h.in b/gcore/gdal_version.h.in index 52650e1b05ee..d7b4c9fd9e90 100644 --- a/gcore/gdal_version.h.in +++ b/gcore/gdal_version.h.in @@ -6,7 +6,7 @@ #ifndef GDAL_VERSION_MAJOR # define GDAL_VERSION_MAJOR 3 -# define GDAL_VERSION_MINOR 9 +# define GDAL_VERSION_MINOR 10 # define GDAL_VERSION_REV 0 # define GDAL_VERSION_BUILD 0 #endif @@ -24,9 +24,9 @@ #if !defined(DO_NOT_DEFINE_GDAL_DATE_NAME) #ifndef GDAL_RELEASE_DATE -# define GDAL_RELEASE_DATE 20239999 +# define GDAL_RELEASE_DATE 20249999 #endif #ifndef GDAL_RELEASE_NAME -# define GDAL_RELEASE_NAME "3.9.0dev" +# define GDAL_RELEASE_NAME "3.10.0dev" #endif #endif diff --git a/swig/python/gdal-utils/osgeo_utils/__init__.py b/swig/python/gdal-utils/osgeo_utils/__init__.py index ed0443cbe2b3..817dc2c6599a 100644 --- a/swig/python/gdal-utils/osgeo_utils/__init__.py +++ b/swig/python/gdal-utils/osgeo_utils/__init__.py @@ -1,5 +1,5 @@ __package_name__ = "gdal-utils" -gdal_utils_version = (3, 9, 0, 0) +gdal_utils_version = (3, 9, 99, 0) __version__ = ".".join(str(i) for i in gdal_utils_version) __author__ = "Frank Warmerdam" 
__author_email__ = "warmerdam@pobox.com" From 4a849713e74f4c5c6ce6d60a4d734ae8018879ff Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 22 Apr 2024 16:42:23 +0200 Subject: [PATCH 130/230] CI: disabled running Python autotests on OSX Arm64 (works around #9723) --- ci/travis/osx/script.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ci/travis/osx/script.sh b/ci/travis/osx/script.sh index 4bf71823f6b7..038f01d30b25 100755 --- a/ci/travis/osx/script.sh +++ b/ci/travis/osx/script.sh @@ -15,4 +15,5 @@ NPROC=$(sysctl -n hw.ncpu) echo "NPROC=${NPROC}" # Run all the Python autotests -(cd build && ctest -V -R autotest -j${NPROC}) +# FIXME: disabled for now because of https://github.com/OSGeo/gdal/issues/9723 +#(cd build && ctest -V -R autotest -j${NPROC}) From 9b2168d9ff6f732242cdd45e2deaba70b3474ee3 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 21 Apr 2024 23:01:06 +0200 Subject: [PATCH 131/230] GPKG: suppress Coverity Scan false positive about missing lock (CID 1525521) --- .../gpkg/ogrgeopackagetablelayer.cpp | 21 ++++++++++++------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp b/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp index 6268d1111ac4..22e8fc1fb658 100644 --- a/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp +++ b/ogr/ogrsf_frmts/gpkg/ogrgeopackagetablelayer.cpp @@ -1554,7 +1554,7 @@ void OGRGeoPackageTableLayer::CancelAsyncNextArrowArray() { if (m_poFillArrowArray) { - std::lock_guard<std::mutex> oLock(m_poFillArrowArray->oMutex); + std::lock_guard oLock(m_poFillArrowArray->oMutex); m_poFillArrowArray->nCountRows = -1; m_poFillArrowArray->oCV.notify_one(); } @@ -1572,7 +1572,7 @@ void OGRGeoPackageTableLayer::CancelAsyncNextArrowArray() m_oQueueArrowArrayPrefetchTasks.pop(); { - std::lock_guard<std::mutex> oLock(task->m_oMutex); + std::lock_guard oLock(task->m_oMutex); task->m_bStop = true; task->m_oCV.notify_one(); } @@ -8260,7 +8260,7 @@ int OGRGeoPackageTableLayer::GetNextArrowArrayAsynchronous( if (m_poFillArrowArray) { - std::lock_guard<std::mutex> oLock(m_poFillArrowArray->oMutex); + std::lock_guard oLock(m_poFillArrowArray->oMutex); if (m_poFillArrowArray->bIsFinished) { return 0; @@ -8341,7 +8341,7 @@ int OGRGeoPackageTableLayer::GetNextArrowArrayAsynchronous( } else { - std::lock_guard<std::mutex> oLock(m_poFillArrowArray->oMutex); + std::lock_guard oLock(m_poFillArrowArray->oMutex); if (m_poFillArrowArray->bErrorOccurred) { CPLError(CE_Failure, CPLE_AppDefined, "%s", @@ -8514,7 +8514,7 @@ void OGRGeoPackageTableLayer::GetNextArrowArrayAsynchronousWorker() -1, SQLITE_UTF8 | SQLITE_DETERMINISTIC, nullptr, nullptr, nullptr, nullptr); - std::lock_guard<std::mutex> oLock(m_poFillArrowArray->oMutex); + std::lock_guard oLock(m_poFillArrowArray->oMutex); m_poFillArrowArray->bIsFinished = true; if (m_poFillArrowArray->nCountRows >= 0) { @@ -8625,7 +8625,7 @@ int OGRGeoPackageTableLayer::GetNextArrowArray(struct ArrowArrayStream *stream, const auto stopThread = [&task]() { { - std::lock_guard<std::mutex> oLock(task->m_oMutex); + std::lock_guard oLock(task->m_oMutex); task->m_bStop = true; task->m_oCV.notify_one(); } @@ -8675,7 +8675,7 @@ int OGRGeoPackageTableLayer::GetNextArrowArray(struct ArrowArrayStream *stream, { // Wake-up thread with new task { - std::lock_guard<std::mutex> oLock(task->m_oMutex); + std::lock_guard oLock(task->m_oMutex); task->m_bFetchRows = true; task->m_oCV.notify_one(); } @@ -8777,7 +8777,7 @@ int 
OGRGeoPackageTableLayer::GetNextArrowArray(struct ArrowArrayStream *stream, auto taskPtr = task.get(); auto taskRunner = [taskPtr]() { - std::unique_lock<std::mutex> oLock(taskPtr->m_oMutex); + std::unique_lock oLock(taskPtr->m_oMutex); do { taskPtr->m_bFetchRows = false; @@ -8789,6 +8789,11 @@ int OGRGeoPackageTableLayer::GetNextArrowArray(struct ArrowArrayStream *stream, if (taskPtr->m_bMemoryLimitReached) break; // cppcheck-suppress knownConditionTrueFalse + // Coverity apparently is confused by the fact that we + // use unique_lock here to guard access for m_bStop whereas + // in other places we use a lock_guard, but there's nothing + // wrong. + // coverity[missing_lock:FALSE] while (!taskPtr->m_bStop && !taskPtr->m_bFetchRows) { taskPtr->m_oCV.wait(oLock); From 8b569cee98ea0ead9d591fc10bc9875b74ce2042 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 21 Apr 2024 23:01:38 +0200 Subject: [PATCH 132/230] VSIMemHandle::Eof(): add lock (CID 1525499) --- port/cpl_vsi_mem.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/port/cpl_vsi_mem.cpp b/port/cpl_vsi_mem.cpp index a14205a8a4f3..904ef83dc6e0 100644 --- a/port/cpl_vsi_mem.cpp +++ b/port/cpl_vsi_mem.cpp @@ -505,6 +505,7 @@ size_t VSIMemHandle::Write(const void *pBuffer, size_t nSize, size_t nCount) int VSIMemHandle::Eof() { + CPL_SHARED_LOCK oLock(poFile->m_oMutex); return bEOF; } From 8b62275f126c38f2cb1535de75ebd96969da8832 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 21 Apr 2024 23:02:47 +0200 Subject: [PATCH 133/230] MVT: add lock (CID 1525450) --- ogr/ogrsf_frmts/mvt/ogrmvtdataset.cpp | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/ogr/ogrsf_frmts/mvt/ogrmvtdataset.cpp b/ogr/ogrsf_frmts/mvt/ogrmvtdataset.cpp index 4887f26c7cfa..1a9d14dcec01 100644 --- a/ogr/ogrsf_frmts/mvt/ogrmvtdataset.cpp +++ b/ogr/ogrsf_frmts/mvt/ogrmvtdataset.cpp @@ -4265,8 +4265,9 @@ OGRErr OGRMVTWriterDataset::PreGenerateForTileReal( oBuffer.assign(static_cast<char *>(pCompressed), nCompressedSize); CPLFree(pCompressed); + std::unique_ptr<std::lock_guard<std::mutex>> poLockGuard; if (m_bThreadPoolOK) - m_oDBMutex.lock(); + poLockGuard = std::make_unique<std::lock_guard<std::mutex>>(m_oDBMutex); m_nTempTiles++; sqlite3_bind_int(m_hInsertStmt, 1, nZ); @@ -4283,9 +4284,6 @@ OGRErr OGRMVTWriterDataset::PreGenerateForTileReal( int rc = sqlite3_step(m_hInsertStmt); sqlite3_reset(m_hInsertStmt); - if (m_bThreadPoolOK) - m_oDBMutex.unlock(); - if (!(rc == SQLITE_OK || rc == SQLITE_DONE)) { return OGRERR_FAILURE; @@ -4326,9 +4324,8 @@ void OGRMVTWriterDataset::WriterTaskFunc(void *pParam) poTask->nSerial, poTask->poGeom.get(), poTask->sEnvelope); if (eErr != OGRERR_NONE) { - poTask->poDS->m_oDBMutex.lock(); + std::lock_guard oLock(poTask->poDS->m_oDBMutex); poTask->poDS->m_bWriteFeatureError = true; - poTask->poDS->m_oDBMutex.unlock(); } delete poTask; } @@ -4367,6 +4364,7 @@ OGRErr OGRMVTWriterDataset::PreGenerateForTile( // Do not queue more than 1000 jobs to avoid memory exhaustion m_oThreadPool.WaitCompletion(1000); + std::lock_guard oLock(m_oDBMutex); return m_bWriteFeatureError ? 
OGRERR_FAILURE : OGRERR_NONE; } } From 20ab7e7c30de5fa38d67fec8877ceae78fc12507 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 21 Apr 2024 23:29:17 +0200 Subject: [PATCH 134/230] GTiff: replace CPLMutex* usage with std::mutex --- frmts/gtiff/gtiffdataset.cpp | 2 -- frmts/gtiff/gtiffdataset.h | 3 ++- frmts/gtiff/gtiffdataset_write.cpp | 36 +++++++++++++----------------- 3 files changed, 18 insertions(+), 23 deletions(-) diff --git a/frmts/gtiff/gtiffdataset.cpp b/frmts/gtiff/gtiffdataset.cpp index 1e3e56cc47a9..2347c03c4c73 100644 --- a/frmts/gtiff/gtiffdataset.cpp +++ b/frmts/gtiff/gtiffdataset.cpp @@ -248,8 +248,6 @@ std::tuple<CPLErr, bool> GTiffDataset::Finalize() CPLFree(m_asCompressionJobs[i].pszTmpFilename); } } - CPLDestroyMutex(m_hCompressThreadPoolMutex); - m_hCompressThreadPoolMutex = nullptr; m_poCompressQueue.reset(); } diff --git a/frmts/gtiff/gtiffdataset.h b/frmts/gtiff/gtiffdataset.h index 7fb575ba444b..06bb73bbd967 100644 --- a/frmts/gtiff/gtiffdataset.h +++ b/frmts/gtiff/gtiffdataset.h @@ -32,6 +32,7 @@ #include "gdal_pam.h" +#include <mutex> #include <queue> #include "cpl_mem_cache.h" @@ -164,7 +165,7 @@ class GTiffDataset final : public GDALPamDataset CPLVirtualMem *m_psVirtualMemIOMapping = nullptr; CPLWorkerThreadPool *m_poThreadPool = nullptr; std::unique_ptr<CPLJobQueue> m_poCompressQueue{}; - CPLMutex *m_hCompressThreadPoolMutex = nullptr; + std::mutex m_oCompressThreadPoolMutex{}; lru11::Cache<int, std::pair<vsi_l_offset, vsi_l_offset>> m_oCacheStrileToOffsetByteCount{1024}; diff --git a/frmts/gtiff/gtiffdataset_write.cpp b/frmts/gtiff/gtiffdataset_write.cpp index 12867f2d77d1..cddfbef11d88 100644 --- a/frmts/gtiff/gtiffdataset_write.cpp +++ b/frmts/gtiff/gtiffdataset_write.cpp @@ -943,8 +943,6 @@ void GTiffDataset::InitCompressionThreads(bool bUpdateMode, &m_asCompressionJobs[i])); m_asCompressionJobs[i].nStripOrTile = -1; } - m_hCompressThreadPoolMutex = CPLCreateMutex(); - CPLReleaseMutex(m_hCompressThreadPoolMutex); // This is kind of a hack, but basically using // TIFFWriteRawStrip/Tile and then TIFFReadEncodedStrip/Tile @@ -1053,13 +1051,11 @@ void GTiffDataset::ThreadCompressionFunc(void *pData) psJob->nCompressedBufferSize = 0; } - auto mutex = poDS->m_poBaseDS ? poDS->m_poBaseDS->m_hCompressThreadPoolMutex - : poDS->m_hCompressThreadPoolMutex; - if (mutex) + auto poMainDS = poDS->m_poBaseDS ? poDS->m_poBaseDS : poDS; + if (poMainDS->m_poCompressQueue) { - CPLAcquireMutex(mutex, 1000.0); + std::lock_guard oLock(poMainDS->m_oCompressThreadPoolMutex); psJob->bReady = true; - CPLReleaseMutex(mutex); } } @@ -1223,13 +1219,11 @@ void GTiffDataset::WriteRawStripOrTile(int nStripOrTile, void GTiffDataset::WaitCompletionForJobIdx(int i) { - auto poQueue = m_poBaseDS ? m_poBaseDS->m_poCompressQueue.get() - : m_poCompressQueue.get(); - auto &oQueue = m_poBaseDS ? m_poBaseDS->m_asQueueJobIdx : m_asQueueJobIdx; - auto &asJobs = - m_poBaseDS ? m_poBaseDS->m_asCompressionJobs : m_asCompressionJobs; - auto mutex = m_poBaseDS ? m_poBaseDS->m_hCompressThreadPoolMutex - : m_hCompressThreadPoolMutex; + auto poMainDS = m_poBaseDS ? 
m_poBaseDS : this; + auto poQueue = poMainDS->m_poCompressQueue.get(); + auto &oQueue = poMainDS->m_asQueueJobIdx; + auto &asJobs = poMainDS->m_asCompressionJobs; + auto &mutex = poMainDS->m_oCompressThreadPoolMutex; CPLAssert(i >= 0 && static_cast<size_t>(i) < asJobs.size()); CPLAssert(asJobs[i].nStripOrTile >= 0); @@ -1238,9 +1232,11 @@ void GTiffDataset::WaitCompletionForJobIdx(int i) bool bHasWarned = false; while (true) { - CPLAcquireMutex(mutex, 1000.0); - const bool bReady = asJobs[i].bReady; - CPLReleaseMutex(mutex); + bool bReady; + { + std::lock_guard oLock(mutex); + bReady = asJobs[i].bReady; + } if (!bReady) { if (!bHasWarned) @@ -1395,9 +1391,9 @@ bool GTiffDataset::SubmitCompressionJob(int nStripOrTile, GByte *pabyData, return false; } - auto &oQueue = m_poBaseDS ? m_poBaseDS->m_asQueueJobIdx : m_asQueueJobIdx; - auto &asJobs = - m_poBaseDS ? m_poBaseDS->m_asCompressionJobs : m_asCompressionJobs; + auto poMainDS = m_poBaseDS ? m_poBaseDS : this; + auto &oQueue = poMainDS->m_asQueueJobIdx; + auto &asJobs = poMainDS->m_asCompressionJobs; int nNextCompressionJobAvail = -1; From 437de3e94a590dc3fe98a5a1af33e01772934bfd Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 21 Apr 2024 23:11:48 +0200 Subject: [PATCH 135/230] GTiff: avoid Coverity Scan false positive about missing lock (CID 1525497, 1525186) --- frmts/gtiff/gtiffdataset_write.cpp | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/frmts/gtiff/gtiffdataset_write.cpp b/frmts/gtiff/gtiffdataset_write.cpp index cddfbef11d88..220a400f9b82 100644 --- a/frmts/gtiff/gtiffdataset_write.cpp +++ b/frmts/gtiff/gtiffdataset_write.cpp @@ -1262,7 +1262,11 @@ void GTiffDataset::WaitCompletionForJobIdx(int i) } asJobs[i].pabyCompressedBuffer = nullptr; asJobs[i].nBufferSize = 0; - asJobs[i].bReady = false; + { + // Likely useless, but makes Coverity happy + std::lock_guard oLock(mutex); + asJobs[i].bReady = false; + } asJobs[i].nStripOrTile = -1; oQueue.pop(); } From 168fb5b5774714646c09e8b1db2a005cd6815899 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 21 Apr 2024 23:39:16 +0200 Subject: [PATCH 136/230] CPLReleaseLock(): suppress Coverity false positive (CID 1525432) --- port/cpl_multiproc.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/port/cpl_multiproc.cpp b/port/cpl_multiproc.cpp index 4ef3d6268f0e..ba8c48df49a3 100644 --- a/port/cpl_multiproc.cpp +++ b/port/cpl_multiproc.cpp @@ -2587,6 +2587,7 @@ void CPLReleaseLock(CPLLock *psLock) if (psLock->bDebugPerf && CPLAtomicDec(&(psLock->nCurrentHolders)) == 0) { const GUIntBig nStopTime = CPLrdtscp(); + // coverity[missing_lock:FALSE] const GIntBig nDiffTime = static_cast<GIntBig>(nStopTime - psLock->nStartTime); if (nDiffTime > psLock->nMaxDiff) From 36966d6e0939d41951395b60c0a1b96d507768d0 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 21 Apr 2024 23:43:42 +0200 Subject: [PATCH 137/230] cpl_vsil_gzip.cpp: avoid accessing a variable outside of a lock (CID 1525353) --- port/cpl_vsil_gzip.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/port/cpl_vsil_gzip.cpp b/port/cpl_vsil_gzip.cpp index 22c7418ec09a..460208255964 100644 --- a/port/cpl_vsil_gzip.cpp +++ b/port/cpl_vsil_gzip.cpp @@ -2331,10 +2331,12 @@ bool VSIGZipWriteHandleMT::ProcessCompletedJobs() { apoFinishedJobs_.erase(iter); + const bool bIsSeqNumberExpectedZero = + (nSeqNumberExpected_ == 0); sMutex_.unlock(); const size_t nToWrite = psJob->sCompressedData_.size(); - if 
(panSOZIPIndex_ && nSeqNumberExpected_ != 0 && + if (panSOZIPIndex_ && !bIsSeqNumberExpectedZero && !psJob->pBuffer_->empty()) { uint64_t nOffset = poBaseHandle_->Tell() - nStartOffset_; From 5f77a294ce7aec6362eaf1f09d85c56bd68c0d73 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 21 Apr 2024 23:47:33 +0200 Subject: [PATCH 138/230] /vsicurl: suppress false positive missing_lock (CID 1525347, 1525063) --- port/cpl_vsil_curl.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/port/cpl_vsil_curl.cpp b/port/cpl_vsil_curl.cpp index 38d6140529bb..65fbb68b1e1c 100644 --- a/port/cpl_vsil_curl.cpp +++ b/port/cpl_vsil_curl.cpp @@ -3000,6 +3000,7 @@ size_t VSICurlHandle::PRead(void *pBuffer, size_t nSize, { { std::unique_lock<std::mutex> oLock(poRange->oMutex); + // coverity[missing_lock:FALSE] while (!poRange->bDone) { poRange->oCV.wait(oLock); @@ -3472,6 +3473,7 @@ void VSICurlHandle::AdviseRead(int nRanges, const vsi_l_offset *panOffsets, for (size_t i = 0; i < m_aoAdviseReadRanges.size(); ++i) { + // coverity[missing_lock] if (!m_aoAdviseReadRanges[i]->bDone) { DealWithRequest(aHandles[i]); From e8d64a12ee71894b9d6d155e6491f208cc235fce Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 21 Apr 2024 23:51:37 +0200 Subject: [PATCH 139/230] GDALDestroyGlobalThreadPool(): destroy it under mutex (CID 1525281) --- gcore/gdal_thread_pool.cpp | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/gcore/gdal_thread_pool.cpp b/gcore/gdal_thread_pool.cpp index 4c34bc390d28..85bc49c7f00e 100644 --- a/gcore/gdal_thread_pool.cpp +++ b/gcore/gdal_thread_pool.cpp @@ -30,12 +30,21 @@ #include <mutex> -static std::mutex gMutexThreadPool; +// For unclear reasons, attempts at making this a std::unique_ptr<>, even +// through a GetCompressThreadPool() method like GetMutexThreadPool(), lead +// to "ctest -R autotest_alg" (and other autotest components as well) +// to hang forever once the tests have terminated. 
static CPLWorkerThreadPool *gpoCompressThreadPool = nullptr; +static std::mutex &GetMutexThreadPool() +{ + static std::mutex gMutexThreadPool; + return gMutexThreadPool; +} + CPLWorkerThreadPool *GDALGetGlobalThreadPool(int nThreads) { - std::lock_guard<std::mutex> oGuard(gMutexThreadPool); + std::lock_guard oGuard(GetMutexThreadPool()); if (gpoCompressThreadPool == nullptr) { gpoCompressThreadPool = new CPLWorkerThreadPool(); @@ -55,6 +64,7 @@ CPLWorkerThreadPool *GDALGetGlobalThreadPool(int nThreads) void GDALDestroyGlobalThreadPool() { + std::lock_guard oGuard(GetMutexThreadPool()); delete gpoCompressThreadPool; gpoCompressThreadPool = nullptr; } From 651234df260da3a7728d401bbf1125507a01f781 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 21 Apr 2024 23:53:18 +0200 Subject: [PATCH 140/230] cpl_worker_thread_pool.cpp: suppress Coverity Scan false positive about missing lock (CID 1525235) --- port/cpl_worker_thread_pool.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/port/cpl_worker_thread_pool.cpp b/port/cpl_worker_thread_pool.cpp index ccd3f9a4da86..9087cf0270f7 100644 --- a/port/cpl_worker_thread_pool.cpp +++ b/port/cpl_worker_thread_pool.cpp @@ -691,6 +691,7 @@ bool CPLJobQueue::SubmitJob(CPLThreadFunc pfnFunc, void *pData) void CPLJobQueue::WaitCompletion(int nMaxRemainingJobs) { std::unique_lock<std::mutex> oGuard(m_mutex); + // coverity[missing_lock:FALSE] while (m_nPendingJobs > nMaxRemainingJobs) { m_cv.wait(oGuard); From 5cbf8b03a5e59e5282264a20a90312981e88c313 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 21 Apr 2024 23:54:27 +0200 Subject: [PATCH 141/230] GWKProgressMonoThread(): suppress Coverity Scan false positive about missing lock (CID 1525221) --- alg/gdalwarpkernel.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/alg/gdalwarpkernel.cpp b/alg/gdalwarpkernel.cpp index 1bfb1fa78191..09a5981b361f 100644 --- a/alg/gdalwarpkernel.cpp +++ b/alg/gdalwarpkernel.cpp @@ -275,6 +275,7 @@ static int GWKProgressThread(GWKJobStruct *psJob) static int GWKProgressMonoThread(GWKJobStruct *psJob) { GDALWarpKernel *poWK = psJob->poWK; + // coverity[missing_lock] if (!poWK->pfnProgress( poWK->dfProgressBase + poWK->dfProgressScale * From 7ffc5a3b36388d2d3d071f1456aa958857f14faa Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 22 Apr 2024 00:03:36 +0200 Subject: [PATCH 142/230] CPLMutexHolder constructor: try to avoid Coverity LOCK_EVASION warning (CID 1525195) --- port/cpl_multiproc.cpp | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/port/cpl_multiproc.cpp b/port/cpl_multiproc.cpp index ba8c48df49a3..42d559905b8a 100644 --- a/port/cpl_multiproc.cpp +++ b/port/cpl_multiproc.cpp @@ -218,15 +218,23 @@ CPLMutexHolder::CPLMutexHolder(CPLMutex * /* hMutexIn */, { } #else -CPLMutexHolder::CPLMutexHolder(CPLMutex *hMutexIn, double dfWaitInSeconds, - const char *pszFileIn, int nLineIn) - : hMutex(hMutexIn), pszFile(pszFileIn), nLine(nLineIn) + +static CPLMutex *GetMutexHolderMutexMember(CPLMutex *hMutexIn, + double dfWaitInSeconds) { - if (hMutex != nullptr && !CPLAcquireMutex(hMutex, dfWaitInSeconds)) + if (hMutexIn && !CPLAcquireMutex(hMutexIn, dfWaitInSeconds)) { fprintf(stderr, "CPLMutexHolder: Failed to acquire mutex!\n"); - hMutex = nullptr; + return nullptr; } + return hMutexIn; +} + +CPLMutexHolder::CPLMutexHolder(CPLMutex *hMutexIn, double dfWaitInSeconds, + const char *pszFileIn, int nLineIn) + : 
hMutex(GetMutexHolderMutexMember(hMutexIn, dfWaitInSeconds)), + pszFile(pszFileIn), nLine(nLineIn) +{ } #endif // ndef MUTEX_NONE From 6ea32dbdafaad629b307db6fcf49ad9209f1b665 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 22 Apr 2024 00:05:16 +0200 Subject: [PATCH 143/230] test_cpl.cpp: suppress Coverity Scan false positive about missing lock (CID 1525189) --- autotest/cpp/test_cpl.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/autotest/cpp/test_cpl.cpp b/autotest/cpp/test_cpl.cpp index 67be3132cb68..98117591073a 100644 --- a/autotest/cpp/test_cpl.cpp +++ b/autotest/cpp/test_cpl.cpp @@ -4616,6 +4616,7 @@ TEST_F(test_cpl, CPLWorkerThreadPool_recursion) // takes sufficiently long that job 2 has been submitted // before it completes std::unique_lock<std::mutex> guard(psData2->psCtxt->mutex); + // coverity[missing_lock:FALSE] while (!psData2->psCtxt->you_can_leave) { psData2->psCtxt->cv.wait(guard); From d731181da168bcf83ccd34519fd2e6963c49525b Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 22 Apr 2024 00:09:07 +0200 Subject: [PATCH 144/230] overview.cpp: suppress Coverity Scan false positive about missing lock (CID 1525129, 1525097) --- gcore/overview.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/gcore/overview.cpp b/gcore/overview.cpp index 2596b95d9d43..841b4d9c7425 100644 --- a/gcore/overview.cpp +++ b/gcore/overview.cpp @@ -4535,6 +4535,7 @@ CPLErr GDALRegenerateOverviewsEx(GDALRasterBandH hSrcBand, int nOverviewCount, auto poOldestJob = jobList.front().get(); { std::unique_lock<std::mutex> oGuard(poOldestJob->mutex); + // coverity[missing_lock:FALSE] while (!poOldestJob->bFinished) { poOldestJob->cv.wait(oGuard); @@ -5283,6 +5284,7 @@ CPLErr GDALRegenerateOverviewsMultiBand( auto poOldestJob = jobList.front().get(); { std::unique_lock<std::mutex> oGuard(poOldestJob->mutex); + // coverity[missing_lock:FALSE] while (!poOldestJob->bFinished) { poOldestJob->cv.wait(oGuard); From fe5577885dd00a1d1ae7a8655cf103ccf2da2254 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 22 Apr 2024 00:10:18 +0200 Subject: [PATCH 145/230] GDALGridProgressMonoThread(): suppress Coverity Scan false positive about missing lock (CID 1525052) --- alg/gdalgrid.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/alg/gdalgrid.cpp b/alg/gdalgrid.cpp index 62fbbc2634d3..51378a9dd420 100644 --- a/alg/gdalgrid.cpp +++ b/alg/gdalgrid.cpp @@ -2602,6 +2602,7 @@ static int GDALGridProgressMultiThread(GDALGridJob *psJob) static int GDALGridProgressMonoThread(GDALGridJob *psJob) { const int nCounter = ++(*psJob->pnCounter); + // coverity[missing_lock] if (!psJob->pfnRealProgress(nCounter / static_cast<double>(psJob->nYSize), "", psJob->pRealProgressArg)) { From e41215eaa128eec9224844a900cb422df33e8892 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 22 Apr 2024 19:23:44 +0200 Subject: [PATCH 146/230] GDALSuggestedWarpOutput2(): make changes of PR #9336 honour SRC_METHOD=NO_GEOTRANSFORM Fixes failure on rasterio tests/test_warpedvrt.py::test_transformer_options__width_height --- alg/gdaltransformer.cpp | 104 +++++++++++++++++++--------------------- 1 file changed, 49 insertions(+), 55 deletions(-) diff --git a/alg/gdaltransformer.cpp b/alg/gdaltransformer.cpp index 4c619ac70d40..bff7c93cc3bc 100644 --- a/alg/gdaltransformer.cpp +++ b/alg/gdaltransformer.cpp @@ -397,73 +397,67 @@ CPLErr CPL_STDCALL GDALSuggestedWarpOutput2(GDALDatasetH hSrcDS, if ((!nOptions || (nOptions 
& GDAL_SWO_FORCE_SQUARE_PIXEL) == 0) && pTransformArg && bIsGDALGenImgProjTransform) { - double adfGeoTransform[6]; + const GDALGenImgProjTransformInfo *psInfo = + static_cast<const GDALGenImgProjTransformInfo *>(pTransformArg); - if (GDALGetGeoTransform(hSrcDS, adfGeoTransform) == CE_None && - adfGeoTransform[2] == 0.0 && adfGeoTransform[4] == 0.0) + if (!psInfo->pSrcTransformer && + !psInfo->bHasCustomTransformationPipeline && + !psInfo->pDstTransformer && psInfo->adfSrcGeoTransform[2] == 0 && + psInfo->adfSrcGeoTransform[4] == 0 && + psInfo->adfDstGeoTransform[0] == 0 && + psInfo->adfDstGeoTransform[1] == 1 && + psInfo->adfDstGeoTransform[2] == 0 && + psInfo->adfDstGeoTransform[3] == 0 && + psInfo->adfDstGeoTransform[4] == 0 && + psInfo->adfDstGeoTransform[5] == 1) { - const GDALGenImgProjTransformInfo *psInfo = - static_cast<const GDALGenImgProjTransformInfo *>(pTransformArg); - - if (psInfo && !psInfo->pSrcTransformer && - !psInfo->bHasCustomTransformationPipeline && - !psInfo->pDstTransformer && - psInfo->adfDstGeoTransform[0] == 0 && - psInfo->adfDstGeoTransform[1] == 1 && - psInfo->adfDstGeoTransform[2] == 0 && - psInfo->adfDstGeoTransform[3] == 0 && - psInfo->adfDstGeoTransform[4] == 0 && - psInfo->adfDstGeoTransform[5] == 1) - { - const OGRSpatialReference *poSourceCRS = nullptr; - const OGRSpatialReference *poTargetCRS = nullptr; + const OGRSpatialReference *poSourceCRS = nullptr; + const OGRSpatialReference *poTargetCRS = nullptr; - if (psInfo->pReprojectArg) - { - const GDALReprojectionTransformInfo *psRTI = - static_cast<const GDALReprojectionTransformInfo *>( - psInfo->pReprojectArg); - poSourceCRS = psRTI->poForwardTransform->GetSourceCS(); - poTargetCRS = psRTI->poForwardTransform->GetTargetCS(); - } + if (psInfo->pReprojectArg) + { + const GDALReprojectionTransformInfo *psRTI = + static_cast<const GDALReprojectionTransformInfo *>( + psInfo->pReprojectArg); + poSourceCRS = psRTI->poForwardTransform->GetSourceCS(); + poTargetCRS = psRTI->poForwardTransform->GetTargetCS(); + } - if ((!poSourceCRS && !poTargetCRS) || - (poSourceCRS && poTargetCRS && - poSourceCRS->IsSame(poTargetCRS))) - { + if ((!poSourceCRS && !poTargetCRS) || + (poSourceCRS && poTargetCRS && + poSourceCRS->IsSame(poTargetCRS))) + { - const bool bNorthUp{adfGeoTransform[5] < 0.0}; + const bool bNorthUp{psInfo->adfSrcGeoTransform[5] < 0.0}; - memcpy(padfGeoTransformOut, adfGeoTransform, - sizeof(double) * 6); + memcpy(padfGeoTransformOut, psInfo->adfSrcGeoTransform, + sizeof(double) * 6); - if (!bNorthUp) - { - padfGeoTransformOut[3] = - padfGeoTransformOut[3] + - nInYSize * padfGeoTransformOut[5]; - padfGeoTransformOut[5] = -padfGeoTransformOut[5]; - } + if (!bNorthUp) + { + padfGeoTransformOut[3] = padfGeoTransformOut[3] + + nInYSize * padfGeoTransformOut[5]; + padfGeoTransformOut[5] = -padfGeoTransformOut[5]; + } - *pnPixels = nInXSize; - *pnLines = nInYSize; + *pnPixels = nInXSize; + *pnLines = nInYSize; - // Calculate extent from hSrcDS - if (padfExtent) + // Calculate extent from hSrcDS + if (padfExtent) + { + padfExtent[0] = psInfo->adfSrcGeoTransform[0]; + padfExtent[1] = psInfo->adfSrcGeoTransform[3] + + nInYSize * psInfo->adfSrcGeoTransform[5]; + padfExtent[2] = psInfo->adfSrcGeoTransform[0] + + nInXSize * psInfo->adfSrcGeoTransform[1]; + padfExtent[3] = psInfo->adfSrcGeoTransform[3]; + if (!bNorthUp) { - padfExtent[0] = adfGeoTransform[0]; - padfExtent[1] = - adfGeoTransform[3] + nInYSize * adfGeoTransform[5]; - padfExtent[2] = - adfGeoTransform[0] + nInXSize * adfGeoTransform[1]; - padfExtent[3] 
= adfGeoTransform[3]; - if (!bNorthUp) - { - std::swap(padfExtent[1], padfExtent[3]); - } + std::swap(padfExtent[1], padfExtent[3]); } - return CE_None; } + return CE_None; } } } From fe9f10d609f122c635741c440898f34d76222b19 Mon Sep 17 00:00:00 2001 From: Theodore Tsirpanis <theodore.tsirpanis@tiledb.com> Date: Mon, 22 Apr 2024 20:10:51 +0300 Subject: [PATCH 147/230] TileDB: migrate away from deprecated APIs --- frmts/tiledb/tiledbdense.cpp | 33 ++++-- frmts/tiledb/tiledbmultidimarray.cpp | 10 +- frmts/tiledb/tiledbsparse.cpp | 155 ++++++++++++++++----------- 3 files changed, 121 insertions(+), 77 deletions(-) diff --git a/frmts/tiledb/tiledbdense.cpp b/frmts/tiledb/tiledbdense.cpp index b213e3ca2ab5..cb159630c509 100644 --- a/frmts/tiledb/tiledbdense.cpp +++ b/frmts/tiledb/tiledbdense.cpp @@ -374,12 +374,16 @@ CPLErr TileDBRasterBand::IRasterIO(GDALRWFlag eRWFlag, int nXOff, int nYOff, if (poGDS->m_array->schema().domain().ndim() == 3) { - poQuery->set_subarray(oaSubarray); + tiledb::Subarray subarray(*poGDS->m_roCtx, *poGDS->m_roArray); + subarray.set_subarray(oaSubarray); + poQuery->set_subarray(subarray); } else { - poQuery->set_subarray(std::vector<uint64_t>(oaSubarray.cbegin() + 2, + tiledb::Subarray subarray(*poGDS->m_roCtx, *poGDS->m_roArray); + subarray.set_subarray(std::vector<uint64_t>(oaSubarray.cbegin() + 2, oaSubarray.cend())); + poQuery->set_subarray(subarray); } SetBuffer(poQuery.get(), eDataType, osAttrName, pData, nXSize * nYSize); @@ -576,7 +580,9 @@ CPLErr TileDBRasterDataset::IRasterIO( if (poQuery != nullptr) { - poQuery->set_subarray(oaSubarray); + tiledb::Subarray subarray(*m_roCtx, *m_array); + subarray.set_subarray(oaSubarray); + poQuery->set_subarray(subarray); for (int b = 0; b < nBandCount; b++) { @@ -781,7 +787,8 @@ CPLErr TileDBRasterDataset::TrySaveXML() if (nTimestamp) { auto oMeta = std::unique_ptr<tiledb::Array>(new tiledb::Array( - *m_ctx, m_array->uri(), TILEDB_WRITE, nTimestamp)); + *m_ctx, m_array->uri(), TILEDB_WRITE, + tiledb::TemporalPolicy(tiledb::TimeTravel, nTimestamp))); oMeta->put_metadata(GDAL_ATTRIBUTE_NAME, TILEDB_UINT8, static_cast<int>(strlen(pszTree)), pszTree); oMeta->close(); @@ -1177,12 +1184,14 @@ GDALDataset *TileDBRasterDataset::Open(GDALOpenInfo *poOpenInfo) if (eMode == TILEDB_READ) { poDS->m_array.reset(new tiledb::Array( - *poDS->m_ctx, osArrayPath, TILEDB_READ, poDS->nTimestamp)); + *poDS->m_ctx, osArrayPath, TILEDB_READ, + tiledb::TemporalPolicy(tiledb::TimeTravel, poDS->nTimestamp))); } else { poDS->m_array.reset(new tiledb::Array( - *poDS->m_ctx, osArrayPath, TILEDB_WRITE, poDS->nTimestamp)); + *poDS->m_ctx, osArrayPath, TILEDB_WRITE, + tiledb::TemporalPolicy(tiledb::TimeTravel, poDS->nTimestamp))); } } else @@ -2081,9 +2090,10 @@ CPLErr TileDBRasterDataset::CopySubDatasets(GDALDataset *poSrcDS, if (poDstDS->nTimestamp) { - poDstDS->m_array.reset( - new tiledb::Array(*poDstDS->m_ctx, poDstDS->GetDescription(), - TILEDB_WRITE, poDstDS->nTimestamp)); + poDstDS->m_array.reset(new tiledb::Array( + *poDstDS->m_ctx, poDstDS->GetDescription(), TILEDB_WRITE, + tiledb::TemporalPolicy(tiledb::TimeTravel, + poDstDS->nTimestamp))); } else poDstDS->m_array.reset(new tiledb::Array( @@ -2196,8 +2206,9 @@ GDALDataset *TileDBRasterDataset::Create(const char *pszFilename, int nXSize, tiledb::Array::create(osArrayPath, *poDS->m_schema); if (poDS->nTimestamp) - poDS->m_array.reset(new tiledb::Array(*poDS->m_ctx, osArrayPath, - TILEDB_WRITE, poDS->nTimestamp)); + poDS->m_array.reset(new tiledb::Array( + *poDS->m_ctx, osArrayPath, TILEDB_WRITE, 
+ tiledb::TemporalPolicy(tiledb::TimeTravel, poDS->nTimestamp))); else poDS->m_array.reset( new tiledb::Array(*poDS->m_ctx, osArrayPath, TILEDB_WRITE)); diff --git a/frmts/tiledb/tiledbmultidimarray.cpp b/frmts/tiledb/tiledbmultidimarray.cpp index dc5c57db1e08..09649188d551 100644 --- a/frmts/tiledb/tiledbmultidimarray.cpp +++ b/frmts/tiledb/tiledbmultidimarray.cpp @@ -924,7 +924,10 @@ bool TileDBArray::IRead(const GUInt64 *arrayStartIdx, const size_t *count, { tiledb::Query query(m_poSharedResource->GetCtx(), *(m_poTileDBArray.get())); - query.set_subarray(anSubArray); + tiledb::Subarray subarray(m_poSharedResource->GetCtx(), + *(m_poTileDBArray.get())); + subarray.set_subarray(anSubArray); + query.set_subarray(subarray); query.set_data_buffer(m_osAttrName, pDstBuffer, nBufferSize); if (m_bStats) @@ -995,7 +998,10 @@ bool TileDBArray::IWrite(const GUInt64 *arrayStartIdx, const size_t *count, { tiledb::Query query(m_poSharedResource->GetCtx(), *(m_poTileDBArray.get())); - query.set_subarray(anSubArray); + tiledb::Subarray subarray(m_poSharedResource->GetCtx(), + *(m_poTileDBArray.get())); + subarray.set_subarray(anSubArray); + query.set_subarray(subarray); query.set_data_buffer(m_osAttrName, const_cast<void *>(pSrcBuffer), nBufferSize); diff --git a/frmts/tiledb/tiledbsparse.cpp b/frmts/tiledb/tiledbsparse.cpp index eeea3f433b08..1d062d69d818 100644 --- a/frmts/tiledb/tiledbsparse.cpp +++ b/frmts/tiledb/tiledbsparse.cpp @@ -717,8 +717,9 @@ bool OGRTileDBLayer::InitFromStorage(tiledb::Context *poCtx, } if (m_nTimestamp) - m_array.reset( - new tiledb::Array(*m_ctx, m_osFilename, TILEDB_READ, m_nTimestamp)); + m_array.reset(new tiledb::Array( + *m_ctx, m_osFilename, TILEDB_READ, + tiledb::TemporalPolicy(tiledb::TimeTravel, m_nTimestamp))); else m_array.reset(new tiledb::Array(*m_ctx, m_osFilename, TILEDB_READ)); @@ -1238,7 +1239,7 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) m_anFIDs->resize(m_nBatchSize); if (!m_osFIDColumn.empty()) { - m_query->set_buffer(m_osFIDColumn, *(m_anFIDs)); + m_query->set_data_buffer(m_osFIDColumn, *(m_anFIDs)); } if (!m_poFeatureDefn->GetGeomFieldDefn(0)->IsIgnored()) @@ -1254,16 +1255,19 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) const auto colType = m_schema->attribute(pszGeomColName).type(); if (colType == TILEDB_UINT8) { - m_query->set_buffer(pszGeomColName, *m_anGeometryOffsets, - *m_abyGeometries); + m_query->set_data_buffer(pszGeomColName, *m_abyGeometries); + m_query->set_offsets_buffer(pszGeomColName, + *m_anGeometryOffsets); } else if (colType == TILEDB_BLOB) { - m_query->set_buffer( - pszGeomColName, m_anGeometryOffsets->data(), - m_anGeometryOffsets->size(), + m_query->set_data_buffer( + pszGeomColName, reinterpret_cast<std::byte *>(m_abyGeometries->data()), m_abyGeometries->size()); + m_query->set_offsets_buffer(pszGeomColName, + m_anGeometryOffsets->data(), + m_anGeometryOffsets->size()); } else { @@ -1273,15 +1277,15 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) else { m_adfXs->resize(m_nBatchSize); - m_query->set_buffer(m_osXDim, *m_adfXs); + m_query->set_data_buffer(m_osXDim, *m_adfXs); m_adfYs->resize(m_nBatchSize); - m_query->set_buffer(m_osYDim, *m_adfYs); + m_query->set_data_buffer(m_osYDim, *m_adfYs); if (!m_osZDim.empty()) { m_adfZs->resize(m_nBatchSize); - m_query->set_buffer(m_osZDim, *m_adfZs); + m_query->set_data_buffer(m_osZDim, *m_adfZs); } } } @@ -1313,9 +1317,9 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) 
std::get<std::shared_ptr<VECTOR_OF_BOOL>>(fieldValues)); v.resize(m_nBatchSize); #ifdef VECTOR_OF_BOOL_IS_NOT_UINT8_T - m_query->set_buffer(pszFieldName, v.data(), v.size()); + m_query->set_data_buffer(pszFieldName, v.data(), v.size()); #else - m_query->set_buffer(pszFieldName, v); + m_query->set_data_buffer(pszFieldName, v); #endif } else @@ -1325,21 +1329,21 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) auto &v = *(std::get<std::shared_ptr<std::vector<int16_t>>>( fieldValues)); v.resize(m_nBatchSize); - m_query->set_buffer(pszFieldName, v); + m_query->set_data_buffer(pszFieldName, v); } else if (m_aeFieldTypes[i] == TILEDB_INT32) { auto &v = *(std::get<std::shared_ptr<std::vector<int32_t>>>( fieldValues)); v.resize(m_nBatchSize); - m_query->set_buffer(pszFieldName, v); + m_query->set_data_buffer(pszFieldName, v); } else if (m_aeFieldTypes[i] == TILEDB_UINT8) { auto &v = *(std::get<std::shared_ptr<std::vector<uint8_t>>>( fieldValues)); v.resize(m_nBatchSize); - m_query->set_buffer(pszFieldName, v); + m_query->set_data_buffer(pszFieldName, v); } else if (m_aeFieldTypes[i] == TILEDB_UINT16) { @@ -1347,7 +1351,7 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) *(std::get<std::shared_ptr<std::vector<uint16_t>>>( fieldValues)); v.resize(m_nBatchSize); - m_query->set_buffer(pszFieldName, v); + m_query->set_data_buffer(pszFieldName, v); } else { @@ -1390,7 +1394,8 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) pszFieldName, m_anFieldValuesCapacity[i], nMulFactor)); m_anFieldValuesCapacity[i] = v.capacity(); anOffsets.resize(m_nBatchSize); - m_query->set_buffer(pszFieldName, anOffsets, v); + m_query->set_data_buffer(pszFieldName, v); + m_query->set_offsets_buffer(pszFieldName, anOffsets); } else if (m_aeFieldTypes[i] == TILEDB_INT32) { @@ -1400,7 +1405,8 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) pszFieldName, m_anFieldValuesCapacity[i], nMulFactor)); m_anFieldValuesCapacity[i] = v.capacity(); anOffsets.resize(m_nBatchSize); - m_query->set_buffer(pszFieldName, anOffsets, v); + m_query->set_data_buffer(pszFieldName, v); + m_query->set_offsets_buffer(pszFieldName, anOffsets); } else if (m_aeFieldTypes[i] == TILEDB_UINT8) { @@ -1410,7 +1416,8 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) pszFieldName, m_anFieldValuesCapacity[i], nMulFactor)); m_anFieldValuesCapacity[i] = v.capacity(); anOffsets.resize(m_nBatchSize); - m_query->set_buffer(pszFieldName, anOffsets, v); + m_query->set_data_buffer(pszFieldName, v); + m_query->set_offsets_buffer(pszFieldName, anOffsets); } else if (m_aeFieldTypes[i] == TILEDB_UINT16) { @@ -1421,7 +1428,8 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) pszFieldName, m_anFieldValuesCapacity[i], nMulFactor)); m_anFieldValuesCapacity[i] = v.capacity(); anOffsets.resize(m_nBatchSize); - m_query->set_buffer(pszFieldName, anOffsets, v); + m_query->set_data_buffer(pszFieldName, v); + m_query->set_offsets_buffer(pszFieldName, anOffsets); } else { @@ -1438,7 +1446,7 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) auto &v = *(std::get<std::shared_ptr<std::vector<int64_t>>>( fieldValues)); v.resize(m_nBatchSize); - m_query->set_buffer(pszFieldName, v); + m_query->set_data_buffer(pszFieldName, v); break; } @@ -1456,7 +1464,8 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) nMulFactor)); m_anFieldValuesCapacity[i] = v.capacity(); anOffsets.resize(m_nBatchSize); - m_query->set_buffer(pszFieldName, anOffsets, v); + 
m_query->set_data_buffer(pszFieldName, v); + m_query->set_offsets_buffer(pszFieldName, anOffsets); break; } @@ -1467,14 +1476,14 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) auto &v = *(std::get<std::shared_ptr<std::vector<float>>>( fieldValues)); v.resize(m_nBatchSize); - m_query->set_buffer(pszFieldName, v); + m_query->set_data_buffer(pszFieldName, v); } else { auto &v = *(std::get<std::shared_ptr<std::vector<double>>>( fieldValues)); v.resize(m_nBatchSize); - m_query->set_buffer(pszFieldName, v); + m_query->set_data_buffer(pszFieldName, v); } break; } @@ -1495,7 +1504,8 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) v.resize(GetValueSize( pszFieldName, m_anFieldValuesCapacity[i], nMulFactor)); m_anFieldValuesCapacity[i] = v.capacity(); - m_query->set_buffer(pszFieldName, anOffsets, v); + m_query->set_data_buffer(pszFieldName, v); + m_query->set_offsets_buffer(pszFieldName, anOffsets); } else { @@ -1504,7 +1514,8 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) v.resize(GetValueSize( pszFieldName, m_anFieldValuesCapacity[i], nMulFactor)); m_anFieldValuesCapacity[i] = v.capacity(); - m_query->set_buffer(pszFieldName, anOffsets, v); + m_query->set_data_buffer(pszFieldName, v); + m_query->set_offsets_buffer(pszFieldName, anOffsets); } break; } @@ -1520,7 +1531,8 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) : 8)); m_anFieldValuesCapacity[i] = v.capacity(); anOffsets.resize(m_nBatchSize); - m_query->set_buffer(pszFieldName, anOffsets, v); + m_query->set_data_buffer(pszFieldName, v); + m_query->set_offsets_buffer(pszFieldName, anOffsets); break; } @@ -1538,13 +1550,16 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) anOffsets.resize(m_nBatchSize); if (eType == TILEDB_UINT8) { - m_query->set_buffer(pszFieldName, anOffsets, v); + m_query->set_data_buffer(pszFieldName, v); + m_query->set_offsets_buffer(pszFieldName, anOffsets); } else if (eType == TILEDB_BLOB) { - m_query->set_buffer( - pszFieldName, anOffsets.data(), anOffsets.size(), - reinterpret_cast<std::byte *>(v.data()), v.size()); + m_query->set_data_buffer( + pszFieldName, reinterpret_cast<std::byte *>(v.data()), + v.size()); + m_query->set_offsets_buffer(pszFieldName, anOffsets.data(), + anOffsets.size()); } else { @@ -2193,8 +2208,9 @@ void OGRTileDBLayer::SwitchToReadingMode() try { if (m_nTimestamp) - m_array.reset(new tiledb::Array(*m_ctx, m_osFilename, - TILEDB_READ, m_nTimestamp)); + m_array.reset(new tiledb::Array( + *m_ctx, m_osFilename, TILEDB_READ, + tiledb::TemporalPolicy(tiledb::TimeTravel, m_nTimestamp))); else m_array.reset( new tiledb::Array(*m_ctx, m_osFilename, TILEDB_READ)); @@ -3747,8 +3763,9 @@ void OGRTileDBLayer::InitializeSchemaAndArray() #endif if (m_nTimestamp) - m_array.reset(new tiledb::Array(*m_ctx, m_osFilename, TILEDB_WRITE, - m_nTimestamp)); + m_array.reset(new tiledb::Array( + *m_ctx, m_osFilename, TILEDB_WRITE, + tiledb::TemporalPolicy(tiledb::TimeTravel, m_nTimestamp))); else m_array.reset( new tiledb::Array(*m_ctx, m_osFilename, TILEDB_WRITE)); @@ -3864,8 +3881,9 @@ void OGRTileDBLayer::SwitchToWritingMode() try { if (m_nTimestamp) - m_array.reset(new tiledb::Array(*m_ctx, m_osFilename, - TILEDB_WRITE, m_nTimestamp)); + m_array.reset(new tiledb::Array( + *m_ctx, m_osFilename, TILEDB_WRITE, + tiledb::TemporalPolicy(tiledb::TimeTravel, m_nTimestamp))); else m_array.reset( new tiledb::Array(*m_ctx, m_osFilename, TILEDB_WRITE)); @@ -4275,11 +4293,11 @@ void OGRTileDBLayer::FlushArrays() 
tiledb::Query query(*m_ctx, *m_array); query.set_layout(TILEDB_UNORDERED); if (!m_osFIDColumn.empty()) - query.set_buffer(m_osFIDColumn, *m_anFIDs); - query.set_buffer(m_osXDim, *m_adfXs); - query.set_buffer(m_osYDim, *m_adfYs); + query.set_data_buffer(m_osFIDColumn, *m_anFIDs); + query.set_data_buffer(m_osXDim, *m_adfXs); + query.set_data_buffer(m_osYDim, *m_adfYs); if (!m_osZDim.empty()) - query.set_buffer(m_osZDim, *m_adfZs); + query.set_data_buffer(m_osZDim, *m_adfZs); const char *pszGeomColName = GetDatabaseGeomColName(); if (pszGeomColName) @@ -4287,16 +4305,18 @@ void OGRTileDBLayer::FlushArrays() m_anGeometryOffsets->pop_back(); if (m_schema->attribute(pszGeomColName).type() == TILEDB_UINT8) { - query.set_buffer(pszGeomColName, *m_anGeometryOffsets, - *m_abyGeometries); + query.set_data_buffer(pszGeomColName, *m_abyGeometries); + query.set_offsets_buffer(pszGeomColName, *m_anGeometryOffsets); } else if (m_schema->attribute(pszGeomColName).type() == TILEDB_BLOB) { - query.set_buffer( - pszGeomColName, m_anGeometryOffsets->data(), - m_anGeometryOffsets->size(), + query.set_data_buffer( + pszGeomColName, reinterpret_cast<std::byte *>(m_abyGeometries->data()), m_abyGeometries->size()); + query.set_offsets_buffer(pszGeomColName, + m_anGeometryOffsets->data(), + m_anGeometryOffsets->size()); } else { @@ -4332,9 +4352,9 @@ void OGRTileDBLayer::FlushArrays() auto &v = *(std::get<std::shared_ptr<VECTOR_OF_BOOL>>( fieldValues)); #ifdef VECTOR_OF_BOOL_IS_NOT_UINT8_T - query.set_buffer(pszFieldName, v.data(), v.size()); + query.set_data_buffer(pszFieldName, v.data(), v.size()); #else - query.set_buffer(pszFieldName, v); + query.set_data_buffer(pszFieldName, v); #endif } else @@ -4372,7 +4392,7 @@ void OGRTileDBLayer::FlushArrays() case OFTInteger64: { - query.set_buffer( + query.set_data_buffer( pszFieldName, *std::get<std::shared_ptr<std::vector<int64_t>>>( fieldValues)); @@ -4382,10 +4402,11 @@ void OGRTileDBLayer::FlushArrays() case OFTInteger64List: { anOffsets.pop_back(); - query.set_buffer( - pszFieldName, anOffsets, + query.set_data_buffer( + pszFieldName, *std::get<std::shared_ptr<std::vector<int64_t>>>( fieldValues)); + query.set_offsets_buffer(pszFieldName, anOffsets); break; } @@ -4393,14 +4414,14 @@ void OGRTileDBLayer::FlushArrays() { if (poFieldDefn->GetSubType() == OFSTFloat32) { - query.set_buffer( + query.set_data_buffer( pszFieldName, *std::get<std::shared_ptr<std::vector<float>>>( fieldValues)); } else { - query.set_buffer( + query.set_data_buffer( pszFieldName, *std::get<std::shared_ptr<std::vector<double>>>( fieldValues)); @@ -4413,17 +4434,19 @@ void OGRTileDBLayer::FlushArrays() anOffsets.pop_back(); if (poFieldDefn->GetSubType() == OFSTFloat32) { - query.set_buffer( - pszFieldName, anOffsets, + query.set_data_buffer( + pszFieldName, *std::get<std::shared_ptr<std::vector<float>>>( fieldValues)); + query.set_offsets_buffer(pszFieldName, anOffsets); } else { - query.set_buffer( - pszFieldName, anOffsets, + query.set_data_buffer( + pszFieldName, *std::get<std::shared_ptr<std::vector<double>>>( fieldValues)); + query.set_offsets_buffer(pszFieldName, anOffsets); } break; } @@ -4431,9 +4454,10 @@ void OGRTileDBLayer::FlushArrays() case OFTString: { anOffsets.pop_back(); - query.set_buffer( - pszFieldName, anOffsets, + query.set_data_buffer( + pszFieldName, *std::get<std::shared_ptr<std::string>>(fieldValues)); + query.set_offsets_buffer(pszFieldName, anOffsets); break; } @@ -4444,13 +4468,16 @@ void OGRTileDBLayer::FlushArrays() fieldValues)); if (m_aeFieldTypes[i] == 
TILEDB_UINT8) { - query.set_buffer(pszFieldName, anOffsets, v); + query.set_data_buffer(pszFieldName, v); + query.set_offsets_buffer(pszFieldName, anOffsets); } else if (m_aeFieldTypes[i] == TILEDB_BLOB) { - query.set_buffer( - pszFieldName, anOffsets.data(), anOffsets.size(), + query.set_data_buffer( + pszFieldName, reinterpret_cast<std::byte *>(v.data()), v.size()); + query.set_offsets_buffer(pszFieldName, anOffsets.data(), + anOffsets.size()); } else { @@ -4463,7 +4490,7 @@ void OGRTileDBLayer::FlushArrays() case OFTDateTime: case OFTTime: { - query.set_buffer( + query.set_data_buffer( pszFieldName, *std::get<std::shared_ptr<std::vector<int64_t>>>( fieldValues)); From dad01e672a8956bc95c33d72762d4a63183ad6ec Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 22 Apr 2024 21:54:09 +0200 Subject: [PATCH 148/230] TileDB: fix crash in previous commit --- frmts/tiledb/tiledbdense.cpp | 88 +++++++++++++++--------------------- 1 file changed, 37 insertions(+), 51 deletions(-) diff --git a/frmts/tiledb/tiledbdense.cpp b/frmts/tiledb/tiledbdense.cpp index cb159630c509..3090bc670e77 100644 --- a/frmts/tiledb/tiledbdense.cpp +++ b/frmts/tiledb/tiledbdense.cpp @@ -351,7 +351,6 @@ CPLErr TileDBRasterBand::IRasterIO(GDALRWFlag eRWFlag, int nXOff, int nYOff, nBufferDTSize > 0 && (nPixelSpace % nBufferDTSize) == 0 && (nLineSpace % nBufferDTSize) == 0) { - std::unique_ptr<tiledb::Query> poQuery; const uint64_t nBandIdx = poGDS->nBandStart + nBand - 1; std::vector<uint64_t> oaSubarray = { nBandIdx, nBandIdx, @@ -361,30 +360,25 @@ CPLErr TileDBRasterBand::IRasterIO(GDALRWFlag eRWFlag, int nXOff, int nYOff, std::rotate(oaSubarray.begin(), oaSubarray.begin() + 2, oaSubarray.end()); - if ((eRWFlag == GF_Read) && - ((eAccess == GA_Update) && (poGDS->m_roArray))) - { - poQuery.reset( - new tiledb::Query(*poGDS->m_roCtx, *poGDS->m_roArray)); - } - else - { - poQuery.reset(new tiledb::Query(*poGDS->m_ctx, *poGDS->m_array)); - } + const bool bUseReadOnlyObjs = + ((eRWFlag == GF_Read) && (eAccess == GA_Update) && + (poGDS->m_roArray)); + const auto &oCtxt = bUseReadOnlyObjs ? *poGDS->m_roCtx : *poGDS->m_ctx; + const auto &oArray = + bUseReadOnlyObjs ? 
*poGDS->m_roArray : *poGDS->m_array; + auto poQuery = std::make_unique<tiledb::Query>(oCtxt, oArray); + tiledb::Subarray subarray(oCtxt, oArray); if (poGDS->m_array->schema().domain().ndim() == 3) { - tiledb::Subarray subarray(*poGDS->m_roCtx, *poGDS->m_roArray); subarray.set_subarray(oaSubarray); - poQuery->set_subarray(subarray); } else { - tiledb::Subarray subarray(*poGDS->m_roCtx, *poGDS->m_roArray); subarray.set_subarray(std::vector<uint64_t>(oaSubarray.cbegin() + 2, oaSubarray.cend())); - poQuery->set_subarray(subarray); } + poQuery->set_subarray(subarray); SetBuffer(poQuery.get(), eDataType, osAttrName, pData, nXSize * nYSize); @@ -564,52 +558,44 @@ CPLErr TileDBRasterDataset::IRasterIO( nBufferDTSize > 0 && (nPixelSpace % nBufferDTSize) == 0 && (nLineSpace % nBufferDTSize) == 0) { - std::unique_ptr<tiledb::Query> poQuery; std::vector<uint64_t> oaSubarray = { (uint64_t)nYOff, (uint64_t)nYOff + nYSize - 1, (uint64_t)nXOff, (uint64_t)nXOff + nXSize - 1}; - if ((eRWFlag == GF_Read) && (eAccess == GA_Update && m_roArray)) - { - poQuery.reset(new tiledb::Query(*m_roCtx, *m_roArray)); - } - else - { - poQuery.reset(new tiledb::Query(*m_ctx, *m_array)); - } - - if (poQuery != nullptr) - { - tiledb::Subarray subarray(*m_roCtx, *m_array); - subarray.set_subarray(oaSubarray); - poQuery->set_subarray(subarray); + const bool bUseReadOnlyObjs = + ((eRWFlag == GF_Read) && (eAccess == GA_Update) && (m_roArray)); + const auto &oCtxt = bUseReadOnlyObjs ? *m_roCtx : *m_ctx; + const auto &oArray = bUseReadOnlyObjs ? *m_roArray : *m_array; - for (int b = 0; b < nBandCount; b++) - { - TileDBRasterBand *poBand = - (TileDBRasterBand *)GetRasterBand(panBandMap[b]); - int nRegionSize = nBufXSize * nBufYSize * nBufferDTSize; - SetBuffer(poQuery.get(), eDataType, poBand->osAttrName, - ((GByte *)pData) + b * nRegionSize, nRegionSize); - } + auto poQuery = std::make_unique<tiledb::Query>(oCtxt, oArray); + tiledb::Subarray subarray(oCtxt, oArray); + subarray.set_subarray(oaSubarray); + poQuery->set_subarray(subarray); - if (bStats) - tiledb::Stats::enable(); + for (int b = 0; b < nBandCount; b++) + { + TileDBRasterBand *poBand = + (TileDBRasterBand *)GetRasterBand(panBandMap[b]); + int nRegionSize = nBufXSize * nBufYSize * nBufferDTSize; + SetBuffer(poQuery.get(), eDataType, poBand->osAttrName, + ((GByte *)pData) + b * nRegionSize, nRegionSize); + } - auto status = poQuery->submit(); + if (bStats) + tiledb::Stats::enable(); - if (bStats) - { - tiledb::Stats::dump(stdout); - tiledb::Stats::disable(); - } + auto status = poQuery->submit(); - if (status == tiledb::Query::Status::FAILED) - return CE_Failure; - else - return CE_None; + if (bStats) + { + tiledb::Stats::dump(stdout); + tiledb::Stats::disable(); } - return CE_Failure; + + if (status == tiledb::Query::Status::FAILED) + return CE_Failure; + else + return CE_None; } return GDALPamDataset::IRasterIO(eRWFlag, nXOff, nYOff, nXSize, nYSize, From 462b147ebb624aa4b810e06677f4f3e7bf99d590 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 22 Apr 2024 22:29:17 +0200 Subject: [PATCH 149/230] gdalurlopen(): handle http.client.RemoteDisconnected exception should avoid failures such as https://github.com/OSGeo/gdal/actions/runs/8788601487/job/24116441947?pr=9724 --- autotest/pymod/gdaltest.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/autotest/pymod/gdaltest.py b/autotest/pymod/gdaltest.py index 509e7f87f52a..c307165e983a 100755 --- a/autotest/pymod/gdaltest.py +++ b/autotest/pymod/gdaltest.py @@ -1912,6 +1912,8 @@ def 
gdalurlopen(url, timeout=10): urllib.request.install_opener(opener) + import http.client + try: handle = urllib.request.urlopen(url) socket.setdefaulttimeout(old_timeout) @@ -1932,6 +1934,10 @@ def gdalurlopen(url, timeout=10): print(f"HTTP service for {url} timed out") socket.setdefaulttimeout(old_timeout) return None + except http.client.RemoteDisconnected as e: + print(f"HTTP service for {url} is not available: RemoteDisconnected : {e}") + socket.setdefaulttimeout(old_timeout) + return None def runexternal( From fba559b5bd8d33aac215681df4f6a613517a6c43 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 22 Apr 2024 23:50:26 +0200 Subject: [PATCH 150/230] CMake: add missing dependency to generate_gdal_version_h target when building {plugin}_core targets --- cmake/helpers/GdalDriverHelper.cmake | 1 + 1 file changed, 1 insertion(+) diff --git a/cmake/helpers/GdalDriverHelper.cmake b/cmake/helpers/GdalDriverHelper.cmake index e956c5f171fa..49ff81093abb 100644 --- a/cmake/helpers/GdalDriverHelper.cmake +++ b/cmake/helpers/GdalDriverHelper.cmake @@ -84,6 +84,7 @@ function(_set_driver_core_sources _KEY _DRIVER_TARGET) target_compile_definitions(${_DRIVER_TARGET}_core PRIVATE "-DPLUGIN_INSTALLATION_MESSAGE=\"${${_var_PLUGIN_INSTALLATION_MESSAGE}}\"") endif() gdal_standard_includes(${_DRIVER_TARGET}_core) + add_dependencies(${_DRIVER_TARGET}_core generate_gdal_version_h) target_compile_definitions(gdal_frmts PRIVATE -DDEFERRED_${_KEY}_DRIVER) From 317929b3df26d06051c658affdee14067ab5caff Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 23 Apr 2024 00:52:17 +0200 Subject: [PATCH 151/230] Miramon: avoid harmless unsigned integer overflow (https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=68240) --- ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp b/ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp index 8ca8c95d2cbe..f798a561bc4d 100644 --- a/ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp +++ b/ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp @@ -1633,8 +1633,9 @@ OGRErr OGRMiraMonLayer::ICreateFeature(OGRFeature *poFeature) if (poGeom == nullptr) { eErr = LOG_ACTION(MMProcessGeometry(nullptr, poFeature, TRUE)); - if (phMiraMonLayer->bIsDBF) - poFeature->SetFID(phMiraMonLayer->TopHeader.nElemCount - 1); + if (phMiraMonLayer->bIsDBF && phMiraMonLayer->TopHeader.nElemCount > 0) + poFeature->SetFID((GIntBig)phMiraMonLayer->TopHeader.nElemCount - + 1); return eErr; } From da8eae1da34037dde6bd6c406db40db8679cff0a Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 23 Apr 2024 01:05:35 +0200 Subject: [PATCH 152/230] Miramon: avoid unsigned integer overflow (https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=68241) --- ogr/ogrsf_frmts/miramon/mm_rdlayr.c | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/ogr/ogrsf_frmts/miramon/mm_rdlayr.c b/ogr/ogrsf_frmts/miramon/mm_rdlayr.c index 4a8a2ec09126..d52f21c0c0d7 100644 --- a/ogr/ogrsf_frmts/miramon/mm_rdlayr.c +++ b/ogr/ogrsf_frmts/miramon/mm_rdlayr.c @@ -327,7 +327,9 @@ MMAddStringLineCoordinates(struct MiraMonVectLayerInfo *hMiraMonLayer, } } hMiraMonLayer->ReadFeature.nNumpCoord = - pArcHeader[i_elem].nElemCount - (bAvoidFirst ? 1 : 0); + pArcHeader[i_elem].nElemCount == 0 + ? 0 + : pArcHeader[i_elem].nElemCount - (bAvoidFirst ? 
1 : 0); return 0; } From ceb89017288b2b75a6db00c66f9a4f83c4e8cd1c Mon Sep 17 00:00:00 2001 From: Kai Pastor <dg0yt@darc.de> Date: Tue, 23 Apr 2024 08:50:54 +0200 Subject: [PATCH 153/230] Fix generated find_dependency The version number must be the second argument. --- cmake/helpers/CheckDependentLibraries.cmake | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmake/helpers/CheckDependentLibraries.cmake b/cmake/helpers/CheckDependentLibraries.cmake index 6eeb5d880066..5d19b4759ffe 100644 --- a/cmake/helpers/CheckDependentLibraries.cmake +++ b/cmake/helpers/CheckDependentLibraries.cmake @@ -142,7 +142,7 @@ macro (gdal_check_package name purpose) gdal_check_package_target(${name} ${GDAL_CHECK_PACKAGE_${name}_TARGETS} REQUIRED) if (${name}_FOUND) get_filename_component(_find_dependency_args "${${name}_CONFIG}" NAME) - string(REPLACE ";" " " _find_dependency_args "${name} NAMES ${GDAL_CHECK_PACKAGE_${name}_NAMES} CONFIGS ${_find_dependency_args} ${_find_package_args}") + string(REPLACE ";" " " _find_dependency_args "${name} ${_find_package_args} NAMES ${GDAL_CHECK_PACKAGE_${name}_NAMES} CONFIGS ${_find_dependency_args}") endif () endif () if (NOT ${name}_FOUND) From 130560b919ef774d204f702fde533fe896eb4e0f Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 22 Apr 2024 21:54:46 +0200 Subject: [PATCH 154/230] TileDB: remove TILEDB_DEPRECATED suppression --- frmts/tiledb/CMakeLists.txt | 1 - frmts/tiledb/include_tiledb.h | 9 --------- frmts/tiledb/tiledbdense.cpp | 10 ---------- 3 files changed, 20 deletions(-) diff --git a/frmts/tiledb/CMakeLists.txt b/frmts/tiledb/CMakeLists.txt index 2b68b39f7ef7..86fc626591c6 100644 --- a/frmts/tiledb/CMakeLists.txt +++ b/frmts/tiledb/CMakeLists.txt @@ -25,5 +25,4 @@ endif() gdal_standard_includes(gdal_TileDB) target_include_directories(gdal_TileDB PRIVATE ${GDAL_RASTER_FORMAT_SOURCE_DIR}/mem) -target_compile_definitions(gdal_TileDB PRIVATE -DTILEDB_DEPRECATED=) gdal_target_link_libraries(gdal_TileDB PRIVATE TileDB::tiledb_shared) diff --git a/frmts/tiledb/include_tiledb.h b/frmts/tiledb/include_tiledb.h index f721cb3c59cb..cec9844846ac 100644 --- a/frmts/tiledb/include_tiledb.h +++ b/frmts/tiledb/include_tiledb.h @@ -35,11 +35,6 @@ #pragma GCC system_header #endif -#ifdef _MSC_VER -#pragma warning(push) -#pragma warning(disable : 4996) /* XXXX was deprecated */ -#endif - #ifdef INCLUDE_ONLY_TILEDB_VERSION #include "tiledb/tiledb_version.h" #else @@ -47,10 +42,6 @@ #include "tiledb/tiledb_experimental" #endif -#ifdef _MSC_VER -#pragma warning(pop) -#endif - #if TILEDB_VERSION_MAJOR > 2 || \ (TILEDB_VERSION_MAJOR == 2 && TILEDB_VERSION_MINOR >= 9) #define HAS_TILEDB_GROUP diff --git a/frmts/tiledb/tiledbdense.cpp b/frmts/tiledb/tiledbdense.cpp index 3090bc670e77..e431faf60947 100644 --- a/frmts/tiledb/tiledbdense.cpp +++ b/frmts/tiledb/tiledbdense.cpp @@ -31,12 +31,6 @@ #include "tiledbheaders.h" -#ifdef _MSC_VER -#pragma warning(push) -// 'tiledb::Array::Array': was declared deprecated -#pragma warning(disable : 4996) /* XXXX was deprecated */ -#endif - constexpr const char *RASTER_DATASET_TYPE = "raster"; /************************************************************************/ @@ -2412,7 +2406,3 @@ GDALDataset *TileDBRasterDataset::CreateCopy(const char *pszFilename, } return nullptr; } - -#ifdef _MSC_VER -#pragma warning(pop) -#endif From 25b28f1e707f13e845c81ffb4a6233d452f9cfe8 Mon Sep 17 00:00:00 2001 From: AbelPau <a.pau@creaf.uab.cat> Date: Tue, 23 Apr 2024 15:05:10 +0200 Subject: [PATCH 155/230] 
Miramon: fixes and additional tests MiraMonVector: fix wrong use of number of records in string lists for field values MiraMonVector: fix wrong word Lower instead of Lowest in metadata OpenOptionList MiraMonVector: add new writing tests and improves the existing ones --- .../Arcs/SimpleArcs/SimpleArcFileA.dbf | Bin 789 -> 789 bytes .../miramon/CorruptedFiles/NoDBF/NoDBF.pnt | Bin 0 -> 96 bytes .../miramon/CorruptedFiles/NoDBF/NoDBFT.rel | 58 ++ .../miramon/CorruptedFiles/NoREL/NoREL.pnt | Bin 0 -> 96 bytes .../miramon/CorruptedFiles/NoREL/NoRELT.dbf | Bin 0 -> 254 bytes .../CorruptedFiles/ShortFile/ShortFile.pnt | 1 + .../CorruptedFiles/ShortFile/ShortFileT.dbf | Bin 0 -> 97 bytes .../CorruptedFiles/ShortFile/ShortFileT.rel | 23 + .../CorruptedFiles/WrongDBF/WrongDBF.pnt | 1 + .../CorruptedFiles/WrongDBF/WrongDBFT.DBF | Bin 0 -> 15 bytes .../CorruptedFiles/WrongDBF/WrongDBFT.rel | 23 + .../WrongVersion/WrongVersion.pnt | Bin 0 -> 128 bytes .../WrongVersion/WrongVersionT.dbf | Bin 0 -> 97 bytes .../WrongVersion/WrongVersionT.rel | 23 + .../miramon/Points/3dpoints/Some3dPoints.pnt | Bin 1568 -> 1728 bytes .../miramon/Points/3dpoints/Some3dPointsT.dbf | Bin 2710 -> 2785 bytes .../miramon/Points/3dpoints/Some3dPointsT.rel | 1 + .../Polygons/Multipolygons/Multipolygons.arc | Bin 0 -> 1168 bytes .../Polygons/Multipolygons/Multipolygons.nod | Bin 0 -> 112 bytes .../Polygons/Multipolygons/Multipolygons.pol | Bin 0 -> 256 bytes .../Polygons/Multipolygons/MultipolygonsA.dbf | Bin 0 -> 733 bytes .../Polygons/Multipolygons/MultipolygonsA.rel | 92 +++ .../Polygons/Multipolygons/MultipolygonsN.dbf | Bin 0 -> 305 bytes .../Polygons/Multipolygons/MultipolygonsN.rel | 68 ++ .../Polygons/Multipolygons/MultipolygonsP.dbf | Bin 0 -> 1055 bytes .../Polygons/Multipolygons/MultipolygonsP.rel | 108 ++++ autotest/ogr/ogr_miramon_vector.py | 608 +++++++++++++++++- ogr/ogrsf_frmts/miramon/ogrmiramondriver.cpp | 2 +- ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp | 65 +- 29 files changed, 1039 insertions(+), 34 deletions(-) create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoDBF/NoDBF.pnt create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoDBF/NoDBFT.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoREL/NoREL.pnt create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoREL/NoRELT.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/ShortFile/ShortFile.pnt create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/ShortFile/ShortFileT.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/ShortFile/ShortFileT.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/WrongDBF/WrongDBF.pnt create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/WrongDBF/WrongDBFT.DBF create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/WrongDBF/WrongDBFT.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/WrongVersion/WrongVersion.pnt create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/WrongVersion/WrongVersionT.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/WrongVersion/WrongVersionT.rel create mode 100644 autotest/ogr/data/miramon/Polygons/Multipolygons/Multipolygons.arc create mode 100644 autotest/ogr/data/miramon/Polygons/Multipolygons/Multipolygons.nod create mode 100644 autotest/ogr/data/miramon/Polygons/Multipolygons/Multipolygons.pol create mode 100644 autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsA.dbf create mode 100644 autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsA.rel create mode 
100644 autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsN.dbf create mode 100644 autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsN.rel create mode 100644 autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsP.dbf create mode 100644 autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsP.rel diff --git a/autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFileA.dbf b/autotest/ogr/data/miramon/Arcs/SimpleArcs/SimpleArcFileA.dbf index cb3e15a7209bbbe5b85fd7c7b68ae77740320221..3f8b628456601b93fa883c7a1b3f869e88524409 100644 GIT binary patch delta 22 UcmbQrHkEAyJ2T&vOA1H;07p{=B>(^b delta 22 ScmbQrHkEAyJ2RgG3IG5!aRa{q diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoDBF/NoDBF.pnt b/autotest/ogr/data/miramon/CorruptedFiles/NoDBF/NoDBF.pnt new file mode 100644 index 0000000000000000000000000000000000000000..f543cba80f3e4208d1212fbe4fdcdc6db387563e GIT binary patch literal 96 zcmWIW3sErCGh|xgZgJZrwbbG8>DU&wDNPQ!6DphwW7{02T$9KvY3p)eW`KbECRt0K Xb2q@nVftX|VfqE4bV9nOwmAR*uOuXR literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoDBF/NoDBFT.rel b/autotest/ogr/data/miramon/CorruptedFiles/NoDBF/NoDBFT.rel new file mode 100644 index 000000000000..69a72d07ba46 --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/NoDBF/NoDBFT.rel @@ -0,0 +1,58 @@ +[VERSIO] +Vers=4 +SubVers=3 +VersMetaDades=5 +SubVersMetaDades=0 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +simbolitzable=0 +MostrarUnitats=0 +descriptor=Identificador Gràfic intern +descriptor_spa=Identificador Gráfico interno +descriptor_eng=Internal Graphic identifier + +[TAULA_PRINCIPAL:ATT1] +descriptor=Atributte1 + +[TAULA_PRINCIPAL:ATTRIBUTE_2] +descriptor=Atributte2 + +[METADADES] +language=cat,spa,eng +MDIdiom=cat,spa,eng +dateStamp=20230628 16344458+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=68ddf845-79e8-4791-bf7a-5459eb951a04_SimplePointsFile + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=68ddf845-79e8-4791-bf7a-5459eb951a04_SimplePointsFile +codeSpace= +DatasetTitle=Simple Points File + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemDefinition=Local +HorizontalSystemIdentifier=plane +unitats=STB#T_pixels + +[EXTENT] +MinX=342.325404376834 +MaxX=594.503182156354 +MinY=715.680304471881 +MaxY=848.806850618409 +toler_env=0 + +[OVERVIEW] +CreationDate=20230628 16351606+0200 +ContentDate=20230629 12064184+0200 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoREL/NoREL.pnt b/autotest/ogr/data/miramon/CorruptedFiles/NoREL/NoREL.pnt new file mode 100644 index 0000000000000000000000000000000000000000..f543cba80f3e4208d1212fbe4fdcdc6db387563e GIT binary patch literal 96 zcmWIW3sErCGh|xgZgJZrwbbG8>DU&wDNPQ!6DphwW7{02T$9KvY3p)eW`KbECRt0K Xb2q@nVftX|VfqE4bV9nOwmAR*uOuXR literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoREL/NoRELT.dbf b/autotest/ogr/data/miramon/CorruptedFiles/NoREL/NoRELT.dbf new file mode 100644 index 0000000000000000000000000000000000000000..57527da74671340fbf8db0f3f2debbbfe2da2835 GIT binary patch literal 254 zcmbPG%_hstz`)SMpavw-fCvMFr%SwhkfWQYGXsMkNR|stiDO8JAyf#c1PXXy3<d_E ze2}M8Xozb(145o3DBlRg+zbr72n9w8aA4pFXDTQ-A+b@IhR#S_7bG?c6UkT<aS#Up Dx6B-o literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/ShortFile/ShortFile.pnt b/autotest/ogr/data/miramon/CorruptedFiles/ShortFile/ShortFile.pnt new file 
mode 100644 index 000000000000..1bbd55b270c1 --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/ShortFile/ShortFile.pnt @@ -0,0 +1 @@ +PNT .1•j@ÛÖ¦… \ No newline at end of file diff --git a/autotest/ogr/data/miramon/CorruptedFiles/ShortFile/ShortFileT.dbf b/autotest/ogr/data/miramon/CorruptedFiles/ShortFile/ShortFileT.dbf new file mode 100644 index 0000000000000000000000000000000000000000..82935df8b1a3be5791d5c7b1b20c1691da540a9c GIT binary patch literal 97 zcmZRsW*1=qf<y*!5QPeWGM+B+?m>=jp3V#mejrIMR22-42tH5=0|O6&4`%TK04Pxe Avj6}9 literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/ShortFile/ShortFileT.rel b/autotest/ogr/data/miramon/CorruptedFiles/ShortFile/ShortFileT.rel new file mode 100644 index 000000000000..329c866f3352 --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/ShortFile/ShortFileT.rel @@ -0,0 +1,23 @@ +[VERSIO] +Vers=4 +SubVers=3 +VersMetaDades=5 +SubVersMetaDades=0 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +descriptor=Identificador Gràfic intern + + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemIdentifier=UTM-31N-ETRS89 + +[EXTENT] +MinX=2.9E+301 +MaxX=2.9E+301 +MinY=2.9E+301 +MaxY=2.9E+301 + diff --git a/autotest/ogr/data/miramon/CorruptedFiles/WrongDBF/WrongDBF.pnt b/autotest/ogr/data/miramon/CorruptedFiles/WrongDBF/WrongDBF.pnt new file mode 100644 index 000000000000..1bbd55b270c1 --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/WrongDBF/WrongDBF.pnt @@ -0,0 +1 @@ +PNT .1•j@ÛÖ¦… \ No newline at end of file diff --git a/autotest/ogr/data/miramon/CorruptedFiles/WrongDBF/WrongDBFT.DBF b/autotest/ogr/data/miramon/CorruptedFiles/WrongDBF/WrongDBFT.DBF new file mode 100644 index 0000000000000000000000000000000000000000..9a6b764458005e24ab9ec23b1d52e2ecbbd70793 GIT binary patch literal 15 WcmZQzWMXDvWn<@Hu4WfuU;qFFFaW*) literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/WrongDBF/WrongDBFT.rel b/autotest/ogr/data/miramon/CorruptedFiles/WrongDBF/WrongDBFT.rel new file mode 100644 index 000000000000..329c866f3352 --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/WrongDBF/WrongDBFT.rel @@ -0,0 +1,23 @@ +[VERSIO] +Vers=4 +SubVers=3 +VersMetaDades=5 +SubVersMetaDades=0 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +descriptor=Identificador Gràfic intern + + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemIdentifier=UTM-31N-ETRS89 + +[EXTENT] +MinX=2.9E+301 +MaxX=2.9E+301 +MinY=2.9E+301 +MaxY=2.9E+301 + diff --git a/autotest/ogr/data/miramon/CorruptedFiles/WrongVersion/WrongVersion.pnt b/autotest/ogr/data/miramon/CorruptedFiles/WrongVersion/WrongVersion.pnt new file mode 100644 index 0000000000000000000000000000000000000000..ef350fae97a8ed7fdfc87f0966e0ceaebbfdeb84 GIT binary patch literal 128 ucmWIW3sK<LGZdPd<#7Ajver6WGy@JWr<9??=85?~V(I@tPysXxrXB!9Pc!WR literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/WrongVersion/WrongVersionT.dbf b/autotest/ogr/data/miramon/CorruptedFiles/WrongVersion/WrongVersionT.dbf new file mode 100644 index 0000000000000000000000000000000000000000..82935df8b1a3be5791d5c7b1b20c1691da540a9c GIT binary patch literal 97 zcmZRsW*1=qf<y*!5QPeWGM+B+?m>=jp3V#mejrIMR22-42tH5=0|O6&4`%TK04Pxe Avj6}9 literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/WrongVersion/WrongVersionT.rel b/autotest/ogr/data/miramon/CorruptedFiles/WrongVersion/WrongVersionT.rel new file mode 100644 index 
000000000000..329c866f3352 --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/WrongVersion/WrongVersionT.rel @@ -0,0 +1,23 @@ +[VERSIO] +Vers=4 +SubVers=3 +VersMetaDades=5 +SubVersMetaDades=0 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +descriptor=Identificador Gràfic intern + + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemIdentifier=UTM-31N-ETRS89 + +[EXTENT] +MinX=2.9E+301 +MaxX=2.9E+301 +MinY=2.9E+301 +MaxY=2.9E+301 + diff --git a/autotest/ogr/data/miramon/Points/3dpoints/Some3dPoints.pnt b/autotest/ogr/data/miramon/Points/3dpoints/Some3dPoints.pnt index ed3737140bc0e3cef4de2eadb0b3d2a9dfa0b116..392484110e7d59e2d24a1fca2bfe61e20aca214d 100644 GIT binary patch delta 639 zcmZ{iF-QVY7{|{$Jt7Jk%Il6Yx6UDGGpOJRK~AAtgIY`A66z2{Lj@8vrSL5c1%eGN zNo_ZZEe!>2wgg25K@lxdi(UVB`GrHh<#+FX|L?o+J+9JM?=RcF@LA-;H$JxWlH7l= zVyRMN<)oI3DSs`Iqo_J+RsxFBtmbEXmx_YvL;jdnRl_{}$2xSV8Y6Ysa-(IZP|K9H z7kE!X9W<};aD?7X1eh{#Lv1v_gFQ!eDN__oX`g9AGt75boWlvBmMQHgr8UD+bVV7e z&EIcii-IX_k4}49&>Db_;P@@HW=cJ*Q$`D_VVySpLDkmj4dF?I3mCGBFjI!7bP6FQ zbgBVyofd$^PlrXXgSCuOdD{+3nX)wN)tGr`J{eQ$b-MK%);6qnE9uVdfYR=IU)m}| zdrRbm$3#K2UDvJf`>y@LnzB3d82w@*<~p66joD;dfjxVs%*8voTGt=&nsjJ7@MLq2 O=eWAWn#=#r!~PqL#>mM4 delta 460 zcmZ{hFGvGn7{>44Cqo<<=6!yLx_T>Ov<$9oaW=zl8drqbWFQEKU~&g<G09*H7RPK$ zGG;N_W^_#u!Qjv&KJT%&2wwRe-|u-|zCYu9cO?z&;$7*>*4Ce|I%^jOaY7Ym(}c>h zroC2q`5=jeHu5`k>S(k3kB#Wo(Hm9RbTSX?=nA=8!#dTRg4xGUAG4<j2sv;_c~_ez z>;u}JMiL3xwx|qy0(<_pH`+p1$o7nmTpe7%t_>=)b~A~DY*R(ou5YfOOZCoVj?qHa zT`oH}iY`6h87lg5{a}RbG4|L)R>;}0qHUxe72N|*iW;78KW778vH`&>nnm6lnnG@V mWEJxbE$0%lw&>l{CbzV>n|N_IAlp@nJ)K*J<!7S4`uqjyUWr}+ diff --git a/autotest/ogr/data/miramon/Points/3dpoints/Some3dPointsT.dbf b/autotest/ogr/data/miramon/Points/3dpoints/Some3dPointsT.dbf index e55c35b2df32598a926fa8f6c1cfb903207164f7..38a3f648a69d816d4ebbd4d103b64be9a872e561 100644 GIT binary patch delta 39 mcmbOx`cRaWxrRkdVI%8AE)E4SFgBe0kWp^(2S$D<W(NSwkP9II delta 14 VcmaDTI!%<7xrSLtek1EdE&v{+1FHZ4 diff --git a/autotest/ogr/data/miramon/Points/3dpoints/Some3dPointsT.rel b/autotest/ogr/data/miramon/Points/3dpoints/Some3dPointsT.rel index 94ff8a8dfe67..c864f2cac511 100644 --- a/autotest/ogr/data/miramon/Points/3dpoints/Some3dPointsT.rel +++ b/autotest/ogr/data/miramon/Points/3dpoints/Some3dPointsT.rel @@ -41,6 +41,7 @@ toler_env=0 [OVERVIEW] CreationDate=20240318 15131943+0100 +ContentDate=20240422 21414321+0200 [TAULA_PRINCIPAL] IdGrafic=ID_GRAFIC diff --git a/autotest/ogr/data/miramon/Polygons/Multipolygons/Multipolygons.arc b/autotest/ogr/data/miramon/Polygons/Multipolygons/Multipolygons.arc new file mode 100644 index 0000000000000000000000000000000000000000..5752163a2af3312d2577983523ce3d88269b6454 GIT binary patch literal 1168 zcmah|e@K&26rD>UZKRc{3^fgsr6K-EQRei%_x5QTrIsP;Fj_g(!c3(^3n~<&&{0rJ zlcdxrDq9e022w^L{>TE+j6|2Aq-97gC6W5J`}z#~qp`=w<=yk{z2}@u_AtND;7|vB zVrGuPpoXN*_`W_xJPc-E9B%X?5Hh>Bzi&_rjxzlHFHb3^xjFD;@e}pHG^2!g7`St_ zVrGuQxo)3AO+JIxk><*o5dz;PPHxqmpx`X#n&mQ?4Qe|XbsrL@G_H`x!h9=(1n|7D zp|*BJfa+5o&#fU0J}E}peXE%r2diW<5o$j>TMg5WB?|bMKAvL;0PxqhIS(Hs&{Orr zVAxGysl%_X)<Qrb=D!wk;@^Ji!uIR81bJbd3;?12T1$d6g}F5;HC@&?cxq1kq0&;Y z&p#Op$;EM-DvK#}zV*&Bo{a_di+PPp9EJ3v*6*tK8qOaDrw-FmO-~GXgW_{4P*CQM zrL<pHV;>F--xGPiE0=F<`O4scO^3-iIOJa?lik)LUoKSAq5gPIYI`R|-c0;HP|JDL zB452)#CK~U?ecVt%RJ}J%p`qp1Z6iB3KZ}?Ch7fTF{QC^lptP3U^cj7sL_q$eJOzj zCsl&wC&74uaD6E9TR>6gH~hYo!n@NkR+WyxJ==WA6t+cYmAS7asDB1Uon8X0&&w@$ z_n`RRVZi#U=|6hlJQM;v%{pT~ueXjNK9a|x&rE3@;o*w}>&Gnn?T`O5lO~K^O9brm z`Y`Ozl|rQ3uj$k%2B9|Jr%`~Zn^{IVlYF|ezTKD60@hPA)a$%}{s?Et$HQgc%13+K 
ze47O*9e8Ca(=o}{_I}HG{8*6uoqeR{e!>|63Kjy7X^NP%4`UG#J1lyh|5tr`zncl- z`w8mZ2k74>g7cLE&i9DGrG{tYWhH?AZQ${C;Qk`g9$|d~w{|&qS#v?!C){hUS1y5K Z<&d~9T+b^!-bYZc9wP1IpFLdJ?{D_0)eisw literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/Multipolygons/Multipolygons.nod b/autotest/ogr/data/miramon/Polygons/Multipolygons/Multipolygons.nod new file mode 100644 index 0000000000000000000000000000000000000000..76dd01f956adec07460d8ad34b585716a5d6f32c GIT binary patch literal 112 zcmebCcTq6ZGh`I+7M?jX(!rtQglvIrm#f2p4W8dLmN+_057}jGZ0+p8!T<q`3``6G ZKqiQefYJ$2Is-_<03(#a1f`jwGyquE561ui literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/Multipolygons/Multipolygons.pol b/autotest/ogr/data/miramon/Polygons/Multipolygons/Multipolygons.pol new file mode 100644 index 0000000000000000000000000000000000000000..4040f9f4d979aad46d143a8c9293f323ecb8ad3d GIT binary patch literal 256 zcmWIW_fas^GxSXIj7m*Qvv%;>=XT}{kDJ4lxQW*nOE@|hybC^i=8B6069WP;LYc@k zj7`WK7NA~`Ss?lX5Kq(Mh@Qvb>o6hapYt!yumhy%e}SZbzLfr5l}VuvY(Oyvc93h> ULDn!agZMxIrkTJ(U`d!b04oSO`~Uy| literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsA.dbf b/autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsA.dbf new file mode 100644 index 0000000000000000000000000000000000000000..74d23798fe8a8ca5b1e2b5be1bbee04ebc74bd9e GIT binary patch literal 733 zcmZ{g!3x4K42A=WH!q_1K0(Qnrfx@ZQwCCDig??L2tKQC;wHMTBlb}Ket+^e?d&mM z%>m#A&u*WrH9%dq$7Xk^_W+@HH<Oak-m2zW-7kOq(>Wa5UDLIH`xj}z&#!*|EcL^= ztlB!%Lw{<+$3OJ8KHt)43l&Iy44t!rm=i_l2n$BC%p74<TBcE<$zoSjMU<UlTcA)f z6L-Y1_4zib{2tUy6`IkTV$$?_5$|O@J~K(?tQR#@%~YY$elJT)y+ubEOB~NgPKe&- L-i_cOwM@frzIa*5 literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsA.rel b/autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsA.rel new file mode 100644 index 000000000000..f2a70de04e3a --- /dev/null +++ b/autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsA.rel @@ -0,0 +1,92 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20240418 15564880+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=8d979929-160d-4fea-9333-4111bc4af930_MultipolygonsA + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[IDENTIFICATION] +code=8d979929-160d-4fea-9333-4111bc4af930_MultipolygonsA +codeSpace= +DatasetTitle=Multipolygons [Plantill] + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemIdentifier=lat/long-WGS84 + +[EXTENT] +toler_env=0 +MinX=27.399999937575 +MaxX=44.099999937575 +MinY=34.192500062925 +MaxY=41.662500062925 + +[OVERVIEW] +CreationDate=20240418 15564879+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +MostrarUnitats=0 +descriptor=Identificador Gràfic intern + +[TAULA_PRINCIPAL:N_VERTEXS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de vèrtexs + +[TAULA_PRINCIPAL:LONG_ARC] +visible=0 +descriptor=Longitud de l'arc (projecció) + +[TAULA_PRINCIPAL:LONG_ARCE] +unitats=m +descriptor=Longitud de l'arc (el·lipsoide) + +[TAULA_PRINCIPAL:NODE_INI] +visible=0 +MostrarUnitats=0 +descriptor=Node inicial + +[TAULA_PRINCIPAL:NODE_FI] +visible=0 +MostrarUnitats=0 +descriptor=Node final + +[OVERVIEW:ASPECTES_TECNICS] +Ciclat1=Multipolygons.pol + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20240418 15564879+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau 
+PositionName=Tècnic en SIG +OrganisationName=CREAF + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampLongitudArc=LONG_ARC +NomCampLongitudArcEllipsoidal=LONG_ARCE +NomCampNodeIni=NODE_INI +NomCampNodeFi=NODE_FI diff --git a/autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsN.dbf b/autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsN.dbf new file mode 100644 index 0000000000000000000000000000000000000000..0baf50ab841870d687cbe3721df9bdb857851309 GIT binary patch literal 305 zcmZRsVG&|sU|?uu&;gQYK!kz8(<R<L$kENynSsF%BrA%h#4*S@INmYd&))^Re28a2 nXfR0H6(Y}wrk_^<8!*5oXlR5@RKXCNC=Rt)O~9en7`wRua@rya literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsN.rel b/autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsN.rel new file mode 100644 index 000000000000..a1d8b2f52192 --- /dev/null +++ b/autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsN.rel @@ -0,0 +1,68 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20240418 15564879+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=383b7088-9b01-4fbc-8260-a0334da9a15e_MultipolygonsN + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[IDENTIFICATION] +code=383b7088-9b01-4fbc-8260-a0334da9a15e_MultipolygonsN +codeSpace= +DatasetTitle=Multipolygons [Plantill] [Plantill] + +[EXTENT] +toler_env=0 +MinX=27.399999937575 +MaxX=44.099999937575 +MinY=34.192500062925 +MaxY=41.662500062925 + +[OVERVIEW] +CreationDate=20240418 15564877+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +MostrarUnitats=0 +descriptor=Identificador Gràfic intern + +[TAULA_PRINCIPAL:ARCS_A_NOD] +MostrarUnitats=0 +descriptor=Nombre d'arcs al node + +[TAULA_PRINCIPAL:TIPUS_NODE] +MostrarUnitats=0 +descriptor=Tipus de node + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20240418 15564878+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampArcsANode=ARCS_A_NOD +NomCampTipusNode=TIPUS_NODE diff --git a/autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsP.dbf b/autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsP.dbf new file mode 100644 index 0000000000000000000000000000000000000000..8d5f513c68a4fc8cbc6fef73b6d714914655dd59 GIT binary patch literal 1055 zcmchUyH3L}6oy?D5JIpZSeX0*wS12=rj4m0rD^4+h^;dc0|RWl8_&X<V8@rFrIk<z z&S3vOpU?4cd;EEJI|cxJkG|}<7k2<u<#uk0$7%*Z<LK4imS+2;O{<^RNBBycYN1=B z1HZr6ZIHP3{r7ocbP+1L^AB;`+r>fNYzs59<|6;?f3>Wu`B84K)^^nZ*e3wsIxOPU zY!*{(x>a}nc?4munpSeJJ8qwJC(7k!TKfxl?mlA>^>Xgs-bL+(ksAh47Hxc=(xop4 z%*h*=0!JleRBjduVayd{At?mmOlC+hfpMy_aH13<LgY3%Q7n@=Q!>jHK^bO*_B(lK z1YxgI>U2S3>52BCl87pIDG2{)fAR79{_+OX$zBl3xR9C3y@<B_OeqpVwG>m1IZ-`K bP5#|JjV8PZ_H*2cEW%{pe)>At&mVpO?$UnN literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsP.rel b/autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsP.rel new file mode 100644 index 000000000000..4336a8b250d1 --- /dev/null +++ b/autotest/ogr/data/miramon/Polygons/Multipolygons/MultipolygonsP.rel @@ -0,0 +1,108 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20240418 15565753+0200 
+characterSet=006 +nOrganismes=1 +FileIdentifier=1a33d20b-d84d-4f3c-8bb8-02e773d488ed_MultipolygonsP + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[IDENTIFICATION] +code=1a33d20b-d84d-4f3c-8bb8-02e773d488ed_MultipolygonsP +codeSpace= +DatasetTitle=Multipolygons + +[OVERVIEW:ASPECTES_TECNICS] +ArcSource=Multipolygons.arc + +[EXTENT] +toler_env=0 +MinX=27.399999937575 +MaxX=44.099999937575 +MinY=34.192500062925 +MaxY=41.662500062925 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +MostrarUnitats=0 +descriptor=Identificador Gràfic intern + +[TAULA_PRINCIPAL:N_VERTEXS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de vèrtexs + +[TAULA_PRINCIPAL:PERIMETRE] +visible=0 +descriptor=Perímetre del polígon (projecció) + +[TAULA_PRINCIPAL:PERIMETREE] +unitats=m +descriptor=Perímetre del polígon (el·lipsoide) + +[TAULA_PRINCIPAL:AREA] +visible=0 +descriptor=Àrea del polígon (projecció) + +[TAULA_PRINCIPAL:AREAE] +unitats=m² +descriptor=Àrea del polígon (el·lipsoide) + +[TAULA_PRINCIPAL:N_ARCS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre d'arcs + +[TAULA_PRINCIPAL:N_POLIG] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de polígons elementals + +[TAULA_PRINCIPAL:NUMBER] +unitats= + +[TAULA_PRINCIPAL:INT64] +unitats= + +[TAULA_PRINCIPAL:DOUBLE] +unitats= + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20240418 15565753+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[QUALITY:LINEAGE] +processes=1 + +[OVERVIEW] +CreationDate=20240418 15565750+0200 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampPerimetre=PERIMETRE +NomCampPerimetreEllipsoidal=PERIMETREE +NomCampArea=AREA +NomCampAreaEllipsoidal=AREAE +NomCampNArcs=N_ARCS +NomCampNPoligons=N_POLIG diff --git a/autotest/ogr/ogr_miramon_vector.py b/autotest/ogr/ogr_miramon_vector.py index 98a3cad117e8..d58a52573002 100644 --- a/autotest/ogr/ogr_miramon_vector.py +++ b/autotest/ogr/ogr_miramon_vector.py @@ -37,8 +37,7 @@ # import ogrtest import pytest -# from osgeo import gdal, ogr, osr -from osgeo import gdal, ogr +from osgeo import gdal, ogr, osr pytestmark = pytest.mark.require_driver("MiraMonVector") @@ -91,7 +90,7 @@ def test_ogr_miramon_read_simple_point(): check_simple_point(ds) -def test_ogr_miramon_write_simple_pointV11(tmp_vsimem): +def test_ogr_miramon_write_simple_point_EmptyVersion(tmp_vsimem): out_filename = str(tmp_vsimem / "out.pnt") gdal.VectorTranslate( @@ -103,7 +102,20 @@ def test_ogr_miramon_write_simple_pointV11(tmp_vsimem): check_simple_point(ds) -def test_ogr_miramon_write_simple_pointV20(tmp_vsimem): +def test_ogr_miramon_write_simple_point_V11(tmp_vsimem): + + out_filename = str(tmp_vsimem / "out.pnt") + gdal.VectorTranslate( + out_filename, + "data/miramon/Points/SimplePoints/SimplePointsFile.pnt", + format="MiraMonVector", + options="-lco Version=V1.1", + ) + ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) + check_simple_point(ds) + + +def test_ogr_miramon_write_simple_point_V20(tmp_vsimem): out_filename = str(tmp_vsimem / "out.pnt") gdal.VectorTranslate( @@ -117,6 +129,20 @@ def test_ogr_miramon_write_simple_pointV20(tmp_vsimem): check_simple_point(ds) +def test_ogr_miramon_write_simple_point_last_version(tmp_vsimem): + + out_filename = str(tmp_vsimem / "out.pnt") + gdal.VectorTranslate( + out_filename, + "data/miramon/Points/SimplePoints/SimplePointsFile.pnt", + 
format="MiraMonVector", + options="-lco Version=last_version", + ) + + ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) + check_simple_point(ds) + + ############################################################################### # basic linestring test @@ -187,7 +213,7 @@ def check_simple_arc(ds): assert f.GetField("NODE_INI") == 6 assert f.GetField("NODE_FI") == 7 assert f.GetFieldAsString("ATT1") == "E" - assert f.GetFieldAsString("ATT2") == "F" + assert f.GetFieldAsString("ATT2") == "FÈÊ" def test_ogr_miramon_read_simple_arc(): @@ -197,20 +223,34 @@ def test_ogr_miramon_read_simple_arc(): check_simple_arc(ds) -def test_ogr_miramon_write_simple_arcV11(tmp_vsimem): +def test_ogr_miramon_write_simple_arc_EmptyVersion(tmp_vsimem): + + out_filename = str(tmp_vsimem / "out.arc") + gdal.VectorTranslate( + out_filename, + "data/miramon/Arcs/SimpleArcs/SimpleArcFile.arc", + format="MiraMonVector", + ) + ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) + check_simple_arc(ds) + del ds + + +def test_ogr_miramon_write_simple_arc_V11(tmp_vsimem): out_filename = str(tmp_vsimem / "out.arc") gdal.VectorTranslate( out_filename, "data/miramon/Arcs/SimpleArcs/SimpleArcFile.arc", format="MiraMonVector", + options="-lco Version=V1.1", ) ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) check_simple_arc(ds) del ds -def test_ogr_miramon_write_simple_arcV20(tmp_vsimem): +def test_ogr_miramon_write_simple_arc_V20(tmp_vsimem): out_filename = str(tmp_vsimem / "out.arc") gdal.VectorTranslate( @@ -224,6 +264,20 @@ def test_ogr_miramon_write_simple_arcV20(tmp_vsimem): del ds +def test_ogr_miramon_write_simple_arc_last_version(tmp_vsimem): + + out_filename = str(tmp_vsimem / "out.arc") + gdal.VectorTranslate( + out_filename, + "data/miramon/Arcs/SimpleArcs/SimpleArcFile.arc", + format="MiraMonVector", + options="-lco Version=last_version", + ) + ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) + check_simple_arc(ds) + del ds + + ############################################################################### # basic polygon test @@ -296,7 +350,7 @@ def test_ogr_miramon_read_simple_polygon(): check_simple_polygon(ds) -def test_ogr_miramon_write_simple_polygonV11(tmp_vsimem): +def test_ogr_miramon_write_simple_polygon_EmptyVersion(tmp_vsimem): out_filename = str(tmp_vsimem / "out.pol") gdal.VectorTranslate( @@ -308,7 +362,20 @@ def test_ogr_miramon_write_simple_polygonV11(tmp_vsimem): check_simple_polygon(ds) -def test_ogr_miramon_write_simple_polygonV20(tmp_vsimem): +def test_ogr_miramon_write_simple_polygon__V11(tmp_vsimem): + + out_filename = str(tmp_vsimem / "out.pol") + gdal.VectorTranslate( + out_filename, + "data/miramon/Polygons/SimplePolygons/SimplePolFile.pol", + format="MiraMonVector", + options="-lco Version=V1.1", + ) + ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) + check_simple_polygon(ds) + + +def test_ogr_miramon_write_simple_polygon_V20(tmp_vsimem): out_filename = str(tmp_vsimem / "out.pol") gdal.VectorTranslate( @@ -321,6 +388,70 @@ def test_ogr_miramon_write_simple_polygonV20(tmp_vsimem): check_simple_polygon(ds) +def test_ogr_miramon_write_simple_polygon_last_version(tmp_vsimem): + + out_filename = str(tmp_vsimem / "out.pol") + gdal.VectorTranslate( + out_filename, + "data/miramon/Polygons/SimplePolygons/SimplePolFile.pol", + format="MiraMonVector", + options="-lco Version=last_version", + ) + ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) + check_simple_polygon(ds) + + +############################################################################### +# basic multipolygon test + + +def 
check_multi_polygon(ds): + + lyr = ds.GetLayer(0) + + assert lyr is not None, "Failed to get layer" + + assert lyr.GetFeatureCount() == 1 + assert lyr.GetGeomType() == ogr.wkbMultiPolygon + + # going to the first polygon + f = lyr.GetNextFeature() + assert f is not None, "Failed to get feature" + assert f.GetFID() == 0 + assert ( + f.GetGeometryRef().ExportToWkt() + == "MULTIPOLYGON (((32.699999937575 36.072500062925,31.959999937575 36.532500062925,30.899999937575 36.902500062925,30.509999937575 36.492500062925,29.859999937575 36.192500062925,28.789999937575 36.502500062925,27.619999937575 38.012500062925,27.399999937575 39.872500062925,31.899999937575 41.312500062925,36.079999937575 41.662500062925,37.489999937575 41.182500062925,40.329999937575 40.932500062925,41.589999937575 41.562500062925,43.929999937575 39.382500062925,44.099999937575 36.542500062925,39.489999937575 34.192500062925,35.729999937575 34.312500062925,36.129999937575 34.942500062925,35.959999937575 35.942500062925,36.339999937575 36.862500062925,35.639999937575 36.942500062925,34.719999937575 36.622500062925,34.109999937575 36.702500062925,33.549999937575 36.172500062925,32.839999937575 36.062500062925,32.699999937575 36.072500062925),(42.449999937575 38.462500062925,43.079999937575 38.402500062925,43.389999937575 38.382500062925,43.289999937575 38.722500062925,43.699999937575 38.962500062925,43.449999937575 39.102500062925,43.009999937575 38.892500062925,42.339999937575 38.772500062925,42.449999937575 38.462500062925),(37.929999937575 36.832500062925,38.139999937575 36.422500062925,37.889999937575 35.962500062925,38.469999937575 35.702500062925,38.829999937575 35.982500062925,38.229999937575 36.122500062925,38.439999937575 36.662500062925,38.019999937575 36.932500062925,37.929999937575 36.832500062925)),((34.269999937575 35.602500062925,34.779999937575 35.762500062925,34.669999937575 35.582500062925,33.919999937575 35.172500062925,33.889999937575 34.812500062925,32.819999937575 34.612500062925,32.299999937575 34.892500062925,32.409999937575 35.182500062925,32.909999937575 35.242500062925,32.939999937575 35.412500062925,33.599999937575 35.282500062925,34.269999937575 35.602500062925)))" + ) + assert f.GetFieldAsString("ID_GRAFIC") == "(2:1,1)" + assert f.GetFieldAsString("N_VERTEXS") == "(2:56,56)" + assert f.GetFieldAsString("N_ARCS") == "(2:4,4)" + assert f.GetFieldAsString("N_POLIG") == "(2:4,4)" + assert f.GetFieldAsString("TEXT") == "(2:Multip 1,Multip 2)" + assert f.GetFieldAsString("NUMBER") == "(2:1,2)" + assert f.GetFieldAsString("DATA") == "2024/04/18" + + +def test_ogr_miramon_read_multi_polygon(): + + ds = gdal.OpenEx( + "data/miramon/Polygons/Multipolygons/Multipolygons.pol", gdal.OF_VECTOR + ) + assert ds is not None, "Failed to get dataset" + check_multi_polygon(ds) + + +def test_ogr_miramon_write_multi_polygon_EmptyVersion(tmp_vsimem): + + out_filename = str(tmp_vsimem / "out.pol") + gdal.VectorTranslate( + out_filename, + "data/miramon/Polygons/Multipolygons/Multipolygons.pol", + format="MiraMonVector", + ) + ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) + check_multi_polygon(ds) + + ############################################################################### # testing empty layers @@ -389,19 +520,21 @@ def check_3d_point(ds): assert lyr is not None, "Failed to get layer" - assert lyr.GetFeatureCount() == 31 + assert lyr.GetFeatureCount() == 32 assert lyr.GetGeomType() == ogr.wkbPoint25D f = lyr.GetNextFeature() assert f is not None, "Failed to get feature" assert f.GetFID() == 0 - assert 
f.GetGeometryRef().ExportToWkt() == "POINT (440551.66 4635315.3 619.96)" + assert ( + f.GetGeometryRef().ExportToWkt() == "POINT (440551.66 4635315.3 619.9599609375)" + ) g = f.GetGeometryRef() assert g is not None, "Failed to get geometry" assert g.GetCoordinateDimension() == 3 - assert g.GetZ() == 619.96 + assert g.GetZ() == 619.9599609375 f = lyr.GetFeature(30) assert f is not None, "Failed to get feature" @@ -417,6 +550,36 @@ def test_ogr_miramon_read_3d_point(tmp_vsimem): check_3d_point(ds) +@pytest.mark.parametrize( + "Height,expected_height", + [ + ("First", 250.0), + ("Lowest", 250.0), + ("Highest", 277.0), + ], +) +def test_ogr_miramon_read_multi_3d_point(Height, expected_height): + + ds = gdal.OpenEx( + "data/miramon/Points/3dpoints/Some3dPoints.pnt", + gdal.OF_VECTOR, + open_options=["Height=" + Height], + ) + + assert ds is not None, "Failed to get dataset" + lyr = ds.GetLayer(0) + assert lyr is not None, "Failed to get layer" + + assert lyr.GetFeatureCount() == 32 + assert lyr.GetGeomType() == ogr.wkbPoint25D + + f = lyr.GetFeature(31) + assert f is not None, "Failed to get feature" + g = f.GetGeometryRef() + assert g is not None, "Failed to get geometry" + assert g.GetZ() == expected_height + + def test_ogr_miramon_write_3d_point(tmp_vsimem): out_filename = str(tmp_vsimem / "out.pnt") @@ -567,6 +730,7 @@ def test_ogr_miramon_write_3d_pol(tmp_vsimem): "Polygons/SimplePolygons/SimplePolFile.pol", "Polygons/EmptyPolygons/Empty_POL.pol", "Polygons/3dPolygons/tin_3d.pol", + "Polygons/Multipolygons/Multipolygons.pol", ], ) def test_ogr_miramon_test_ogrsf(filename): @@ -675,3 +839,423 @@ def test_ogr_miramon_OpenLanguageArc(Language, expected_description): field_def = layer_def.GetFieldDefn(field_index) field_description = field_def.GetAlternativeNameRef() assert field_description == expected_description + + +############################################################################### +# unexisting file, file shorter than expected, wrong version, no sidecar files + + +@pytest.mark.parametrize( + "name,message", + [ + ( + "data/miramon/CorruptedFiles/ShortFile/ShortFile.pnt", + "not recognized as being in a supported file format", + ), + ( + "data/miramon/CorruptedFiles/WrongVersion/WrongVersion.pnt", + "not recognized as being in a supported file format", + ), + ( + "data/miramon/CorruptedFiles/WrongDBF/WrongDBF.pnt", + "not recognized as being in a supported file format", + ), + ( + "data/miramon/CorruptedFiles/NoDBF/NoDBF.pnt", + "Error reading the format in the DBF file", + ), + ("data/miramon/CorruptedFiles/NoREL/NoREL.pnt", "rel must exist."), + ], +) +def test_ogr_miramon_corrupted_files(name, message): + with pytest.raises(Exception, match=message): + gdal.OpenEx( + name, + gdal.OF_VECTOR, + ) + + +############################################################################### +# multiregister test + + +@pytest.mark.parametrize( + "expected_MultiRecordIndex,textField,expectedResult", + [ + ("0", "TEXT", "Multip 1"), + ("1", "TEXT", "Multip 2"), + ("Last", "TEXT", "Multip 2"), + ("JSON", "TEXT", "[Multip 1,Multip 2]"), + ("0", "NUMBER", "1"), + ("1", "NUMBER", "2"), + ("Last", "NUMBER", "2"), + ("JSON", "NUMBER", "[1,2]"), + ("0", "DOUBLE", "22.558"), + ("1", "DOUBLE", "22.000"), + ("Last", "DOUBLE", "22.000"), + ("JSON", "DOUBLE", "[22.558,22.000]"), + ], +) +def test_multiregister(expected_MultiRecordIndex, textField, expectedResult): + ds = gdal.OpenEx( + "data/miramon/Polygons/Multipolygons/Multipolygons.pol", + gdal.OF_VECTOR, + open_options=["MultiRecordIndex=" + 
expected_MultiRecordIndex], + ) + assert ds is not None, "Failed to get dataset" + + lyr = ds.GetLayer(0) + + assert lyr is not None, "Failed to get layer" + + assert lyr.GetFeatureCount() == 1 + assert lyr.GetGeomType() == ogr.wkbMultiPolygon + + # going to the first polygon + f = lyr.GetNextFeature() + assert f is not None, "Failed to get feature" + assert f.GetFID() == 0 + assert f.GetFieldAsString(textField) == expectedResult + + +############################################################################### +# basic writing test + + +def create_common_attributes(lyr): + lyr.CreateField(ogr.FieldDefn("strfield", ogr.OFTString)) + lyr.CreateField(ogr.FieldDefn("intfield", ogr.OFTInteger)) + lyr.CreateField(ogr.FieldDefn("int64field", ogr.OFTInteger64)) + lyr.CreateField(ogr.FieldDefn("doublefield", ogr.OFTReal)) + lyr.CreateField(ogr.FieldDefn("strlistfield", ogr.OFTStringList)) + lyr.CreateField(ogr.FieldDefn("intlistfield", ogr.OFTIntegerList)) + lyr.CreateField(ogr.FieldDefn("int64listfield", ogr.OFTInteger64List)) + lyr.CreateField(ogr.FieldDefn("doulistfield", ogr.OFTRealList)) + + +def assign_common_attributes(f): + f["strfield"] = "foo" + f["intfield"] = 123456789 + f["int64field"] = 12345678912345678 + f["doublefield"] = 1.5 + f["strlistfield"] = ["foo", "bar"] + f["intlistfield"] = [123456789] + f["int64listfield"] = [12345678912345678] + f["doulistfield"] = [1.5, 4.2] + + +def check_common_attributes(f): + assert f["strfield"] == ["foo", ""] + assert f["intfield"] == [123456789] + assert f["int64field"] == [12345678912345678] + assert f["doublefield"] == [1.5] + assert f["strlistfield"] == ["foo", "bar"] + assert f["intlistfield"] == [123456789] + assert f["int64listfield"] == [12345678912345678] + assert f["doulistfield"] == [1.5, 4.2] + + +def test_ogr_miramon_write_basic_polygon(tmp_path): + + filename = str(tmp_path / "DataSetPOL") + ds = ogr.GetDriverByName("MiramonVector").CreateDataSource(filename) + srs = osr.SpatialReference() + srs.ImportFromEPSG(32631) + lyr = ds.CreateLayer("test", srs=srs, geom_type=ogr.wkbUnknown) + create_common_attributes(lyr) + f = ogr.Feature(lyr.GetLayerDefn()) + assign_common_attributes(f) + + f.SetGeometry(ogr.CreateGeometryFromWkt("POLYGON ((0 0,0 1,1 1,0 0))")) + lyr.CreateFeature(f) + f = None + ds = None + + layername = filename + "/test.pol" + ds = ogr.Open(layername) + assert ds is not None, "Failed to get dataset" + lyr = ds.GetLayer(0) + assert lyr is not None, "Failed to get layer" + f = lyr.GetNextFeature() + + assert f["ID_GRAFIC"] == [1, 1] + assert f["N_VERTEXS"] == [4, 4] + assert f["PERIMETRE"] == [3.414213562, 3.414213562] + assert f["AREA"] == [0.500000000000, 0.500000000000] + assert f["N_ARCS"] == [1, 1] + assert f["N_POLIG"] == [1, 1] + check_common_attributes(f) + assert f.GetGeometryRef().ExportToIsoWkt() == "POLYGON ((0 0,0 1,1 1,0 0))" + ds = None + + +def test_ogr_miramon_write_basic_multipolygon(tmp_path): + + filename = str(tmp_path / "DataSetMULTIPOL") + ds = ogr.GetDriverByName("MiramonVector").CreateDataSource(filename) + srs = osr.SpatialReference() + srs.ImportFromEPSG(32631) + lyr = ds.CreateLayer("test", srs=srs, geom_type=ogr.wkbUnknown) + create_common_attributes(lyr) + f = ogr.Feature(lyr.GetLayerDefn()) + assign_common_attributes(f) + + f.SetGeometry( + ogr.CreateGeometryFromWkt( + "MULTIPOLYGON (((0 0,0 5,5 5,5 0,0 0), (1 1,2 1,2 2,1 2,1 1), (3 3,4 3,4 4,3 4,3 3)),((5 6,5 7,6 7,6 6,5 6)))" + ) + ) + + lyr.CreateFeature(f) + f = None + ds = None + + layername = filename + "/test.pol" + ds = 
ogr.Open(layername) + assert ds is not None, "Failed to get dataset" + lyr = ds.GetLayer(0) + assert lyr is not None, "Failed to get layer" + f = lyr.GetNextFeature() + + assert f["ID_GRAFIC"] == [1, 1] + assert f["N_VERTEXS"] == [20, 20] + assert f["PERIMETRE"] == [32, 32] + assert f["AREA"] == [24, 24] + assert f["N_ARCS"] == [4, 4] + assert f["N_POLIG"] == [4, 4] + check_common_attributes(f) + assert ( + f.GetGeometryRef().ExportToIsoWkt() + == "MULTIPOLYGON (((0 0,0 5,5 5,5 0,0 0),(1 1,2 1,2 2,1 2,1 1),(3 3,4 3,4 4,3 4,3 3)),((5 6,5 7,6 7,6 6,5 6)))" + ) + ds = None + + +def test_ogr_miramon_write_basic_linestring(tmp_path): + + filename = str(tmp_path / "DataSetLINESTRING") + ds = ogr.GetDriverByName("MiramonVector").CreateDataSource(filename) + srs = osr.SpatialReference() + srs.ImportFromEPSG(32631) + lyr = ds.CreateLayer("test", srs=srs, geom_type=ogr.wkbUnknown) + create_common_attributes(lyr) + f = ogr.Feature(lyr.GetLayerDefn()) + assign_common_attributes(f) + + f.SetGeometry(ogr.CreateGeometryFromWkt("LINESTRING (0 0,0 1,1 1)")) + lyr.CreateFeature(f) + f = None + ds = None + + layername = filename + "/test.arc" + ds = ogr.Open(layername) + assert ds is not None, "Failed to get dataset" + lyr = ds.GetLayer(0) + assert lyr is not None, "Failed to get layer" + f = lyr.GetNextFeature() + + assert f["ID_GRAFIC"] == [0, 0] + assert f["N_VERTEXS"] == [3, 3] + assert f["LONG_ARC"] == [2.0, 2.0] + assert f["NODE_INI"] == [0, 0] + assert f["NODE_FI"] == [1, 1] + check_common_attributes(f) + assert f.GetGeometryRef().ExportToIsoWkt() == "LINESTRING (0 0,0 1,1 1)" + ds = None + + +def test_ogr_miramon_write_basic_linestringZ(tmp_path): + + filename = str(tmp_path / "DataSetLINESTRING") + ds = ogr.GetDriverByName("MiramonVector").CreateDataSource(filename) + srs = osr.SpatialReference() + srs.ImportFromEPSG(32631) + lyr = ds.CreateLayer("test", srs=srs, geom_type=ogr.wkbUnknown) + create_common_attributes(lyr) + f = ogr.Feature(lyr.GetLayerDefn()) + assign_common_attributes(f) + + f.SetGeometry(ogr.CreateGeometryFromWkt("LINESTRING Z (0 0 4,0 1 3,1 1 2)")) + lyr.CreateFeature(f) + f = None + ds = None + + layername = filename + "/test.arc" + ds = ogr.Open(layername) + assert ds is not None, "Failed to get dataset" + lyr = ds.GetLayer(0) + assert lyr is not None, "Failed to get layer" + f = lyr.GetNextFeature() + + assert f["ID_GRAFIC"] == [0, 0] + assert f["N_VERTEXS"] == [3, 3] + assert f["LONG_ARC"] == [2.0, 2.0] + assert f["NODE_INI"] == [0, 0] + assert f["NODE_FI"] == [1, 1] + check_common_attributes(f) + assert f.GetGeometryRef().ExportToIsoWkt() == "LINESTRING Z (0 0 4,0 1 3,1 1 2)" + ds = None + + +def test_ogr_miramon_write_basic_multilinestring(tmp_path): + + filename = str(tmp_path / "DataSetMULTILINESTRING") + ds = ogr.GetDriverByName("MiramonVector").CreateDataSource(filename) + srs = osr.SpatialReference() + srs.ImportFromEPSG(32631) + lyr = ds.CreateLayer("test", srs=srs, geom_type=ogr.wkbUnknown) + create_common_attributes(lyr) + f = ogr.Feature(lyr.GetLayerDefn()) + assign_common_attributes(f) + + f.SetGeometry( + ogr.CreateGeometryFromWkt("MULTILINESTRING ((0 0,0 1,1 1),(0 0,0 3))") + ) + lyr.CreateFeature(f) + f = None + ds = None + + layername = filename + "/test.arc" + ds = ogr.Open(layername) + assert ds is not None, "Failed to get dataset" + lyr = ds.GetLayer(0) + assert lyr is not None, "Failed to get layer" + f = lyr.GetNextFeature() + + assert f["ID_GRAFIC"] == [0, 0] + assert f["N_VERTEXS"] == [3, 3] + assert f["LONG_ARC"] == [2.0, 2.0] + assert f["NODE_INI"] == 
[0, 0] + assert f["NODE_FI"] == [1, 1] + check_common_attributes(f) + assert f.GetGeometryRef().ExportToIsoWkt() == "LINESTRING (0 0,0 1,1 1)" + + f = lyr.GetNextFeature() + + assert f["ID_GRAFIC"] == [1, 1] + assert f["N_VERTEXS"] == [2, 2] + assert f["LONG_ARC"] == [3.0, 3.0] + assert f["NODE_INI"] == [2, 2] + assert f["NODE_FI"] == [3, 3] + check_common_attributes(f) + assert f.GetGeometryRef().ExportToIsoWkt() == "LINESTRING (0 0,0 3)" + + ds = None + + +def test_ogr_miramon_write_basic_point(tmp_path): + + filename = str(tmp_path / "DataSetPOINT") + ds = ogr.GetDriverByName("MiramonVector").CreateDataSource(filename) + srs = osr.SpatialReference() + srs.ImportFromEPSG(32631) + options = ["DBFEncoding=UTF8"] + lyr = ds.CreateLayer("test", srs=srs, geom_type=ogr.wkbUnknown, options=options) + create_common_attributes(lyr) + f = ogr.Feature(lyr.GetLayerDefn()) + assign_common_attributes(f) + + f.SetGeometry(ogr.CreateGeometryFromWkt("POINT (0 0)")) + lyr.CreateFeature(f) + + f.SetGeometry(ogr.CreateGeometryFromWkt("POINT (1 0)")) + lyr.CreateFeature(f) + + f = None + ds = None + + layername = filename + "/test.pnt" + ds = ogr.Open(layername) + assert ds is not None, "Failed to get dataset" + lyr = ds.GetLayer(0) + assert lyr is not None, "Failed to get layer" + f = lyr.GetNextFeature() + + assert f["ID_GRAFIC"] == [0, 0] + check_common_attributes(f) + assert f.GetGeometryRef().ExportToIsoWkt() == "POINT (0 0)" + + f = lyr.GetNextFeature() + + assert f["ID_GRAFIC"] == [1, 1] + check_common_attributes(f) + assert f.GetGeometryRef().ExportToIsoWkt() == "POINT (1 0)" + + ds = None + + +def test_ogr_miramon_write_basic_pointZ(tmp_path): + + filename = str(tmp_path / "DataSetPOINT") + ds = ogr.GetDriverByName("MiramonVector").CreateDataSource(filename) + srs = osr.SpatialReference() + srs.ImportFromEPSG(32631) + lyr = ds.CreateLayer("test", srs=srs, geom_type=ogr.wkbUnknown) + create_common_attributes(lyr) + f = ogr.Feature(lyr.GetLayerDefn()) + assign_common_attributes(f) + + f.SetGeometry(ogr.CreateGeometryFromWkt("POINT Z (0 0 6)")) + lyr.CreateFeature(f) + + f.SetGeometry(ogr.CreateGeometryFromWkt("POINT Z (1 0 5)")) + lyr.CreateFeature(f) + + f = None + ds = None + + layername = filename + "/test.pnt" + ds = ogr.Open(layername) + assert ds is not None, "Failed to get dataset" + lyr = ds.GetLayer(0) + assert lyr is not None, "Failed to get layer" + f = lyr.GetNextFeature() + + assert f["ID_GRAFIC"] == [0, 0] + check_common_attributes(f) + assert f.GetGeometryRef().ExportToIsoWkt() == "POINT Z (0 0 6)" + + f = lyr.GetNextFeature() + + assert f["ID_GRAFIC"] == [1, 1] + check_common_attributes(f) + assert f.GetGeometryRef().ExportToIsoWkt() == "POINT Z (1 0 5)" + + ds = None + + +def test_ogr_miramon_write_basic_multipoint(tmp_path): + + filename = str(tmp_path / "DataSetMULTIPOINT") + ds = ogr.GetDriverByName("MiramonVector").CreateDataSource(filename) + srs = osr.SpatialReference() + srs.ImportFromEPSG(32631) + lyr = ds.CreateLayer("test", srs=srs, geom_type=ogr.wkbUnknown) + create_common_attributes(lyr) + f = ogr.Feature(lyr.GetLayerDefn()) + assign_common_attributes(f) + + f.SetGeometry(ogr.CreateGeometryFromWkt("MULTIPOINT (0 0, 1 0)")) + lyr.CreateFeature(f) + + f = None + ds = None + + layername = filename + "/test.pnt" + ds = ogr.Open(layername) + assert ds is not None, "Failed to get dataset" + lyr = ds.GetLayer(0) + assert lyr is not None, "Failed to get layer" + f = lyr.GetNextFeature() + + assert f["ID_GRAFIC"] == [0, 0] + check_common_attributes(f) + assert 
f.GetGeometryRef().ExportToIsoWkt() == "POINT (0 0)" + + f = lyr.GetNextFeature() + + assert f["ID_GRAFIC"] == [1, 1] + check_common_attributes(f) + assert f.GetGeometryRef().ExportToIsoWkt() == "POINT (1 0)" + + ds = None diff --git a/ogr/ogrsf_frmts/miramon/ogrmiramondriver.cpp b/ogr/ogrsf_frmts/miramon/ogrmiramondriver.cpp index 6795c7895863..35e6c08ec0fd 100644 --- a/ogr/ogrsf_frmts/miramon/ogrmiramondriver.cpp +++ b/ogr/ogrsf_frmts/miramon/ogrmiramondriver.cpp @@ -145,7 +145,7 @@ void RegisterOGRMiraMon() " description='Sets which of the possible heights is chosen: " "the first, the highest or the lowest one.'>" " <Value>First</Value>" - " <Value>Lower</Value>" + " <Value>Lowest</Value>" " <Value>Highest</Value>" " </Option>" " <Option name='MultiRecordIndex' scope='vector' type='string' " diff --git a/ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp b/ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp index f798a561bc4d..503e3633860a 100644 --- a/ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp +++ b/ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp @@ -418,9 +418,21 @@ OGRMiraMonLayer::OGRMiraMonLayer(GDALDataset *poDS, const char *pszFilename, ? OFTRealList : OFTReal); else - oField.SetType(phMiraMonLayer->isListField - ? OFTIntegerList - : OFTInteger); + { + if (phMiraMonLayer->pMMBDXP->pField[nIField] + .BytesPerField < 10) + { + oField.SetType(phMiraMonLayer->isListField + ? OFTIntegerList + : OFTInteger); + } + else + { + oField.SetType(phMiraMonLayer->isListField + ? OFTInteger64List + : OFTInteger64); + } + } } // It's a serialized JSON array else if (phMiraMonLayer->iMultiRecord == @@ -1217,6 +1229,7 @@ OGRFeature *OGRMiraMonLayer::GetFeature(GIntBig nFeatureId) phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField); continue; } + MM_EXT_DBF_N_MULTIPLE_RECORDS nRealMR = 0; for (nIRecord = 0; nIRecord < phMiraMonLayer->pMultRecordIndex[nIElem].nMR; nIRecord++) @@ -1234,13 +1247,15 @@ OGRFeature *OGRMiraMonLayer::GetFeature(GIntBig nFeatureId) .BytesPerField] = '\0'; - padfValues[nIRecord] = - atof(phMiraMonLayer->szStringToOperate); + if (!MMIsEmptyString(phMiraMonLayer->szStringToOperate)) + { + padfValues[nRealMR] = + atof(phMiraMonLayer->szStringToOperate); + nRealMR++; + } } - poFeature->SetField( - nIField, phMiraMonLayer->pMultRecordIndex[nIElem].nMR, - padfValues); + poFeature->SetField(nIField, nRealMR, padfValues); } else if (poFeature->GetDefnRef() ->GetFieldDefn(nIField) @@ -1254,6 +1269,7 @@ OGRFeature *OGRMiraMonLayer::GetFeature(GIntBig nFeatureId) phMiraMonLayer->pMMBDXP->pField[nIField].BytesPerField); continue; } + MM_EXT_DBF_N_MULTIPLE_RECORDS nRealMR = 0; for (nIRecord = 0; nIRecord < phMiraMonLayer->pMultRecordIndex[nIElem].nMR; nIRecord++) @@ -1271,13 +1287,15 @@ OGRFeature *OGRMiraMonLayer::GetFeature(GIntBig nFeatureId) .BytesPerField] = '\0'; - pnInt64Values[nIRecord] = - CPLAtoGIntBig(phMiraMonLayer->szStringToOperate); + if (!MMIsEmptyString(phMiraMonLayer->szStringToOperate)) + { + pnInt64Values[nRealMR] = + CPLAtoGIntBig(phMiraMonLayer->szStringToOperate); + nRealMR++; + } } - poFeature->SetField( - nIField, phMiraMonLayer->pMultRecordIndex[nIElem].nMR, - pnInt64Values); + poFeature->SetField(nIField, nRealMR, pnInt64Values); } else if (poFeature->GetDefnRef() ->GetFieldDefn(nIField) @@ -1335,8 +1353,19 @@ OGRFeature *OGRMiraMonLayer::GetFeature(GIntBig nFeatureId) .BytesPerField] = '\0'; MM_RemoveWhitespacesFromEndOfString( phMiraMonLayer->szStringToOperate); - poFeature->SetField(nIField, - atof(phMiraMonLayer->szStringToOperate)); + + if 
(poFeature->GetDefnRef()->GetFieldDefn(nIField)->GetType() == + OFTInteger64) + { + poFeature->SetField( + nIField, + CPLAtoGIntBig(phMiraMonLayer->szStringToOperate)); + } + else + { + poFeature->SetField( + nIField, atof(phMiraMonLayer->szStringToOperate)); + } } else if (poFeature->GetDefnRef() ->GetFieldDefn(nIField) @@ -2140,8 +2169,6 @@ OGRErr OGRMiraMonLayer::TranslateFieldsValuesToMM(OGRFeature *poFeature) // It will contains the i-th element of the list. for (nIRecord = 0; nIRecord < nRealNumRecords; nIRecord++) { - hMMFeature.pRecords[nIRecord].nNumField = nNumFields; - if (MMResizeMiraMonFieldValue( &(hMMFeature.pRecords[nIRecord].pField), &hMMFeature.pRecords[nIRecord].nMaxField, @@ -2184,8 +2211,6 @@ OGRErr OGRMiraMonLayer::TranslateFieldsValuesToMM(OGRFeature *poFeature) // It will contains the i-th element of the list. for (nIRecord = 0; nIRecord < nRealNumRecords; nIRecord++) { - hMMFeature.pRecords[nIRecord].nNumField = nNumFields; - if (MMResizeMiraMonFieldValue( &(hMMFeature.pRecords[nIRecord].pField), &hMMFeature.pRecords[nIRecord].nMaxField, @@ -2229,8 +2254,6 @@ OGRErr OGRMiraMonLayer::TranslateFieldsValuesToMM(OGRFeature *poFeature) // It will contains the i-th element of the list. for (nIRecord = 0; nIRecord < nRealNumRecords; nIRecord++) { - hMMFeature.pRecords[nIRecord].nNumField = iField; - if (MMResizeMiraMonFieldValue( &(hMMFeature.pRecords[nIRecord].pField), &hMMFeature.pRecords[nIRecord].nMaxField, From e440870001312795fa05b894195910e47adf3e51 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 23 Apr 2024 19:41:00 +0200 Subject: [PATCH 156/230] CMake: require TileDB >= 2.15 --- cmake/helpers/CheckDependentLibraries.cmake | 2 +- doc/source/development/building_from_source.rst | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/cmake/helpers/CheckDependentLibraries.cmake b/cmake/helpers/CheckDependentLibraries.cmake index 6eeb5d880066..fd06585acf00 100644 --- a/cmake/helpers/CheckDependentLibraries.cmake +++ b/cmake/helpers/CheckDependentLibraries.cmake @@ -751,7 +751,7 @@ gdal_check_package(Crnlib "enable gdal_DDS driver" CAN_DISABLE) gdal_check_package(basisu "Enable BASISU driver" CONFIG CAN_DISABLE) gdal_check_package(IDB "enable ogr_IDB driver" CAN_DISABLE) gdal_check_package(rdb "enable RIEGL RDB library" CONFIG CAN_DISABLE) -gdal_check_package(TileDB "enable TileDB driver" CONFIG CAN_DISABLE VERSION "2.7") +gdal_check_package(TileDB "enable TileDB driver" CONFIG CAN_DISABLE VERSION "2.15") gdal_check_package(OpenEXR "OpenEXR >=2.2" CAN_DISABLE) gdal_check_package(MONGOCXX "Enable MongoDBV3 driver" CAN_DISABLE) diff --git a/doc/source/development/building_from_source.rst b/doc/source/development/building_from_source.rst index bed99ec09c82..af4eaa44fef6 100644 --- a/doc/source/development/building_from_source.rst +++ b/doc/source/development/building_from_source.rst @@ -1907,6 +1907,8 @@ TileDB The `TileDB <https://github.com/TileDB-Inc/TileDB>` library is required for the :ref:`raster.tiledb` driver. Specify install prefix in the ``CMAKE_PREFIX_PATH`` variable. +TileDB >= 2.15 is required since GDAL 3.9 + .. option:: GDAL_USE_TILEDB=ON/OFF Control whether to use TileDB. Defaults to ON when TileDB is found. 
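A minimal configure sketch for the raised TileDB requirement above, assuming a hypothetical local install prefix (the path is illustrative only and not part of the patch):

    # /opt/tiledb-2.15 is an assumed install prefix; point CMAKE_PREFIX_PATH at the actual TileDB >= 2.15 location
    cmake -S . -B build -DCMAKE_PREFIX_PATH=/opt/tiledb-2.15 -DGDAL_USE_TILEDB=ON

With the version floor raised to 2.15 in CheckDependentLibraries.cmake, an older TileDB install is rejected at configure time instead of failing later during the build.
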
From 7fcdc92ff2af401407ba8d420f53004d990daca4 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 23 Apr 2024 20:05:19 +0200 Subject: [PATCH 157/230] cppcheck.sh: avoid warnings related to PROJ --- ogr/ogrspatialreference.cpp | 1 + scripts/cppcheck.sh | 3 +++ 2 files changed, 4 insertions(+) diff --git a/ogr/ogrspatialreference.cpp b/ogr/ogrspatialreference.cpp index b2d4c94d5364..5948a18df705 100644 --- a/ogr/ogrspatialreference.cpp +++ b/ogr/ogrspatialreference.cpp @@ -4256,6 +4256,7 @@ OGRErr OGRSpatialReference::importFromURNPart(const char *pszAuthority, const char *pszURN) { #if PROJ_AT_LEAST_VERSION(8, 1, 0) + (void)this; (void)pszAuthority; (void)pszCode; (void)pszURN; diff --git a/scripts/cppcheck.sh b/scripts/cppcheck.sh index 80d4f9b3c029..ce53b7017555 100755 --- a/scripts/cppcheck.sh +++ b/scripts/cppcheck.sh @@ -99,6 +99,9 @@ for dirname in alg port gcore ogr frmts gnm apps fuzzers; do -DFLT_EVAL_METHOD \ -DKDU_HAS_ROI_RECT \ -Dflatbuffers=gdal_flatbuffers \ + -DPROJ_VERSION_MAJOR=9 \ + -DPROJ_VERSION_MINOR=4 \ + -DPROJ_VERSION_PATCH=0 \ --include="${CPL_CONFIG_H}" \ --include=port/cpl_port.h \ -I "${CPL_CONFIG_H_DIR}" \ From 430767cf3cf86ceaf902a32948bee467dedf25f4 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 23 Apr 2024 19:24:22 +0200 Subject: [PATCH 158/230] Add OSRIsDerivedProjected() / OGRSpatialReference::IsDerivedProjected() --- ogr/ogr_spatialref.h | 1 + ogr/ogr_srs_api.h | 1 + ogr/ogrspatialreference.cpp | 47 ++++++++++++++++++++++++++++++++++++- swig/include/osr.i | 4 ++++ 4 files changed, 52 insertions(+), 1 deletion(-) diff --git a/ogr/ogr_spatialref.h b/ogr/ogr_spatialref.h index 8753e2bdecac..588c6e350854 100644 --- a/ogr/ogr_spatialref.h +++ b/ogr/ogr_spatialref.h @@ -387,6 +387,7 @@ class CPL_DLL OGRSpatialReference int IsGeographic() const; int IsDerivedGeographic() const; int IsProjected() const; + int IsDerivedProjected() const; int IsGeocentric() const; bool IsDynamic() const; diff --git a/ogr/ogr_srs_api.h b/ogr/ogr_srs_api.h index c7a8ec5d9f3c..a92d40237be0 100644 --- a/ogr/ogr_srs_api.h +++ b/ogr/ogr_srs_api.h @@ -553,6 +553,7 @@ int CPL_DLL OSRIsGeographic(OGRSpatialReferenceH); int CPL_DLL OSRIsDerivedGeographic(OGRSpatialReferenceH); int CPL_DLL OSRIsLocal(OGRSpatialReferenceH); int CPL_DLL OSRIsProjected(OGRSpatialReferenceH); +int CPL_DLL OSRIsDerivedProjected(OGRSpatialReferenceH); int CPL_DLL OSRIsCompound(OGRSpatialReferenceH); int CPL_DLL OSRIsGeocentric(OGRSpatialReferenceH); int CPL_DLL OSRIsVertical(OGRSpatialReferenceH); diff --git a/ogr/ogrspatialreference.cpp b/ogr/ogrspatialreference.cpp index 5948a18df705..fad5d53c090c 100644 --- a/ogr/ogrspatialreference.cpp +++ b/ogr/ogrspatialreference.cpp @@ -8859,7 +8859,7 @@ int OGRSpatialReference::IsDerivedGeographic() const /* OSRIsDerivedGeographic() */ /************************************************************************/ /** - * \brief Check if derived geographic coordinate system. + * \brief Check if the CRS is a derived geographic coordinate system. * (for example a rotated long/lat grid) * * This function is the same as OGRSpatialReference::IsDerivedGeographic(). 
@@ -8872,6 +8872,51 @@ int OSRIsDerivedGeographic(OGRSpatialReferenceH hSRS) return ToPointer(hSRS)->IsDerivedGeographic(); } +/************************************************************************/ +/* IsDerivedProjected() */ +/************************************************************************/ + +/** + * \brief Check if the CRS is a derived projected coordinate system. + * + * This method is the same as the C function OSRIsDerivedGeographic(). + * + * @since GDAL 3.9.0 (and may only return non-zero starting with PROJ 9.2.0) + */ + +int OGRSpatialReference::IsDerivedProjected() const + +{ +#if PROJ_AT_LEAST_VERSION(9, 2, 0) + d->refreshProjObj(); + d->demoteFromBoundCRS(); + const bool isDerivedProjected = + d->m_pjType == PJ_TYPE_DERIVED_PROJECTED_CRS; + d->undoDemoteFromBoundCRS(); + return isDerivedProjected ? TRUE : FALSE; +#else + return FALSE; +#endif +} + +/************************************************************************/ +/* OSRIsDerivedProjected() */ +/************************************************************************/ +/** + * \brief Check if the CRS is a derived projected coordinate system. + * + * This function is the same as OGRSpatialReference::IsDerivedProjected(). + * + * @since GDAL 3.9.0 (and may only return non-zero starting with PROJ 9.2.0) + */ +int OSRIsDerivedProjected(OGRSpatialReferenceH hSRS) + +{ + VALIDATE_POINTER1(hSRS, "OSRIsDerivedProjected", 0); + + return ToPointer(hSRS)->IsDerivedProjected(); +} + /************************************************************************/ /* IsLocal() */ /************************************************************************/ diff --git a/swig/include/osr.i b/swig/include/osr.i index 59abe8d19202..ebd9424e608c 100644 --- a/swig/include/osr.i +++ b/swig/include/osr.i @@ -363,6 +363,10 @@ public: return OSRIsProjected(self); } + int IsDerivedProjected() { + return OSRIsDerivedProjected(self); + } + int IsCompound() { return OSRIsCompound(self); } From 29c9be19952ec8a4d2b2a1fb8d2e1f86bd981e8b Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 23 Apr 2024 17:58:02 +0200 Subject: [PATCH 159/230] OGR_CT: use PROJJSON rather than in WKT:2019 We export CRS objects to PROJJSON rather than WKT2:2019 because PROJJSON is a bit more verbose, which helps in situations like https://github.com/OSGeo/gdal/issues/9732 / https://github.com/OSGeo/PROJ/pull/4124 where we want to export a DerivedProjectedCRS whose base ProjectedCRS has non-metre axis. 
Fixes https://github.com/OSGeo/gdal/issues/9732 --- autotest/osr/osr_ct.py | 55 ++++++++++++++++++++++++++++++++++++++++++ ogr/ogrct.cpp | 21 ++++++++++++---- 2 files changed, 71 insertions(+), 5 deletions(-) diff --git a/autotest/osr/osr_ct.py b/autotest/osr/osr_ct.py index e79e557b7ec4..4b1d326d7227 100755 --- a/autotest/osr/osr_ct.py +++ b/autotest/osr/osr_ct.py @@ -898,3 +898,58 @@ def test_osr_ct_source_and_target_mapping_minus_two_one(): x, y, _ = ct.TransformPoint(10, 20, 0) assert x == pytest.approx(10) assert y == pytest.approx(20) + + +############################################################################### +# Test bug fix for https://github.com/OSGeo/gdal/issues/9732 + + +@pytest.mark.require_proj(9, 2) +def test_osr_ct_fix_9732(): + + s = osr.SpatialReference() + s.ImportFromEPSG(6453) + + assert not s.IsDerivedProjected() + + t = osr.SpatialReference() + t.SetFromUserInput( + """DERIVEDPROJCRS["Ground for NAD83(2011) / Idaho West (ftUS)", + BASEPROJCRS["NAD83(2011) / Idaho West (ftUS)", + BASEGEOGCRS["NAD83(2011)", + DATUM["NAD83 (National Spatial Reference System 2011)",ELLIPSOID["GRS 1980",6378137,298.257222101,LENGTHUNIT["metre",1]]], + PRIMEM["Greenwich",0,ANGLEUNIT["degree",0.0174532925199433]], + ID["EPSG",6318]], + CONVERSION["SPCS83 Idaho West zone (US Survey feet)", + METHOD["Transverse Mercator",ID["EPSG",9807]], + PARAMETER["Latitude of natural origin",41.6666666666667,ANGLEUNIT["degree",0.0174532925199433],ID["EPSG",8801]], + PARAMETER["Longitude of natural origin",-115.75,ANGLEUNIT["degree",0.0174532925199433],ID["EPSG",8802]], + PARAMETER["Scale factor at natural origin",0.999933333,SCALEUNIT["unity",1],ID["EPSG",8805]], + PARAMETER["False easting",2624666.667,LENGTHUNIT["US survey foot",0.304800609601219],ID["EPSG",8806]], + PARAMETER["False northing",0,LENGTHUNIT["US survey foot",0.304800609601219],ID["EPSG",8807]]], + CS[Cartesian,2], + AXIS["easting (X)",east,ORDER[1],LENGTHUNIT["US survey foot",0.304800609601219]], + AXIS["northing (Y)",north,ORDER[2],LENGTHUNIT["US survey foot",0.304800609601219]], + USAGE[SCOPE["Engineering survey, topographic mapping."],AREA["United States (USA) - Idaho - counties of Ada; Adams; Benewah; Boise; Bonner; Boundary; Canyon; Clearwater; Elmore; Gem; Idaho; Kootenai; Latah; Lewis; Nez Perce; Owyhee; Payette; Shoshone; Valley; Washington."],BBOX[41.99,-117.24,49.01,-114.32]], + ID["EPSG",6453]], + DERIVINGCONVERSION["Grid to ground", + METHOD["Similarity transformation",ID["EPSG",9621]], + PARAMETER["Ordinate 1 of evaluation point in target CRS",1000,LENGTHUNIT["US survey foot",0.304800609601219],ID["EPSG",8621]], + PARAMETER["Ordinate 2 of evaluation point in target CRS",0,LENGTHUNIT["US survey foot",0.304800609601219],ID["EPSG",8622]], + PARAMETER["Scale factor for source CRS axes",1,SCALEUNIT["unity",1],ID["EPSG",1061]], + PARAMETER["Rotation angle of source CRS axes", 0,ANGLEUNIT["degree",0.0],ID["EPSG",8614]]], + CS[Cartesian,2], + AXIS["easting (X)",east, + ORDER[1], + LENGTHUNIT["US survey foot",0.304800609601219]], + AXIS["northing (Y)",north, + ORDER[2], + LENGTHUNIT["US survey foot",0.304800609601219]]]""" + ) + + assert t.IsDerivedProjected() + + ct = osr.CoordinateTransformation(s, t) + x, y, _ = ct.TransformPoint(2300000, 2000000, 0) + assert x == pytest.approx(2301000) + assert y == pytest.approx(2000000) diff --git a/ogr/ogrct.cpp b/ogr/ogrct.cpp index baf3da169880..e1fd09e67a4b 100644 --- a/ogr/ogrct.cpp +++ b/ogr/ogrct.cpp @@ -177,13 +177,12 @@ void 
OGRCoordinateTransformationOptions::Private::RefreshCheckWithInvertProj() } /************************************************************************/ -/* GetWktOrProjString() */ +/* GetAsAProjRecognizableString() */ /************************************************************************/ -static char *GetWktOrProjString(const OGRSpatialReference *poSRS) +static char *GetAsAProjRecognizableString(const OGRSpatialReference *poSRS) { CPLErrorStateBackuper oErrorStateBackuper(CPLQuietErrorHandler); - const char *const apszOptionsWKT2_2018[] = {"FORMAT=WKT2_2018", nullptr}; // If there's a PROJ4 EXTENSION node in WKT1, then use // it. For example when dealing with "+proj=longlat +lon_wrap=180" char *pszText = nullptr; @@ -197,8 +196,20 @@ static char *GetWktOrProjString(const OGRSpatialReference *poSRS) pszText = CPLStrdup(tmpText.c_str()); } } + else if (poSRS->IsEmpty()) + { + pszText = CPLStrdup(""); + } else - poSRS->exportToWkt(&pszText, apszOptionsWKT2_2018); + { + // We export to PROJJSON rather than WKT2:2019 because PROJJSON + // is a bit more verbose, which helps in situations like + // https://github.com/OSGeo/gdal/issues/9732 / + // https://github.com/OSGeo/PROJ/pull/4124 where we want to export + // a DerivedProjectedCRS whose base ProjectedCRS has non-metre axis. + poSRS->exportToPROJJSON(&pszText, nullptr); + } + return pszText; } @@ -269,7 +280,7 @@ static char *GetTextRepresentation(const OGRSpatialReference *poSRS) } if (pszText == nullptr) { - pszText = GetWktOrProjString(poSRS); + pszText = GetAsAProjRecognizableString(poSRS); } return pszText; } From e4e71a89cae863a9d84b476679245680e71ecbbc Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 23 Apr 2024 20:10:54 +0200 Subject: [PATCH 160/230] TileDB: cleanups related to requiring TileDB >= 2.7 --- frmts/tiledb/tiledbdense.cpp | 137 +---------------------------------- 1 file changed, 2 insertions(+), 135 deletions(-) diff --git a/frmts/tiledb/tiledbdense.cpp b/frmts/tiledb/tiledbdense.cpp index e431faf60947..40565fb2e812 100644 --- a/frmts/tiledb/tiledbdense.cpp +++ b/frmts/tiledb/tiledbdense.cpp @@ -107,8 +107,6 @@ static const char *index_type_name(TILEDB_INTERLEAVE_MODE eMode) /* SetBuffer() */ /************************************************************************/ -#if ((TILEDB_VERSION_MAJOR > 2) || \ - (TILEDB_VERSION_MAJOR == 2 && TILEDB_VERSION_MINOR >= 4)) static CPLErr SetBuffer(tiledb::Query *poQuery, GDALDataType eType, const CPLString &osAttrName, void *pImage, int nSize) { @@ -175,74 +173,6 @@ static CPLErr SetBuffer(tiledb::Query *poQuery, GDALDataType eType, } return CE_None; } -#else -static CPLErr SetBuffer(tiledb::Query *poQuery, GDALDataType eType, - const CPLString &osAttrName, void *pImage, int nSize) -{ - switch (eType) - { - case GDT_Byte: - poQuery->set_buffer( - osAttrName, reinterpret_cast<unsigned char *>(pImage), nSize); - break; - case GDT_Int8: - poQuery->set_buffer(osAttrName, reinterpret_cast<int8_t *>(pImage), - nSize); - break; - case GDT_UInt16: - poQuery->set_buffer( - osAttrName, reinterpret_cast<unsigned short *>(pImage), nSize); - break; - case GDT_UInt32: - poQuery->set_buffer( - osAttrName, reinterpret_cast<unsigned int *>(pImage), nSize); - break; - case GDT_UInt64: - poQuery->set_buffer(osAttrName, - reinterpret_cast<uint64_t *>(pImage), nSize); - break; - case GDT_Int16: - poQuery->set_buffer(osAttrName, reinterpret_cast<short *>(pImage), - nSize); - break; - case GDT_Int32: - poQuery->set_buffer(osAttrName, reinterpret_cast<int *>(pImage), - 
nSize); - break; - case GDT_Int64: - poQuery->set_buffer(osAttrName, reinterpret_cast<int64_t *>(pImage), - nSize); - break; - case GDT_Float32: - poQuery->set_buffer(osAttrName, reinterpret_cast<float *>(pImage), - nSize); - break; - case GDT_Float64: - poQuery->set_buffer(osAttrName, reinterpret_cast<double *>(pImage), - nSize); - break; - case GDT_CInt16: - poQuery->set_buffer(osAttrName, reinterpret_cast<short *>(pImage), - nSize * 2); - break; - case GDT_CInt32: - poQuery->set_buffer(osAttrName, reinterpret_cast<int *>(pImage), - nSize * 2); - break; - case GDT_CFloat32: - poQuery->set_buffer(osAttrName, reinterpret_cast<float *>(pImage), - nSize * 2); - break; - case GDT_CFloat64: - poQuery->set_buffer(osAttrName, reinterpret_cast<double *>(pImage), - nSize * 2); - break; - default: - return CE_Failure; - } - return CE_None; -} -#endif /************************************************************************/ /* TileDBRasterBand() */ @@ -683,11 +613,7 @@ CPLErr TileDBRasterDataset::TrySaveXML() if (psTree == nullptr) { /* If we have unset all metadata, we have to delete the PAM file */ -#if TILEDB_VERSION_MAJOR == 1 && TILEDB_VERSION_MINOR < 7 - vfs.remove_file(psPam->pszPamFilename); -#else m_array->delete_metadata(GDAL_ATTRIBUTE_NAME); -#endif return CE_None; } @@ -757,11 +683,9 @@ CPLErr TileDBRasterDataset::TrySaveXML() /* Try saving the auxiliary metadata. */ /* -------------------------------------------------------------------- */ - bool bSaved = false; CPLErrorHandlerPusher oQuietError(CPLQuietErrorHandler); char *pszTree = CPLSerializeXMLTree(psTree); -#if TILEDB_VERSION_MAJOR > 1 || TILEDB_VERSION_MINOR >= 7 if (eAccess == GA_ReadOnly) { if (nTimestamp) @@ -791,64 +715,8 @@ CPLErr TileDBRasterDataset::TrySaveXML() static_cast<int>(strlen(pszTree)), pszTree); } - bSaved = true; -#endif - - // cppcheck-suppress knownConditionTrueFalse - if (!bSaved) - { - vfs.touch(psPam->pszPamFilename); - tiledb::VFS::filebuf fbuf(vfs); - fbuf.open(psPam->pszPamFilename, std::ios::out); - std::ostream os(&fbuf); - - if (os.good()) - { - os.write(pszTree, strlen(pszTree)); - bSaved = true; - } - - fbuf.close(); - } - CPLFree(pszTree); - /* -------------------------------------------------------------------- - */ - /* If it fails, check if we have a proxy directory for auxiliary */ - /* metadata to be stored in, and try to save there. 
*/ - /* -------------------------------------------------------------------- - */ - CPLErr eErr = CE_None; - - if (bSaved) - eErr = CE_None; - else - { - const char *pszBasename = GetDescription(); - - if (psPam->osPhysicalFilename.length() > 0) - pszBasename = psPam->osPhysicalFilename; - - const char *pszNewPam = nullptr; - if (PamGetProxy(pszBasename) == nullptr && - ((pszNewPam = PamAllocateProxy(pszBasename)) != nullptr)) - { - CPLErrorReset(); - CPLFree(psPam->pszPamFilename); - psPam->pszPamFilename = CPLStrdup(pszNewPam); - eErr = TrySaveXML(); - } - /* No way we can save into a /vsicurl resource */ - else if (!VSISupportsSequentialWrite(psPam->pszPamFilename, false)) - { - CPLError(CE_Warning, CPLE_AppDefined, - "Unable to save auxiliary information in %s.", - psPam->pszPamFilename); - eErr = CE_Warning; - } - } - /* -------------------------------------------------------------------- */ /* Cleanup */ @@ -857,7 +725,7 @@ CPLErr TileDBRasterDataset::TrySaveXML() if (psTree) CPLDestroyXMLNode(psTree); - return eErr; + return CE_None; } catch (const std::exception &e) { @@ -927,7 +795,6 @@ CPLErr TileDBRasterDataset::TryLoadCachedXML(char ** /*papszSiblingFiles*/, { CPLErrorHandlerPusher oQuietError(CPLQuietErrorHandler); -#if TILEDB_VERSION_MAJOR > 1 || TILEDB_VERSION_MINOR >= 7 if (bReload) { tiledb_datatype_t v_type = @@ -957,7 +824,7 @@ CPLErr TileDBRasterDataset::TryLoadCachedXML(char ** /*papszSiblingFiles*/, } psTree = CPLParseXMLString(osMetaDoc); } -#endif + if (bReload && psTree == nullptr && vfs.is_file(psPam->pszPamFilename)) { From 0a202ae4ca0cbdb68b9febb50a24e582c67da4d5 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 23 Apr 2024 19:42:53 +0200 Subject: [PATCH 161/230] TileDB: cleanups related to requiring TileDB >= 2.9 --- autotest/ogr/ogr_tiledb.py | 5 ----- frmts/tiledb/include_tiledb.h | 5 ----- frmts/tiledb/tiledbcommon.cpp | 4 ---- frmts/tiledb/tiledbdrivercore.cpp | 2 -- frmts/tiledb/tiledbsparse.cpp | 14 -------------- 5 files changed, 30 deletions(-) diff --git a/autotest/ogr/ogr_tiledb.py b/autotest/ogr/ogr_tiledb.py index e9fcf1f2c2bd..2311a3f6f962 100644 --- a/autotest/ogr/ogr_tiledb.py +++ b/autotest/ogr/ogr_tiledb.py @@ -1029,11 +1029,6 @@ def test_ogr_tiledb_switch_between_read_and_write(): def test_ogr_tiledb_create_group(): - if "CREATE_GROUP" not in gdal.GetDriverByName("TileDB").GetMetadataItem( - gdal.DMD_CREATIONOPTIONLIST - ): - pytest.skip("CREATE_GROUP not supported in TileDB < 2.9") - if os.path.exists("tmp/test.tiledb"): shutil.rmtree("tmp/test.tiledb") diff --git a/frmts/tiledb/include_tiledb.h b/frmts/tiledb/include_tiledb.h index cec9844846ac..9eecd245d561 100644 --- a/frmts/tiledb/include_tiledb.h +++ b/frmts/tiledb/include_tiledb.h @@ -42,11 +42,6 @@ #include "tiledb/tiledb_experimental" #endif -#if TILEDB_VERSION_MAJOR > 2 || \ - (TILEDB_VERSION_MAJOR == 2 && TILEDB_VERSION_MINOR >= 9) -#define HAS_TILEDB_GROUP -#endif - #if TILEDB_VERSION_MAJOR > 2 || \ (TILEDB_VERSION_MAJOR == 2 && TILEDB_VERSION_MINOR >= 10) #define HAS_TILEDB_BOOL diff --git a/frmts/tiledb/tiledbcommon.cpp b/frmts/tiledb/tiledbcommon.cpp index 5679a6f5b9eb..eb38195c9424 100644 --- a/frmts/tiledb/tiledbcommon.cpp +++ b/frmts/tiledb/tiledbcommon.cpp @@ -150,14 +150,12 @@ int TileDBDataset::Identify(GDALOpenInfo *poOpenInfo) CPLString osArrayPath = TileDBDataset::VSI_to_tiledb_uri(poOpenInfo->pszFilename); const auto eType = tiledb::Object::object(ctx, osArrayPath).type(); -#ifdef HAS_TILEDB_GROUP if ((poOpenInfo->nOpenFlags & 
GDAL_OF_VECTOR) != 0) { if (eType == tiledb::Object::Type::Array || eType == tiledb::Object::Type::Group) return true; } -#endif #ifdef HAS_TILEDB_MULTIDIM if ((poOpenInfo->nOpenFlags & GDAL_OF_MULTIDIM_RASTER) != 0) { @@ -259,7 +257,6 @@ GDALDataset *TileDBDataset::Open(GDALOpenInfo *poOpenInfo) TileDBDataset::VSI_to_tiledb_uri(poOpenInfo->pszFilename); const auto eType = tiledb::Object::object(oCtx, osPath).type(); -#ifdef HAS_TILEDB_GROUP if ((poOpenInfo->nOpenFlags & GDAL_OF_VECTOR) != 0 && eType == tiledb::Object::Type::Group) { @@ -314,7 +311,6 @@ GDALDataset *TileDBDataset::Open(GDALOpenInfo *poOpenInfo) } return nullptr; } -#endif #endif tiledb::ArraySchema schema(oCtx, osPath); diff --git a/frmts/tiledb/tiledbdrivercore.cpp b/frmts/tiledb/tiledbdrivercore.cpp index d9ae98c5da1d..b83b94e45370 100644 --- a/frmts/tiledb/tiledbdrivercore.cpp +++ b/frmts/tiledb/tiledbdrivercore.cpp @@ -149,11 +149,9 @@ void TileDBDriverSetCommonMetadata(GDALDriver *poDriver) " <Option name='BOUNDS' scope='raster' type='string' " "description='Specify " "bounds for sparse array, minx, miny, maxx, maxy'/>\n" -#ifdef HAS_TILEDB_GROUP " <Option name='CREATE_GROUP' scope='vector' type='boolean' " "description='Whether to create a group for multiple layer support' " "default='NO'/>" -#endif "</CreationOptionList>\n"); // clang-format off diff --git a/frmts/tiledb/tiledbsparse.cpp b/frmts/tiledb/tiledbsparse.cpp index 1d062d69d818..37f7827f6a2e 100644 --- a/frmts/tiledb/tiledbsparse.cpp +++ b/frmts/tiledb/tiledbsparse.cpp @@ -274,7 +274,6 @@ GDALDataset *OGRTileDBDataset::Open(GDALOpenInfo *poOpenInfo, }; CPL_IGNORE_RET_VAL(objectType); -#ifdef HAS_TILEDB_GROUP if (objectType == tiledb::Object::Type::Group) { poDS->m_osGroupName = osFilename; @@ -293,7 +292,6 @@ GDALDataset *OGRTileDBDataset::Open(GDALOpenInfo *poOpenInfo, } } else -#endif { if (!AddLayer(osFilename)) return nullptr; @@ -347,7 +345,6 @@ OGRTileDBDataset::ICreateLayer(const char *pszName, return nullptr; } -#ifdef HAS_TILEDB_GROUP if (!m_osGroupName.empty() && strchr(pszName, '/')) { // Otherwise a layer name wit ha slash when groups are enabled causes @@ -357,7 +354,6 @@ OGRTileDBDataset::ICreateLayer(const char *pszName, "Slash is not supported in layer name"); return nullptr; } -#endif const auto eGType = poGeomFieldDefn ? poGeomFieldDefn->GetType() : wkbNone; const auto poSpatialRef = @@ -365,17 +361,11 @@ OGRTileDBDataset::ICreateLayer(const char *pszName, if (m_osGroupName.empty() && !m_apoLayers.empty()) { -#ifdef HAS_TILEDB_GROUP CPLError(CE_Failure, CPLE_NotSupported, "CreateLayer() failed: no more than one layer per dataset " "supported on a array object. 
Create a dataset with the " "CREATE_GROUP=YES creation option or open such group " "to enable multiple layer creation."); -#else - CPLError(CE_Failure, CPLE_NotSupported, - "CreateLayer() failed: no more than one layer per dataset " - "supported on a array object."); -#endif return nullptr; } @@ -565,7 +555,6 @@ GDALDataset *OGRTileDBDataset::Create(const char *pszFilename, poDS->m_ctx.reset(new tiledb::Context()); } -#ifdef HAS_TILEDB_GROUP if (CPLTestBool(CSLFetchNameValueDef(papszOptions, "CREATE_GROUP", "NO"))) { try @@ -579,7 +568,6 @@ GDALDataset *OGRTileDBDataset::Create(const char *pszFilename, } poDS->m_osGroupName = poDS->GetDescription(); } -#endif return poDS.release(); } @@ -3754,13 +3742,11 @@ void OGRTileDBLayer::InitializeSchemaAndArray() tiledb::Array::create(m_osFilename, *m_schema); -#ifdef HAS_TILEDB_GROUP if (!m_osGroupName.empty()) { tiledb::Group group(*m_ctx, m_osGroupName, TILEDB_WRITE); group.add_member(m_osFilename, false, GetDescription()); } -#endif if (m_nTimestamp) m_array.reset(new tiledb::Array( From 98001f5533056ec745a0935a4685c1388db0c8d2 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 23 Apr 2024 19:49:40 +0200 Subject: [PATCH 162/230] TileDB: cleanups related to requiring TileDB >= 2.10 --- autotest/ogr/ogr_tiledb.py | 60 +++++++++--------------- frmts/tiledb/include_tiledb.h | 5 -- frmts/tiledb/tiledbdrivercore.cpp | 7 +-- frmts/tiledb/tiledbsparse.cpp | 77 ++++++------------------------- 4 files changed, 38 insertions(+), 111 deletions(-) diff --git a/autotest/ogr/ogr_tiledb.py b/autotest/ogr/ogr_tiledb.py index 2311a3f6f962..ae41ed3fb492 100644 --- a/autotest/ogr/ogr_tiledb.py +++ b/autotest/ogr/ogr_tiledb.py @@ -45,7 +45,7 @@ ############################################################################### -def create_tiledb_dataset(nullable, batch_size, include_bool, extra_feature=False): +def create_tiledb_dataset(nullable, batch_size, extra_feature=False): ds = ogr.GetDriverByName("TileDB").CreateDataSource("tmp/test.tiledb") srs = osr.SpatialReference() @@ -79,11 +79,10 @@ def create_tiledb_dataset(nullable, batch_size, include_bool, extra_feature=Fals with gdal.config_option("TILEDB_INT_TYPE", "UINT16"): lyr.CreateField(fld_defn) - if include_bool: - fld_defn = ogr.FieldDefn("boolfield", ogr.OFTInteger) - fld_defn.SetNullable(nullable) - fld_defn.SetSubType(ogr.OFSTBoolean) - lyr.CreateField(fld_defn) + fld_defn = ogr.FieldDefn("boolfield", ogr.OFTInteger) + fld_defn.SetNullable(nullable) + fld_defn.SetSubType(ogr.OFSTBoolean) + lyr.CreateField(fld_defn) fld_defn = ogr.FieldDefn("int64field", ogr.OFTInteger64) fld_defn.SetNullable(nullable) @@ -111,11 +110,10 @@ def create_tiledb_dataset(nullable, batch_size, include_bool, extra_feature=Fals fld_defn.SetSubType(ogr.OFSTInt16) lyr.CreateField(fld_defn) - if include_bool: - fld_defn = ogr.FieldDefn("boollistfield", ogr.OFTIntegerList) - fld_defn.SetNullable(nullable) - fld_defn.SetSubType(ogr.OFSTBoolean) - lyr.CreateField(fld_defn) + fld_defn = ogr.FieldDefn("boollistfield", ogr.OFTIntegerList) + fld_defn.SetNullable(nullable) + fld_defn.SetSubType(ogr.OFSTBoolean) + lyr.CreateField(fld_defn) fld_defn = ogr.FieldDefn("doublelistfield", ogr.OFTRealList) fld_defn.SetNullable(nullable) @@ -148,8 +146,7 @@ def create_tiledb_dataset(nullable, batch_size, include_bool, extra_feature=Fals f["strfield"] = "foo" f["intfield"] = -123456789 f["int16field"] = -32768 - if include_bool: - f["boolfield"] = True + f["boolfield"] = True f["uint8field"] = 0 f["uint16field"] = 0 
f["int64field"] = -1234567890123456 @@ -158,8 +155,7 @@ def create_tiledb_dataset(nullable, batch_size, include_bool, extra_feature=Fals f["binaryfield"] = b"\xDE\xAD\xBE\xEF" f["intlistfield"] = [-123456789, 123] f["int16listfield"] = [-32768, 32767] - if include_bool: - f["boollistfield"] = [True, False] + f["boollistfield"] = [True, False] f["doublelistfield"] = [1.2345, -1.2345] f["floatlistfield"] = [1.5, -1.5, 0] f["datetimefield"] = "2023-04-07T12:34:56.789Z" @@ -184,8 +180,7 @@ def create_tiledb_dataset(nullable, batch_size, include_bool, extra_feature=Fals f["strfield"] = "barbaz" f["intfield"] = 123456789 f["int16field"] = 32767 - if include_bool: - f["boolfield"] = False + f["boolfield"] = False f["uint8field"] = 255 f["uint16field"] = 65535 f["int64field"] = 1234567890123456 @@ -194,8 +189,7 @@ def create_tiledb_dataset(nullable, batch_size, include_bool, extra_feature=Fals f["binaryfield"] = b"\xBE\xEF\xDE\xAD" f["intlistfield"] = [123456789, -123] f["int16listfield"] = [32767, -32768] - if include_bool: - f["boollistfield"] = [False, True] + f["boollistfield"] = [False, True] f["doublelistfield"] = [-1.2345, 1.2345] f["floatlistfield"] = [0.0, -1.5, 1.5] # Will be transformed to "2023/04/07 10:19:56.789+00" @@ -212,8 +206,7 @@ def create_tiledb_dataset(nullable, batch_size, include_bool, extra_feature=Fals f["strfield"] = "something" f["intfield"] = 8765432 f["int16field"] = 32767 - if include_bool: - f["boolfield"] = False + f["boolfield"] = False f["uint8field"] = 255 f["uint16field"] = 65535 f["int64field"] = 9876543210123456 @@ -222,8 +215,7 @@ def create_tiledb_dataset(nullable, batch_size, include_bool, extra_feature=Fals f["binaryfield"] = b"\xDE\xAD\xBE\xEF" f["intlistfield"] = [-123456789, -123] f["int16listfield"] = [32767, -32768] - if include_bool: - f["boollistfield"] = [False, True] + f["boollistfield"] = [False, True] f["doublelistfield"] = [-1.2345, 1.2345] f["floatlistfield"] = [0.0, -1.5, 1.5] # Will be transformed to "2023/04/07 10:19:56.789+00" @@ -248,7 +240,7 @@ def test_ogr_tiledb_basic(nullable, batch_size): if os.path.exists("tmp/test.tiledb"): shutil.rmtree("tmp/test.tiledb") - field_count, srs, options = create_tiledb_dataset(nullable, batch_size, True) + field_count, srs, options = create_tiledb_dataset(nullable, batch_size) ds = gdal.OpenEx("tmp/test.tiledb", open_options=options) lyr = ds.GetLayer(0) @@ -1132,10 +1124,7 @@ def test_ogr_tiledb_arrow_stream_pyarrow(nullable, batch_size): if os.path.exists("tmp/test.tiledb"): shutil.rmtree("tmp/test.tiledb") - include_bool = "Boolean" in gdal.GetDriverByName("TileDB").GetMetadataItem( - gdal.DMD_CREATIONFIELDDATASUBTYPES - ) - _, _, options = create_tiledb_dataset(nullable, batch_size, include_bool) + _, _, options = create_tiledb_dataset(nullable, batch_size) ds = gdal.OpenEx("tmp/test.tiledb", open_options=options) lyr = ds.GetLayer(0) @@ -1173,11 +1162,10 @@ def test_ogr_tiledb_arrow_stream_pyarrow(nullable, batch_size): ("timefield", "time32[ms]"), ("intfieldextra", "int32"), ("wkb_geometry", "large_binary"), + ("boolfield", "bool"), + ("boollistfield", "large_list<item: bool not null>"), ] ) - if include_bool: - expected_fields.add(("boolfield", "bool")) - expected_fields.add(("boollistfield", "large_list<item: bool not null>")) assert fields == expected_fields def check_batch(batch): @@ -1243,10 +1231,7 @@ def test_ogr_tiledb_arrow_stream_numpy(nullable, batch_size): if os.path.exists("tmp/test.tiledb"): shutil.rmtree("tmp/test.tiledb") - include_bool = True - _, _, options = 
create_tiledb_dataset( - nullable, batch_size, include_bool, extra_feature=True - ) + _, _, options = create_tiledb_dataset(nullable, batch_size, extra_feature=True) ds = gdal.OpenEx("tmp/test.tiledb", open_options=options) lyr = ds.GetLayer(0) @@ -1342,10 +1327,9 @@ def check_batch(batch): "timefield", "intfieldextra", "wkb_geometry", + "boolfield", + "boollistfield", } - if include_bool: - expected_fields.add("boolfield") - expected_fields.add("boollistfield") assert batch.keys() == expected_fields check_batch(batch) diff --git a/frmts/tiledb/include_tiledb.h b/frmts/tiledb/include_tiledb.h index 9eecd245d561..ce720b7cc1a4 100644 --- a/frmts/tiledb/include_tiledb.h +++ b/frmts/tiledb/include_tiledb.h @@ -42,11 +42,6 @@ #include "tiledb/tiledb_experimental" #endif -#if TILEDB_VERSION_MAJOR > 2 || \ - (TILEDB_VERSION_MAJOR == 2 && TILEDB_VERSION_MINOR >= 10) -#define HAS_TILEDB_BOOL -#endif - #if TILEDB_VERSION_MAJOR > 2 || \ (TILEDB_VERSION_MAJOR == 2 && TILEDB_VERSION_MINOR >= 11) #define HAS_TILEDB_WORKING_OR_FILTER diff --git a/frmts/tiledb/tiledbdrivercore.cpp b/frmts/tiledb/tiledbdrivercore.cpp index b83b94e45370..4c5fddddc512 100644 --- a/frmts/tiledb/tiledbdrivercore.cpp +++ b/frmts/tiledb/tiledbdrivercore.cpp @@ -102,12 +102,7 @@ void TileDBDriverSetCommonMetadata(GDALDriver *poDriver) "Integer Integer64 Real String Date Time DateTime " "IntegerList Integer64List RealList Binary"); poDriver->SetMetadataItem(GDAL_DMD_CREATIONFIELDDATASUBTYPES, -#ifdef HAS_TILEDB_BOOL - "Boolean Int16 Float32" -#else - "Int16 Float32" -#endif - ); + "Boolean Int16 Float32"); poDriver->SetMetadataItem( GDAL_DMD_CREATIONOPTIONLIST, "<CreationOptionList>\n" diff --git a/frmts/tiledb/tiledbsparse.cpp b/frmts/tiledb/tiledbsparse.cpp index 37f7827f6a2e..823dffd20f35 100644 --- a/frmts/tiledb/tiledbsparse.cpp +++ b/frmts/tiledb/tiledbsparse.cpp @@ -73,12 +73,10 @@ template <> struct GetType<TILEDB_UINT16> using EltType = std::vector<uint16_t>; }; -#ifdef HAS_TILEDB_BOOL template <> struct GetType<TILEDB_BOOL> { using EltType = VECTOR_OF_BOOL; }; -#endif template <> struct GetType<TILEDB_INT64> { @@ -143,11 +141,9 @@ template <template <class> class Func> struct ProcessField case TILEDB_UINT16: Func<GetType<TILEDB_UINT16>::EltType>::exec(array); break; -#ifdef HAS_TILEDB_BOOL case TILEDB_BOOL: Func<GetType<TILEDB_BOOL>::EltType>::exec(array); break; -#endif case TILEDB_INT64: Func<GetType<TILEDB_INT64>::EltType>::exec(array); break; @@ -761,13 +757,11 @@ bool OGRTileDBLayer::InitFromStorage(tiledb::Context *poCtx, eType = OFTString; fieldValues.push_back(std::make_shared<std::string>()); break; -#ifdef HAS_TILEDB_BOOL case TILEDB_BOOL: eType = bIsSingle ? OFTInteger : OFTIntegerList; eSubType = OFSTBoolean; fieldValues.push_back(std::make_shared<VECTOR_OF_BOOL>()); break; -#endif case TILEDB_DATETIME_DAY: eType = OFTDate; fieldValues.push_back(std::make_shared<std::vector<int64_t>>()); @@ -1298,7 +1292,6 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) { case OFTInteger: { -#ifdef HAS_TILEDB_BOOL if (m_aeFieldTypes[i] == TILEDB_BOOL) { auto &v = *( @@ -1310,9 +1303,7 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) m_query->set_data_buffer(pszFieldName, v); #endif } - else -#endif - if (m_aeFieldTypes[i] == TILEDB_INT16) + else if (m_aeFieldTypes[i] == TILEDB_INT16) { auto &v = *(std::get<std::shared_ptr<std::vector<int16_t>>>( fieldValues)); @@ -1356,7 +1347,6 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) ? 
static_cast<int>( std::min<uint64_t>(1000, iter->second)) : 8; -#ifdef HAS_TILEDB_BOOL if (m_aeFieldTypes[i] == TILEDB_BOOL) { auto &v = *( @@ -1372,9 +1362,7 @@ void OGRTileDBLayer::SetReadBuffers(bool bGrowVariableSizeArrays) m_query->set_data_buffer(pszFieldName, v); #endif } - else -#endif - if (m_aeFieldTypes[i] == TILEDB_INT16) + else if (m_aeFieldTypes[i] == TILEDB_INT16) { auto &v = *(std::get<std::shared_ptr<std::vector<int16_t>>>( fieldValues)); @@ -1909,16 +1897,13 @@ bool OGRTileDBLayer::SetupQuery(tiledb::QueryCondition *queryCondition) { case OFTInteger: { -#ifdef HAS_TILEDB_BOOL if (m_aeFieldTypes[i] == TILEDB_BOOL) { auto &v = *(std::get<std::shared_ptr<VECTOR_OF_BOOL>>( fieldValues)); v.resize(result.second); } - else -#endif - if (m_aeFieldTypes[i] == TILEDB_INT16) + else if (m_aeFieldTypes[i] == TILEDB_INT16) { auto &v = *(std::get<std::shared_ptr<std::vector<int16_t>>>( @@ -1955,7 +1940,6 @@ bool OGRTileDBLayer::SetupQuery(tiledb::QueryCondition *queryCondition) case OFTIntegerList: { -#ifdef HAS_TILEDB_BOOL if (m_aeFieldTypes[i] == TILEDB_BOOL) { auto &v = *(std::get<std::shared_ptr<VECTOR_OF_BOOL>>( @@ -1969,9 +1953,7 @@ bool OGRTileDBLayer::SetupQuery(tiledb::QueryCondition *queryCondition) v.resize(static_cast<size_t>(result.second)); } } - else -#endif - if (m_aeFieldTypes[i] == TILEDB_INT16) + else if (m_aeFieldTypes[i] == TILEDB_INT16) { auto &v = *(std::get<std::shared_ptr<std::vector<int16_t>>>( @@ -2457,7 +2439,7 @@ std::unique_ptr<tiledb::QueryCondition> OGRTileDBLayer::CreateQueryCondition( CPLAssert(false); return nullptr; } -#ifdef HAS_TILEDB_BOOL + if (m_aeFieldTypes[poColumn->field_index] == TILEDB_BOOL) { if (nVal == 0 || nVal == 1) @@ -2478,9 +2460,7 @@ std::unique_ptr<tiledb::QueryCondition> OGRTileDBLayer::CreateQueryCondition( return nullptr; } } - else -#endif - if (m_aeFieldTypes[poColumn->field_index] == TILEDB_INT16) + else if (m_aeFieldTypes[poColumn->field_index] == TILEDB_INT16) { return CreateQueryConditionForIntType<int16_t>( *(m_ctx.get()), poFieldDefn, nVal, tiledb_op, @@ -3017,7 +2997,6 @@ OGRFeature *OGRTileDBLayer::TranslateCurrentFeature() { case OFTInteger: { -#ifdef HAS_TILEDB_BOOL if (m_aeFieldTypes[i] == TILEDB_BOOL) { const auto &v = *( @@ -3025,9 +3004,7 @@ OGRFeature *OGRTileDBLayer::TranslateCurrentFeature() poFeature->SetFieldSameTypeUnsafe(i, v[m_nOffsetInResultSet]); } - else -#endif - if (m_aeFieldTypes[i] == TILEDB_INT16) + else if (m_aeFieldTypes[i] == TILEDB_INT16) { const auto &v = *(std::get<std::shared_ptr<std::vector<int16_t>>>( @@ -3068,7 +3045,6 @@ OGRFeature *OGRTileDBLayer::TranslateCurrentFeature() case OFTIntegerList: { -#ifdef HAS_TILEDB_BOOL if (m_aeFieldTypes[i] == TILEDB_BOOL) { const auto &v = *( @@ -3085,9 +3061,7 @@ OGRFeature *OGRTileDBLayer::TranslateCurrentFeature() poFeature->SetField(i, static_cast<int>(nEltCount), tmp.data()); } - else -#endif - if (m_aeFieldTypes[i] == TILEDB_INT16) + else if (m_aeFieldTypes[i] == TILEDB_INT16) { const auto &v = *(std::get<std::shared_ptr<std::vector<int16_t>>>( @@ -3586,16 +3560,13 @@ void OGRTileDBLayer::InitializeSchemaAndArray() case OFTInteger: case OFTIntegerList: { -#ifdef HAS_TILEDB_BOOL if (poFieldDefn->GetSubType() == OFSTBoolean) { CreateAttr(TILEDB_BOOL, eType == OFTIntegerList); aFieldValues.push_back( std::make_shared<VECTOR_OF_BOOL>()); } - else -#endif - if (poFieldDefn->GetSubType() == OFSTInt16) + else if (poFieldDefn->GetSubType() == OFSTInt16) { CreateAttr(TILEDB_INT16, eType == OFTIntegerList); aFieldValues.push_back( @@ -3971,13 +3942,10 @@ 
OGRErr OGRTileDBLayer::ICreateFeature(OGRFeature *poFeature) { const int nVal = bFieldIsValid ? poFeature->GetFieldAsIntegerUnsafe(i) : 0; -#ifdef HAS_TILEDB_BOOL if (m_aeFieldTypes[i] == TILEDB_BOOL) std::get<std::shared_ptr<VECTOR_OF_BOOL>>(fieldValues) ->push_back(static_cast<uint8_t>(nVal)); - else -#endif - if (m_aeFieldTypes[i] == TILEDB_INT16) + else if (m_aeFieldTypes[i] == TILEDB_INT16) std::get<std::shared_ptr<std::vector<int16_t>>>(fieldValues) ->push_back(static_cast<int16_t>(nVal)); else if (m_aeFieldTypes[i] == TILEDB_INT32) @@ -4004,7 +3972,6 @@ OGRErr OGRTileDBLayer::ICreateFeature(OGRFeature *poFeature) poFeature->GetFieldAsIntegerList(i, &nCount); if (anOffsets.empty()) anOffsets.push_back(0); -#ifdef HAS_TILEDB_BOOL if (m_aeFieldTypes[i] == TILEDB_BOOL) { auto &v = *( @@ -4014,9 +3981,7 @@ OGRErr OGRTileDBLayer::ICreateFeature(OGRFeature *poFeature) anOffsets.push_back(anOffsets.back() + nCount * sizeof(v[0])); } - else -#endif - if (m_aeFieldTypes[i] == TILEDB_INT16) + else if (m_aeFieldTypes[i] == TILEDB_INT16) { auto &v = *(std::get<std::shared_ptr<std::vector<int16_t>>>( fieldValues)); @@ -4332,7 +4297,6 @@ void OGRTileDBLayer::FlushArrays() query.set_offsets_buffer(pszFieldName, anOffsets); } -#ifdef HAS_TILEDB_BOOL if (m_aeFieldTypes[i] == TILEDB_BOOL) { auto &v = *(std::get<std::shared_ptr<VECTOR_OF_BOOL>>( @@ -4343,9 +4307,7 @@ void OGRTileDBLayer::FlushArrays() query.set_data_buffer(pszFieldName, v); #endif } - else -#endif - if (m_aeFieldTypes[i] == TILEDB_INT16) + else if (m_aeFieldTypes[i] == TILEDB_INT16) { auto &v = *(std::get<std::shared_ptr<std::vector<int16_t>>>( @@ -4619,12 +4581,9 @@ int OGRTileDBLayer::GetArrowSchema(struct ArrowArrayStream *out_stream, eType == OFTInteger ? out_schema->children[iSchemaChild]->format : out_schema->children[iSchemaChild]->children[0]->format; -#ifdef HAS_TILEDB_BOOL if (m_aeFieldTypes[i] == TILEDB_BOOL) format = "b"; - else -#endif - if (m_aeFieldTypes[i] == TILEDB_INT16) + else if (m_aeFieldTypes[i] == TILEDB_INT16) format = "s"; else if (m_aeFieldTypes[i] == TILEDB_INT32) format = "i"; @@ -5319,14 +5278,11 @@ int OGRTileDBLayer::GetNextArrowArray(struct ArrowArrayStream *stream, { case OFTInteger: { -#ifdef HAS_TILEDB_BOOL if (m_aeFieldTypes[i] == TILEDB_BOOL) { FillBoolArray(psChild, i, abyValidityFromFilters); } - else -#endif - if (m_aeFieldTypes[i] == TILEDB_INT16) + else if (m_aeFieldTypes[i] == TILEDB_INT16) { FillPrimitiveArray<int16_t>(psChild, i, abyValidityFromFilters); @@ -5355,14 +5311,11 @@ int OGRTileDBLayer::GetNextArrowArray(struct ArrowArrayStream *stream, case OFTIntegerList: { -#ifdef HAS_TILEDB_BOOL if (m_aeFieldTypes[i] == TILEDB_BOOL) { FillBoolListArray(psChild, i, abyValidityFromFilters); } - else -#endif - if (m_aeFieldTypes[i] == TILEDB_INT16) + else if (m_aeFieldTypes[i] == TILEDB_INT16) { FillPrimitiveListArray<int16_t>(psChild, i, abyValidityFromFilters); From 097431c49f1505cfb9880e061e1c61316167f146 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 23 Apr 2024 19:51:34 +0200 Subject: [PATCH 163/230] TileDB: cleanups related to requiring TileDB >= 2.11 --- autotest/ogr/ogr_tiledb.py | 130 +++++++++++++----------------- frmts/tiledb/include_tiledb.h | 5 -- frmts/tiledb/tiledbdrivercore.cpp | 4 - frmts/tiledb/tiledbsparse.cpp | 2 - 4 files changed, 58 insertions(+), 83 deletions(-) diff --git a/autotest/ogr/ogr_tiledb.py b/autotest/ogr/ogr_tiledb.py index ae41ed3fb492..c88bef3ffb9b 100644 --- a/autotest/ogr/ogr_tiledb.py +++ b/autotest/ogr/ogr_tiledb.py @@ 
-578,38 +578,33 @@ def test_ogr_tiledb_basic(nullable, batch_size): assert set(f.GetFID() for f in lyr) == set([1]) # Test OR - has_working_or_filter = ( - gdal.GetDriverByName("TileDB").GetMetadataItem("HAS_TILEDB_WORKING_OR_FILTER") - != "NO" - ) - if has_working_or_filter: - lyr.SetAttributeFilter("intfield = 321 OR intfield = -123456789") - assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" - assert set(f.GetFID() for f in lyr) == set([1]) + lyr.SetAttributeFilter("intfield = 321 OR intfield = -123456789") + assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" + assert set(f.GetFID() for f in lyr) == set([1]) - lyr.SetAttributeFilter("intfield = -123456789 OR intfield = 321") - assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" - assert set(f.GetFID() for f in lyr) == set([1]) + lyr.SetAttributeFilter("intfield = -123456789 OR intfield = 321") + assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" + assert set(f.GetFID() for f in lyr) == set([1]) - lyr.SetAttributeFilter("intfield = 321 OR intfield = 123") - assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" - assert set(f.GetFID() for f in lyr) == set([]) + lyr.SetAttributeFilter("intfield = 321 OR intfield = 123") + assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" + assert set(f.GetFID() for f in lyr) == set([]) - lyr.SetAttributeFilter("(1 = 1) OR intfield = -123456789") - assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "NONE" - assert set(f.GetFID() for f in lyr) == set([1, 2, 3]) + lyr.SetAttributeFilter("(1 = 1) OR intfield = -123456789") + assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "NONE" + assert set(f.GetFID() for f in lyr) == set([1, 2, 3]) - lyr.SetAttributeFilter("(1 = 0) OR intfield = -123456789") - assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "NONE" - assert set(f.GetFID() for f in lyr) == set([1]) + lyr.SetAttributeFilter("(1 = 0) OR intfield = -123456789") + assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "NONE" + assert set(f.GetFID() for f in lyr) == set([1]) - lyr.SetAttributeFilter("intfield = -123456789 OR (1 = 1)") - assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "NONE" - assert set(f.GetFID() for f in lyr) == set([1, 2, 3]) + lyr.SetAttributeFilter("intfield = -123456789 OR (1 = 1)") + assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "NONE" + assert set(f.GetFID() for f in lyr) == set([1, 2, 3]) - lyr.SetAttributeFilter("intfield = -123456789 OR (1 = 0)") - assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "NONE" - assert set(f.GetFID() for f in lyr) == set([1]) + lyr.SetAttributeFilter("intfield = -123456789 OR (1 = 0)") + assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "NONE" + assert set(f.GetFID() for f in lyr) == set([1]) # Test NOT lyr.SetAttributeFilter("NOT (intfield = -123456789)") @@ -617,14 +612,13 @@ def test_ogr_tiledb_basic(nullable, batch_size): assert set(f.GetFID() for f in lyr) == (set([3]) if nullable else set([2, 3])) # Test IN - if has_working_or_filter: - lyr.SetAttributeFilter("intfield IN (321, -123456789)") - assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" - assert set(f.GetFID() for f in lyr) == set([1]) + lyr.SetAttributeFilter("intfield IN (321, -123456789)") + assert 
lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" + assert set(f.GetFID() for f in lyr) == set([1]) - lyr.SetAttributeFilter("intfield IN (-123456789, 321)") - assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" - assert set(f.GetFID() for f in lyr) == set([1]) + lyr.SetAttributeFilter("intfield IN (-123456789, 321)") + assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" + assert set(f.GetFID() for f in lyr) == set([1]) # Test IS NULL / IS NOT NULL lyr.SetAttributeFilter("strfield IS NULL") @@ -688,54 +682,46 @@ def test_ogr_tiledb_basic(nullable, batch_size): assert set(f.GetFID() for f in lyr) == set() # Test IS NULL and OR (for always_false situations) - if has_working_or_filter: - lyr.SetAttributeFilter("intfield IS NULL OR intfieldextra <> 4") - assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" - assert set(f.GetFID() for f in lyr) == set([1, 2, 3]) + lyr.SetAttributeFilter("intfield IS NULL OR intfieldextra <> 4") + assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" + assert set(f.GetFID() for f in lyr) == set([1, 2, 3]) - lyr.SetAttributeFilter("intfield IS NULL OR intfield IS NULL") - assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" - assert set(f.GetFID() for f in lyr) == (set([2]) if nullable else set()) + lyr.SetAttributeFilter("intfield IS NULL OR intfield IS NULL") + assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" + assert set(f.GetFID() for f in lyr) == (set([2]) if nullable else set()) - lyr.SetAttributeFilter("intfieldextra <> 4 OR intfield IS NULL") - assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" - assert set(f.GetFID() for f in lyr) == set([1, 2, 3]) + lyr.SetAttributeFilter("intfieldextra <> 4 OR intfield IS NULL") + assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" + assert set(f.GetFID() for f in lyr) == set([1, 2, 3]) - lyr.SetAttributeFilter("intfield IS NULL OR intfieldextra = 4") - assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" - assert set(f.GetFID() for f in lyr) == (set([2]) if nullable else set()) + lyr.SetAttributeFilter("intfield IS NULL OR intfieldextra = 4") + assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" + assert set(f.GetFID() for f in lyr) == (set([2]) if nullable else set()) - lyr.SetAttributeFilter("intfieldextra = 4 OR intfield IS NULL") - assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" - assert set(f.GetFID() for f in lyr) == (set([2]) if nullable else set()) + lyr.SetAttributeFilter("intfieldextra = 4 OR intfield IS NULL") + assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" + assert set(f.GetFID() for f in lyr) == (set([2]) if nullable else set()) # Test IS NOT NULL and OR (for always_true situations) - if has_working_or_filter: - lyr.SetAttributeFilter("intfield IS NOT NULL OR intfieldextra <> 4") - assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" - assert set(f.GetFID() for f in lyr) == set([1, 2, 3]) - - lyr.SetAttributeFilter("intfield IS NOT NULL OR intfield IS NOT NULL") - assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" - assert set(f.GetFID() for f in lyr) == ( - set([1, 3]) if nullable else set([1, 2, 3]) - ) + lyr.SetAttributeFilter("intfield IS NOT NULL OR intfieldextra <> 4") 
+ assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" + assert set(f.GetFID() for f in lyr) == set([1, 2, 3]) + + lyr.SetAttributeFilter("intfield IS NOT NULL OR intfield IS NOT NULL") + assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" + assert set(f.GetFID() for f in lyr) == (set([1, 3]) if nullable else set([1, 2, 3])) - lyr.SetAttributeFilter("intfieldextra <> 4 OR intfield IS NOT NULL") - assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" - assert set(f.GetFID() for f in lyr) == set([1, 2, 3]) + lyr.SetAttributeFilter("intfieldextra <> 4 OR intfield IS NOT NULL") + assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" + assert set(f.GetFID() for f in lyr) == set([1, 2, 3]) - lyr.SetAttributeFilter("intfield IS NOT NULL OR intfieldextra = 4") - assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" - assert set(f.GetFID() for f in lyr) == ( - set([1, 3]) if nullable else set([1, 2, 3]) - ) + lyr.SetAttributeFilter("intfield IS NOT NULL OR intfieldextra = 4") + assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" + assert set(f.GetFID() for f in lyr) == (set([1, 3]) if nullable else set([1, 2, 3])) - lyr.SetAttributeFilter("intfieldextra = 4 OR intfield IS NOT NULL") - assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" - assert set(f.GetFID() for f in lyr) == ( - set([1, 3]) if nullable else set([1, 2, 3]) - ) + lyr.SetAttributeFilter("intfieldextra = 4 OR intfield IS NOT NULL") + assert lyr.GetMetadataItem("ATTRIBUTE_FILTER_TRANSLATION", "_DEBUG_") == "WHOLE" + assert set(f.GetFID() for f in lyr) == (set([1, 3]) if nullable else set([1, 2, 3])) tiledb_md = json.loads(lyr.GetMetadata_List("json:TILEDB")[0]) md = tiledb_md["array"]["metadata"] diff --git a/frmts/tiledb/include_tiledb.h b/frmts/tiledb/include_tiledb.h index ce720b7cc1a4..b2a0822d146c 100644 --- a/frmts/tiledb/include_tiledb.h +++ b/frmts/tiledb/include_tiledb.h @@ -42,11 +42,6 @@ #include "tiledb/tiledb_experimental" #endif -#if TILEDB_VERSION_MAJOR > 2 || \ - (TILEDB_VERSION_MAJOR == 2 && TILEDB_VERSION_MINOR >= 11) -#define HAS_TILEDB_WORKING_OR_FILTER -#endif - #if TILEDB_VERSION_MAJOR > 2 || \ (TILEDB_VERSION_MAJOR == 2 && TILEDB_VERSION_MINOR >= 14) #define HAS_TILEDB_WORKING_UTF8_STRING_FILTER diff --git a/frmts/tiledb/tiledbdrivercore.cpp b/frmts/tiledb/tiledbdrivercore.cpp index 4c5fddddc512..35a0e83a05e0 100644 --- a/frmts/tiledb/tiledbdrivercore.cpp +++ b/frmts/tiledb/tiledbdrivercore.cpp @@ -286,10 +286,6 @@ void TileDBDriverSetCommonMetadata(GDALDriver *poDriver) poDriver->SetMetadataItem("HAS_TILEDB_WORKING_UTF8_STRING_FILTER", "NO"); #endif -#if !defined(HAS_TILEDB_WORKING_OR_FILTER) - poDriver->SetMetadataItem("HAS_TILEDB_WORKING_OR_FILTER", "NO"); -#endif - poDriver->pfnIdentify = TileDBDriverIdentifySimplified; poDriver->SetMetadataItem(GDAL_DCAP_OPEN, "YES"); poDriver->SetMetadataItem(GDAL_DCAP_CREATE, "YES"); diff --git a/frmts/tiledb/tiledbsparse.cpp b/frmts/tiledb/tiledbsparse.cpp index 823dffd20f35..a73c7ec86ea9 100644 --- a/frmts/tiledb/tiledbsparse.cpp +++ b/frmts/tiledb/tiledbsparse.cpp @@ -2679,7 +2679,6 @@ OGRTileDBLayer::CreateQueryCondition(const swq_expr_node *poNode, return right; } -#ifdef HAS_TILEDB_WORKING_OR_FILTER // A OR B else if (poNode->eNodeType == SNT_OPERATION && poNode->nOperation == SWQ_OR && poNode->nSubExprCount == 2) @@ -2761,7 +2760,6 @@ 
OGRTileDBLayer::CreateQueryCondition(const swq_expr_node *poNode, bAlwaysFalse = true; return cond; } -#endif // field_name =/<>/</>/<=/>= constant (or the reverse) else if (poNode->eNodeType == SNT_OPERATION && From bdd2cbfa4df20a0f1f4d1b1e81553e4c67cb4741 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 23 Apr 2024 19:53:27 +0200 Subject: [PATCH 164/230] TileDB: cleanups related to requiring TileDB >= 2.14 --- frmts/tiledb/include_tiledb.h | 5 ----- frmts/tiledb/tiledbdrivercore.cpp | 8 -------- frmts/tiledb/tiledbheaders.h | 4 ---- frmts/tiledb/tiledbsparse.cpp | 21 --------------------- 4 files changed, 38 deletions(-) diff --git a/frmts/tiledb/include_tiledb.h b/frmts/tiledb/include_tiledb.h index b2a0822d146c..d841f26c28b8 100644 --- a/frmts/tiledb/include_tiledb.h +++ b/frmts/tiledb/include_tiledb.h @@ -42,11 +42,6 @@ #include "tiledb/tiledb_experimental" #endif -#if TILEDB_VERSION_MAJOR > 2 || \ - (TILEDB_VERSION_MAJOR == 2 && TILEDB_VERSION_MINOR >= 14) -#define HAS_TILEDB_WORKING_UTF8_STRING_FILTER -#endif - #if TILEDB_VERSION_MAJOR > 2 || \ (TILEDB_VERSION_MAJOR == 2 && TILEDB_VERSION_MINOR >= 15) #define HAS_TILEDB_DIMENSION_LABEL diff --git a/frmts/tiledb/tiledbdrivercore.cpp b/frmts/tiledb/tiledbdrivercore.cpp index 35a0e83a05e0..180f040047e8 100644 --- a/frmts/tiledb/tiledbdrivercore.cpp +++ b/frmts/tiledb/tiledbdrivercore.cpp @@ -218,11 +218,7 @@ void TileDBDriverSetCommonMetadata(GDALDriver *poDriver) "array at this timestamp, the timestamp should be > 0'/>" " <Option name='TILEDB_STRING_TYPE' type='string-select' " "description='Which TileDB type to create string attributes' " -#ifdef HAS_TILEDB_WORKING_UTF8_STRING_FILTER "default='UTF8'" -#else - "default='ASCII'" -#endif ">" " <Value>UTF8</Value>" " <Value>ASCII</Value>" @@ -282,10 +278,6 @@ void TileDBDriverSetCommonMetadata(GDALDriver *poDriver) "</MultiDimArrayCreationOptionList>"); #endif -#if !defined(HAS_TILEDB_WORKING_UTF8_STRING_FILTER) - poDriver->SetMetadataItem("HAS_TILEDB_WORKING_UTF8_STRING_FILTER", "NO"); -#endif - poDriver->pfnIdentify = TileDBDriverIdentifySimplified; poDriver->SetMetadataItem(GDAL_DCAP_OPEN, "YES"); poDriver->SetMetadataItem(GDAL_DCAP_CREATE, "YES"); diff --git a/frmts/tiledb/tiledbheaders.h b/frmts/tiledb/tiledbheaders.h index ccb8ecbbb08f..0068ed19e292 100644 --- a/frmts/tiledb/tiledbheaders.h +++ b/frmts/tiledb/tiledbheaders.h @@ -333,11 +333,7 @@ class OGRTileDBLayer final : public OGRLayer, uint64_t m_nRowCountInResultSet = 0; int m_nUseOptimizedAttributeFilter = -1; // uninitialized -#ifdef HAS_TILEDB_WORKING_UTF8_STRING_FILTER tiledb_datatype_t m_eTileDBStringType = TILEDB_STRING_UTF8; -#else - tiledb_datatype_t m_eTileDBStringType = TILEDB_STRING_ASCII; -#endif std::string m_osXDim = "_X"; std::string m_osYDim = "_Y"; diff --git a/frmts/tiledb/tiledbsparse.cpp b/frmts/tiledb/tiledbsparse.cpp index a73c7ec86ea9..8933c54a6578 100644 --- a/frmts/tiledb/tiledbsparse.cpp +++ b/frmts/tiledb/tiledbsparse.cpp @@ -2540,11 +2540,6 @@ std::unique_ptr<tiledb::QueryCondition> OGRTileDBLayer::CreateQueryCondition( CPLAssert(false); return nullptr; } -#if !defined(HAS_TILEDB_WORKING_UTF8_STRING_FILTER) - if (m_schema->attribute(poFieldDefn->GetNameRef()).type() != - TILEDB_STRING_ASCII) - return nullptr; -#endif return std::make_unique<tiledb::QueryCondition>( tiledb::QueryCondition::create( *(m_ctx.get()), poFieldDefn->GetNameRef(), @@ -2781,14 +2776,6 @@ OGRTileDBLayer::CreateQueryCondition(const swq_expr_node *poNode, { const OGRFieldDefn *poFieldDefn = 
m_poFeatureDefn->GetFieldDefn(poNode->papoSubExpr[0]->field_index); -#if !defined(HAS_TILEDB_WORKING_UTF8_STRING_FILTER) - if (poFieldDefn->GetType() == OFTString && - m_schema->attribute(poFieldDefn->GetNameRef()).type() != - TILEDB_STRING_ASCII) - { - return nullptr; - } -#endif if (!poFieldDefn->IsNullable()) { bAlwaysFalse = true; @@ -2810,14 +2797,6 @@ OGRTileDBLayer::CreateQueryCondition(const swq_expr_node *poNode, { const OGRFieldDefn *poFieldDefn = m_poFeatureDefn->GetFieldDefn( poNode->papoSubExpr[0]->papoSubExpr[0]->field_index); -#if !defined(HAS_TILEDB_WORKING_UTF8_STRING_FILTER) - if (poFieldDefn->GetType() == OFTString && - m_schema->attribute(poFieldDefn->GetNameRef()).type() != - TILEDB_STRING_ASCII) - { - return nullptr; - } -#endif if (!poFieldDefn->IsNullable()) { bAlwaysTrue = true; From bcf261528e9b94fb928aa548f5e1c524281b171f Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Wed, 24 Apr 2024 00:43:08 +0200 Subject: [PATCH 165/230] TileDB: make cppcheck happy --- frmts/tiledb/tiledbmultidim.h | 6 ++++-- frmts/tiledb/tiledbmultidimarray.cpp | 7 ++++--- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/frmts/tiledb/tiledbmultidim.h b/frmts/tiledb/tiledbmultidim.h index 65af731fb66b..604e93bdcf52 100644 --- a/frmts/tiledb/tiledbmultidim.h +++ b/frmts/tiledb/tiledbmultidim.h @@ -596,7 +596,8 @@ class TileDBArrayGroup final : public GDALGroup std::vector<std::shared_ptr<GDALMDArray>> m_apoArrays; public: - TileDBArrayGroup(const std::vector<std::shared_ptr<GDALMDArray>> &apoArrays) + explicit TileDBArrayGroup( + const std::vector<std::shared_ptr<GDALMDArray>> &apoArrays) : GDALGroup(std::string(), "/"), m_apoArrays(apoArrays) { } @@ -624,7 +625,8 @@ class TileDBMultiDimDataset final : public GDALDataset std::shared_ptr<GDALGroup> m_poRG{}; public: - TileDBMultiDimDataset(const std::shared_ptr<GDALGroup> &poRG) : m_poRG(poRG) + explicit TileDBMultiDimDataset(const std::shared_ptr<GDALGroup> &poRG) + : m_poRG(poRG) { } diff --git a/frmts/tiledb/tiledbmultidimarray.cpp b/frmts/tiledb/tiledbmultidimarray.cpp index 09649188d551..a1b92634f349 100644 --- a/frmts/tiledb/tiledbmultidimarray.cpp +++ b/frmts/tiledb/tiledbmultidimarray.cpp @@ -1408,9 +1408,10 @@ std::shared_ptr<TileDBArray> TileDBArray::CreateOnDisk( { const auto osSanitizedName = TileDBSharedResource::SanitizeNameForPath(osName); - if (osSanitizedName.empty() || osName.find("./") == 0 || - osName.find("../") == 0 || osName.find(".\\") == 0 || - osName.find("..\\") == 0) + if (osSanitizedName.empty() || STARTS_WITH(osName.c_str(), "./") || + STARTS_WITH(osName.c_str(), "../") || + STARTS_WITH(osName.c_str(), ".\\") || + STARTS_WITH(osName.c_str(), "..\\")) { CPLError(CE_Failure, CPLE_AppDefined, "Invalid array name"); return nullptr; From 848608b4dc2d8c12f83839c422dc5a8da3c481de Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 23 Apr 2024 19:56:38 +0200 Subject: [PATCH 166/230] TileDB: cleanups related to requiring TileDB >= 2.15 --- autotest/gdrivers/tiledb_multidim.py | 13 +------------ frmts/tiledb/include_tiledb.h | 9 --------- frmts/tiledb/tiledbcommon.cpp | 13 +------------ frmts/tiledb/tiledbdrivercore.cpp | 2 -- frmts/tiledb/tiledbheaders.h | 3 +-- frmts/tiledb/tiledbmultidim.cpp | 4 ---- frmts/tiledb/tiledbmultidim.h | 4 ---- frmts/tiledb/tiledbmultidimarray.cpp | 4 ---- frmts/tiledb/tiledbmultidimattribute.cpp | 4 ---- frmts/tiledb/tiledbmultidimattributeholder.cpp | 4 ---- frmts/tiledb/tiledbmultidimgroup.cpp | 4 ---- 11 files 
changed, 3 insertions(+), 61 deletions(-) diff --git a/autotest/gdrivers/tiledb_multidim.py b/autotest/gdrivers/tiledb_multidim.py index 774a4c29f299..6e163a05d4e2 100755 --- a/autotest/gdrivers/tiledb_multidim.py +++ b/autotest/gdrivers/tiledb_multidim.py @@ -38,18 +38,7 @@ from osgeo import gdal, osr - -def has_tiledb_multidim(): - drv = gdal.GetDriverByName("TileDB") - if drv is None: - return False - return drv.GetMetadataItem(gdal.DCAP_CREATE_MULTIDIMENSIONAL) == "YES" - - -pytestmark = [ - pytest.mark.require_driver("TileDB"), - pytest.mark.skipif(not has_tiledb_multidim(), reason="TileDB >= 2.15 required"), -] +pytestmark = pytest.mark.require_driver("TileDB") def test_tiledb_multidim_basic(): diff --git a/frmts/tiledb/include_tiledb.h b/frmts/tiledb/include_tiledb.h index d841f26c28b8..53efb201735e 100644 --- a/frmts/tiledb/include_tiledb.h +++ b/frmts/tiledb/include_tiledb.h @@ -42,15 +42,6 @@ #include "tiledb/tiledb_experimental" #endif -#if TILEDB_VERSION_MAJOR > 2 || \ - (TILEDB_VERSION_MAJOR == 2 && TILEDB_VERSION_MINOR >= 15) -#define HAS_TILEDB_DIMENSION_LABEL -#endif - -#ifdef HAS_TILEDB_DIMENSION_LABEL -#define HAS_TILEDB_MULTIDIM -#endif - #if TILEDB_VERSION_MAJOR > 2 || \ (TILEDB_VERSION_MAJOR == 2 && TILEDB_VERSION_MINOR >= 21) #define HAS_TILEDB_GEOM_WKB_WKT diff --git a/frmts/tiledb/tiledbcommon.cpp b/frmts/tiledb/tiledbcommon.cpp index eb38195c9424..010ab1345f13 100644 --- a/frmts/tiledb/tiledbcommon.cpp +++ b/frmts/tiledb/tiledbcommon.cpp @@ -156,20 +156,17 @@ int TileDBDataset::Identify(GDALOpenInfo *poOpenInfo) eType == tiledb::Object::Type::Group) return true; } -#ifdef HAS_TILEDB_MULTIDIM + if ((poOpenInfo->nOpenFlags & GDAL_OF_MULTIDIM_RASTER) != 0) { if (eType == tiledb::Object::Type::Array || eType == tiledb::Object::Type::Group) return true; } -#endif if ((poOpenInfo->nOpenFlags & GDAL_OF_RASTER) != 0) { -#ifdef HAS_TILEDB_MULTIDIM if (eType == tiledb::Object::Type::Group) return GDAL_IDENTIFY_UNKNOWN; -#endif return eType == tiledb::Object::Type::Array; } } @@ -231,12 +228,10 @@ GDALDataset *TileDBDataset::Open(GDALOpenInfo *poOpenInfo) } else { -#ifdef HAS_TILEDB_MULTIDIM if ((poOpenInfo->nOpenFlags & GDAL_OF_MULTIDIM_RASTER) != 0) { return TileDBDataset::OpenMultiDimensional(poOpenInfo); } -#endif const char *pszConfig = CSLFetchNameValue( poOpenInfo->papszOpenOptions, "TILEDB_CONFIG"); @@ -262,7 +257,6 @@ GDALDataset *TileDBDataset::Open(GDALOpenInfo *poOpenInfo) { return OGRTileDBDataset::Open(poOpenInfo, eType); } -#ifdef HAS_TILEDB_MULTIDIM else if ((poOpenInfo->nOpenFlags & GDAL_OF_RASTER) != 0 && eType == tiledb::Object::Type::Group) { @@ -311,7 +305,6 @@ GDALDataset *TileDBDataset::Open(GDALOpenInfo *poOpenInfo) } return nullptr; } -#endif tiledb::ArraySchema schema(oCtx, osPath); @@ -364,7 +357,6 @@ GDALDataset *TileDBDataset::CreateCopy(const char *pszFilename, void *pProgressData) { -#ifdef HAS_TILEDB_MULTIDIM if (poSrcDS->GetRootGroup()) { auto poDrv = GDALDriver::FromHandle(GDALGetDriverByName("TileDB")); @@ -375,7 +367,6 @@ GDALDataset *TileDBDataset::CreateCopy(const char *pszFilename, pProgressData); } } -#endif try { @@ -413,9 +404,7 @@ void GDALRegister_TileDB() poDriver->pfnCreate = TileDBDataset::Create; poDriver->pfnCreateCopy = TileDBDataset::CreateCopy; poDriver->pfnDelete = TileDBDataset::Delete; -#ifdef HAS_TILEDB_MULTIDIM poDriver->pfnCreateMultiDimensional = TileDBDataset::CreateMultiDimensional; -#endif GetGDALDriverManager()->RegisterDriver(poDriver); } diff --git a/frmts/tiledb/tiledbdrivercore.cpp 
b/frmts/tiledb/tiledbdrivercore.cpp index 180f040047e8..9d85d2ea5ec6 100644 --- a/frmts/tiledb/tiledbdrivercore.cpp +++ b/frmts/tiledb/tiledbdrivercore.cpp @@ -228,7 +228,6 @@ void TileDBDriverSetCommonMetadata(GDALDriver *poDriver) "</LayerCreationOptionList>"); // clang-format on -#ifdef HAS_TILEDB_MULTIDIM poDriver->SetMetadataItem(GDAL_DCAP_MULTIDIM_RASTER, "YES"); poDriver->SetMetadataItem(GDAL_DCAP_CREATE_MULTIDIMENSIONAL, "YES"); @@ -276,7 +275,6 @@ void TileDBDriverSetCommonMetadata(GDALDriver *poDriver) "description='Whether the array should be only in-memory. Useful to " "create an indexing variable that is serialized as a dimension label'/>" "</MultiDimArrayCreationOptionList>"); -#endif poDriver->pfnIdentify = TileDBDriverIdentifySimplified; poDriver->SetMetadataItem(GDAL_DCAP_OPEN, "YES"); diff --git a/frmts/tiledb/tiledbheaders.h b/frmts/tiledb/tiledbheaders.h index 0068ed19e292..536c6a74c7c7 100644 --- a/frmts/tiledb/tiledbheaders.h +++ b/frmts/tiledb/tiledbheaders.h @@ -190,13 +190,12 @@ class TileDBDataset : public GDALPamDataset char **papszOptions, GDALProgressFunc pfnProgress, void *pProgressData); -#ifdef HAS_TILEDB_MULTIDIM + static GDALDataset *OpenMultiDimensional(GDALOpenInfo *); static GDALDataset * CreateMultiDimensional(const char *pszFilename, CSLConstList papszRootGroupOptions, CSLConstList papszOptions); -#endif }; /************************************************************************/ diff --git a/frmts/tiledb/tiledbmultidim.cpp b/frmts/tiledb/tiledbmultidim.cpp index b95d939963b5..b2841eed50d6 100644 --- a/frmts/tiledb/tiledbmultidim.cpp +++ b/frmts/tiledb/tiledbmultidim.cpp @@ -28,8 +28,6 @@ #include "tiledbmultidim.h" -#ifdef HAS_TILEDB_MULTIDIM - /************************************************************************/ /* TileDBSingleArrayGroup::SanitizeNameForPath() */ /************************************************************************/ @@ -213,5 +211,3 @@ TileDBDataset::CreateMultiDimensional(const char *pszFilename, poDS->SetDescription(pszFilename); return poDS; } - -#endif // HAS_TILEDB_MULTIDIM diff --git a/frmts/tiledb/tiledbmultidim.h b/frmts/tiledb/tiledbmultidim.h index 604e93bdcf52..391ba1880d22 100644 --- a/frmts/tiledb/tiledbmultidim.h +++ b/frmts/tiledb/tiledbmultidim.h @@ -31,8 +31,6 @@ #include "tiledbheaders.h" -#ifdef HAS_TILEDB_MULTIDIM - constexpr const char *CRS_ATTRIBUTE_NAME = "_CRS"; constexpr const char *UNIT_ATTRIBUTE_NAME = "_UNIT"; constexpr const char *DIM_TYPE_ATTRIBUTE_NAME = "_DIM_TYPE"; @@ -636,6 +634,4 @@ class TileDBMultiDimDataset final : public GDALDataset } }; -#endif // HAS_TILEDB_MULTIDIM - #endif // TILEDBMULTIDIM_H_INCLUDED diff --git a/frmts/tiledb/tiledbmultidimarray.cpp b/frmts/tiledb/tiledbmultidimarray.cpp index a1b92634f349..2d3d5f6ab847 100644 --- a/frmts/tiledb/tiledbmultidimarray.cpp +++ b/frmts/tiledb/tiledbmultidimarray.cpp @@ -31,8 +31,6 @@ #include <algorithm> #include <limits> -#ifdef HAS_TILEDB_MULTIDIM - /************************************************************************/ /* TileDBArray::TileDBArray() */ /************************************************************************/ @@ -1576,5 +1574,3 @@ CSLConstList TileDBArray::GetStructuralInfo() const { return m_aosStructuralInfo.List(); } - -#endif // HAS_TILEDB_MULTIDIM diff --git a/frmts/tiledb/tiledbmultidimattribute.cpp b/frmts/tiledb/tiledbmultidimattribute.cpp index d111ce62ddb4..f84125ab6e3e 100644 --- a/frmts/tiledb/tiledbmultidimattribute.cpp +++ b/frmts/tiledb/tiledbmultidimattribute.cpp @@ -29,8 +29,6 @@ #include 
"tiledbmultidim.h" #include "memmultidim.h" -#ifdef HAS_TILEDB_MULTIDIM - /************************************************************************/ /* TileDBAttribute::TileDBAttribute() */ /************************************************************************/ @@ -199,5 +197,3 @@ bool TileDBAttribute::IWrite(const GUInt64 *arrayStartIdx, const size_t *count, return poParent->PutMetadata(m_osName, tiledb_dt, nValues, oRawResult.data()); } - -#endif // HAS_TILEDB_MULTIDIM diff --git a/frmts/tiledb/tiledbmultidimattributeholder.cpp b/frmts/tiledb/tiledbmultidimattributeholder.cpp index 2760f22ff50c..71dfbbcafe96 100644 --- a/frmts/tiledb/tiledbmultidimattributeholder.cpp +++ b/frmts/tiledb/tiledbmultidimattributeholder.cpp @@ -28,8 +28,6 @@ #include "tiledbmultidim.h" -#ifdef HAS_TILEDB_MULTIDIM - /************************************************************************/ /* TileDBAttributeHolder::~TileDBAttributeHolder() */ /************************************************************************/ @@ -300,5 +298,3 @@ bool TileDBAttributeHolder::PutMetadata(const std::string &key, return false; } } - -#endif diff --git a/frmts/tiledb/tiledbmultidimgroup.cpp b/frmts/tiledb/tiledbmultidimgroup.cpp index aca83e3963a8..00cca02fe46d 100644 --- a/frmts/tiledb/tiledbmultidimgroup.cpp +++ b/frmts/tiledb/tiledbmultidimgroup.cpp @@ -28,8 +28,6 @@ #include "tiledbmultidim.h" -#ifdef HAS_TILEDB_MULTIDIM - #include "memmultidim.h" /************************************************************************/ @@ -574,5 +572,3 @@ bool TileDBGroup::DeleteAttribute(const std::string &osName, { return DeleteAttributeImpl(osName, papszOptions); } - -#endif // HAS_TILEDB_MULTIDIM From c95c689026a0025e2f4c34bb9d648123b9f93ff3 Mon Sep 17 00:00:00 2001 From: Alessandro Pasotti <elpaso@itopen.it> Date: Wed, 24 Apr 2024 14:19:57 +0200 Subject: [PATCH 167/230] [argparse] Return argument reference from add methods This can be used for further configuration of the returned arguments. 
--- apps/gdalargumentparser.cpp | 32 +++++++++++++++++++------------- apps/gdalargumentparser.h | 16 ++++++++-------- 2 files changed, 27 insertions(+), 21 deletions(-) diff --git a/apps/gdalargumentparser.cpp b/apps/gdalargumentparser.cpp index 8870b03805d5..2dffe4a50780 100644 --- a/apps/gdalargumentparser.cpp +++ b/apps/gdalargumentparser.cpp @@ -107,7 +107,7 @@ void GDALArgumentParser::display_error_and_usage(const std::exception &err) /* add_quiet_argument() */ /************************************************************************/ -void GDALArgumentParser::add_quiet_argument(bool *pVar) +Argument &GDALArgumentParser::add_quiet_argument(bool *pVar) { auto &arg = this->add_argument("-q", "--quiet") @@ -117,15 +117,17 @@ void GDALArgumentParser::add_quiet_argument(bool *pVar) "output.")); if (pVar) arg.store_into(*pVar); + + return arg; } /************************************************************************/ /* add_input_format_argument() */ /************************************************************************/ -void GDALArgumentParser::add_input_format_argument(CPLStringList *pvar) +Argument &GDALArgumentParser::add_input_format_argument(CPLStringList *pvar) { - add_argument("-if") + return add_argument("-if") .append() .metavar("<format>") .action( @@ -149,22 +151,23 @@ void GDALArgumentParser::add_input_format_argument(CPLStringList *pvar) /* add_output_format_argument() */ /************************************************************************/ -void GDALArgumentParser::add_output_format_argument(std::string &var) +Argument &GDALArgumentParser::add_output_format_argument(std::string &var) { auto &arg = add_argument("-of") .metavar("<output_format>") .store_into(var) .help(_("Output format.")); add_hidden_alias_for(arg, "-f"); + return arg; } /************************************************************************/ /* add_creation_options_argument() */ /************************************************************************/ -void GDALArgumentParser::add_creation_options_argument(CPLStringList &var) +Argument &GDALArgumentParser::add_creation_options_argument(CPLStringList &var) { - add_argument("-co") + return add_argument("-co") .metavar("<NAME>=<VALUE>") .append() .action([&var](const std::string &s) { var.AddString(s.c_str()); }) @@ -175,9 +178,10 @@ void GDALArgumentParser::add_creation_options_argument(CPLStringList &var) /* add_metadata_item_options_argument() */ /************************************************************************/ -void GDALArgumentParser::add_metadata_item_options_argument(CPLStringList &var) +Argument & +GDALArgumentParser::add_metadata_item_options_argument(CPLStringList &var) { - add_argument("-mo") + return add_argument("-mo") .metavar("<NAME>=<VALUE>") .append() .action([&var](const std::string &s) { var.AddString(s.c_str()); }) @@ -188,16 +192,16 @@ void GDALArgumentParser::add_metadata_item_options_argument(CPLStringList &var) /* add_open_options_argument() */ /************************************************************************/ -void GDALArgumentParser::add_open_options_argument(CPLStringList &var) +Argument &GDALArgumentParser::add_open_options_argument(CPLStringList &var) { - add_open_options_argument(&var); + return add_open_options_argument(&var); } /************************************************************************/ /* add_open_options_argument() */ /************************************************************************/ -void GDALArgumentParser::add_open_options_argument(CPLStringList *pvar) +Argument 
&GDALArgumentParser::add_open_options_argument(CPLStringList *pvar) { auto &arg = add_argument("-oo") .metavar("<NAME>=<VALUE>") @@ -208,15 +212,17 @@ void GDALArgumentParser::add_open_options_argument(CPLStringList *pvar) arg.action([pvar](const std::string &s) { pvar->AddString(s.c_str()); }); } + + return arg; } /************************************************************************/ /* add_output_type_argument() */ /************************************************************************/ -void GDALArgumentParser::add_output_type_argument(GDALDataType &eDT) +Argument &GDALArgumentParser::add_output_type_argument(GDALDataType &eDT) { - add_argument("-ot") + return add_argument("-ot") .metavar("Byte|Int8|[U]Int{16|32|64}|CInt{16|32}|[C]Float{32|64}") .action( [&eDT](const std::string &s) diff --git a/apps/gdalargumentparser.h b/apps/gdalargumentparser.h index c5b545748bf2..6a4929bd4e7b 100644 --- a/apps/gdalargumentparser.h +++ b/apps/gdalargumentparser.h @@ -64,28 +64,28 @@ class GDALArgumentParser : public ArgumentParser void display_error_and_usage(const std::exception &err); //! Add -q/--quiet argument, and store its value in *pVar (if pVar not null) - void add_quiet_argument(bool *pVar); + Argument &add_quiet_argument(bool *pVar); //! Add "-if format_name" argument for input format, and store its value into *pvar. - void add_input_format_argument(CPLStringList *pvar); + Argument &add_input_format_argument(CPLStringList *pvar); //! Add "-of format_name" argument for output format, and store its value into var. - void add_output_format_argument(std::string &var); + Argument &add_output_format_argument(std::string &var); //! Add "-co KEY=VALUE" argument for creation options, and store its value into var. - void add_creation_options_argument(CPLStringList &var); + Argument &add_creation_options_argument(CPLStringList &var); //! Add "-mo KEY=VALUE" argument for metadata item options, and store its value into var. - void add_metadata_item_options_argument(CPLStringList &var); + Argument &add_metadata_item_options_argument(CPLStringList &var); //! Add "-oo KEY=VALUE" argument for open options, and store its value into var. - void add_open_options_argument(CPLStringList &var); + Argument &add_open_options_argument(CPLStringList &var); //! Add "-oo KEY=VALUE" argument for open options, and store its value into *pvar. - void add_open_options_argument(CPLStringList *pvar); + Argument &add_open_options_argument(CPLStringList *pvar); //! Add "-ot data_type" argument for output type, and store its value into eDT. - void add_output_type_argument(GDALDataType &eDT); + Argument &add_output_type_argument(GDALDataType &eDT); //! Parse command line arguments, without the initial program name. 
void parse_args_without_binary_name(CSLConstList papszArgs); From 40884e83eecb383c427200ce0d99898f5c368ba3 Mon Sep 17 00:00:00 2001 From: Alessandro Pasotti <elpaso@itopen.it> Date: Wed, 24 Apr 2024 19:14:02 +0200 Subject: [PATCH 168/230] gdal_contour: use GDALArgumentParser (#9739) --- apps/gdal_contour.cpp | 501 +++++++++++++++++++++--------------------- 1 file changed, 248 insertions(+), 253 deletions(-) diff --git a/apps/gdal_contour.cpp b/apps/gdal_contour.cpp index 5f4fd9c9952f..a86f28530671 100644 --- a/apps/gdal_contour.cpp +++ b/apps/gdal_contour.cpp @@ -33,45 +33,169 @@ #include "gdal_version.h" #include "gdal.h" #include "gdal_alg.h" +#include "gdalargumentparser.h" #include "ogr_api.h" #include "ogr_srs_api.h" #include "commonutils.h" /************************************************************************/ -/* ArgIsNumericContour() */ +/* GDALContourOptions */ /************************************************************************/ -static bool ArgIsNumericContour(const char *pszArg) - +struct GDALContourOptions { - return CPLGetValueType(pszArg) != CPL_VALUE_STRING; -} + int nBand = 1; + double dfInterval = 0.0; + double dfNoData = 0.0; + double dfOffset = 0.0; + double dfExpBase = 0.0; + bool b3D = false; + bool bPolygonize = false; + bool bNoDataSet = false; + bool bIgnoreNoData = false; + std::string osNewLayerName = "contour"; + std::string osFormat; + std::string osElevAttrib; + std::string osElevAttribMin; + std::string osElevAttribMax; + std::vector<double> adfFixedLevels; + CPLStringList aosOpenOptions; + CPLStringList aosCreationOptions; + bool bQuiet = false; + std::string aosDestFilename; + std::string aosSrcFilename; +}; /************************************************************************/ -/* Usage() */ +/* GDALContourAppOptionsGetParser() */ /************************************************************************/ -static void Usage(bool bIsError, const char *pszErrorMsg = nullptr) - +static std::unique_ptr<GDALArgumentParser> +GDALContourAppOptionsGetParser(GDALContourOptions *psOptions) { - fprintf( - bIsError ? stderr : stdout, - "Usage: gdal_contour [--help] [--help-general]\n" - " [-b <band>] [-a <attribute_name>] [-amin " - "<attribute_name>] [-amax <attribute_name>]\n" - " [-3d] [-inodata] [-snodata <n>] [-f <formatname>] " - "[-i <interval>]\n" - " [-dsco <NAME>=<VALUE>]... " - "[-lco <NAME>=<VALUE>]...\n" - " [-off <offset>] [-fl <level> <level>...] [-e " - "<exp_base>]\n" - " [-nln <outlayername>] [-q] [-p]\n" - " <src_filename> <dst_filename>\n"); - - if (pszErrorMsg != nullptr) - fprintf(stderr, "\nFAILURE: %s\n", pszErrorMsg); - - exit(bIsError ? 
1 : 0); + auto argParser = std::make_unique<GDALArgumentParser>( + "gdal_contour", /* bForBinary */ true); + + argParser->add_description(_("Creates contour lines from a raster file.")); + argParser->add_epilog(_( + "For more details, consult the full documentation for the gdal_contour " + "utility: http://gdal.org/gdal_contour.html")); + + argParser->add_argument("-b") + .metavar("<name>") + .default_value(1) + .nargs(1) + .scan<'i', int>() + .store_into(psOptions->nBand) + .help(_("Select an input band band containing the DEM data.")); + + argParser->add_argument("-a") + .metavar("<name>") + .store_into(psOptions->osElevAttrib) + .help(_("Provides a name for the attribute in which to put the " + "elevation.")); + + argParser->add_argument("-amin") + .metavar("<name>") + .store_into(psOptions->osElevAttribMin) + .help(_("Provides a name for the attribute in which to put the minimum " + "elevation.")); + + argParser->add_argument("-amax") + .metavar("<name>") + .store_into(psOptions->osElevAttribMax) + .help(_("Provides a name for the attribute in which to put the maximum " + "elevation.")); + + argParser->add_argument("-3d") + .flag() + .store_into(psOptions->b3D) + .help(_("Force production of 3D vectors instead of 2D.")); + + argParser->add_argument("-inodata") + .flag() + .store_into(psOptions->bIgnoreNoData) + .help(_("Ignore any nodata value implied in the dataset - treat all " + "values as valid.")); + + argParser->add_argument("-snodata") + .metavar("<value>") + .scan<'g', double>() + .action( + [psOptions](const auto &d) + { + psOptions->bNoDataSet = true; + psOptions->dfNoData = CPLAtofM(d.c_str()); + }) + .help(_("Input pixel value to treat as \"nodata\".")); + + argParser->add_output_format_argument(psOptions->osFormat); + + argParser->add_argument("-dsco") + .metavar("<NAME>=<VALUE>") + .append() + .action([psOptions](const std::string &s) + { psOptions->aosOpenOptions.AddString(s.c_str()); }) + .help(_("Dataset creation option (format specific).")); + + argParser->add_argument("-lco") + .metavar("<NAME>=<VALUE>") + .append() + .action([psOptions](const std::string &s) + { psOptions->aosCreationOptions.AddString(s.c_str()); }) + .help(_("Layer creation option (format specific).")); + + auto &group = argParser->add_mutually_exclusive_group(); + + group.add_argument("-i") + .metavar("<interval>") + .scan<'g', double>() + .store_into(psOptions->dfInterval) + .help(_("Elevation interval between contours.")); + + group.add_argument("-fl") + .metavar("<level>") + .nargs(argparse::nargs_pattern::at_least_one) + .scan<'g', double>() + .action([psOptions](const std::string &s) + { psOptions->adfFixedLevels.push_back(CPLAtof(s.c_str())); }) + .help(_("Name one or more \"fixed levels\" to extract.")); + + group.add_argument("-e") + .metavar("<base>") + .scan<'g', double>() + .store_into(psOptions->dfExpBase) + .help(_("Generate levels on an exponential scale: base ^ k, for k an " + "integer.")); + + argParser->add_argument("-off") + .metavar("<offset>") + .scan<'g', double>() + .store_into(psOptions->dfOffset) + .help(_("Offset from zero relative to which to interpret intervals.")); + + argParser->add_argument("-nln") + .metavar("<name>") + .store_into(psOptions->osNewLayerName) + .help(_("Provide a name for the output vector layer. 
Defaults to " + "\"contour\".")); + + argParser->add_argument("-p") + .flag() + .store_into(psOptions->bPolygonize) + .help(_("Generate contour polygons instead of lines.")); + + argParser->add_quiet_argument(&psOptions->bQuiet); + + argParser->add_argument("src_filename") + .store_into(psOptions->aosSrcFilename) + .help("The source raster file."); + + argParser->add_argument("dst_filename") + .store_into(psOptions->aosDestFilename) + .help("The destination vector file."); + + return argParser; } static void CreateElevAttrib(const char *pszElevAttrib, OGRLayerH hLayer) @@ -89,225 +213,90 @@ static void CreateElevAttrib(const char *pszElevAttrib, OGRLayerH hLayer) /* main() */ /************************************************************************/ -#define CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(nExtraArg) \ - do \ - { \ - if (i + nExtraArg >= argc) \ - Usage(true, CPLSPrintf("%s option requires %d argument(s)", \ - argv[i], nExtraArg)); \ - } while (false) - MAIN_START(argc, argv) { - bool b3D = false; - int bNoDataSet = FALSE; - bool bIgnoreNoData = false; - int nBandIn = 1; - double dfInterval = 0.0; - double dfNoData = 0.0; - double dfOffset = 0.0; - double dfExpBase = 0.0; - const char *pszSrcFilename = nullptr; - const char *pszDstFilename = nullptr; - const char *pszElevAttrib = nullptr; - const char *pszElevAttribMin = nullptr; - const char *pszElevAttribMax = nullptr; - const char *pszFormat = nullptr; - char **papszDSCO = nullptr; - char **papszLCO = nullptr; - double adfFixedLevels[1000]; - int nFixedLevelCount = 0; - const char *pszNewLayerName = "contour"; - bool bQuiet = false; + GDALProgressFunc pfnProgress = nullptr; - bool bPolygonize = false; - // Check that we are running against at least GDAL 1.4. - // Note to developers: if we use newer API, please change the requirement. - if (atoi(GDALVersionInfo("VERSION_NUM")) < 1400) - { - fprintf(stderr, - "At least, GDAL >= 1.4.0 is required for this version of %s, " - "which was compiled against GDAL %s\n", - argv[0], GDAL_RELEASE_NAME); - exit(1); - } + EarlySetConfigOptions(argc, argv); + + /* -------------------------------------------------------------------- */ + /* Register standard GDAL drivers, and process generic GDAL */ + /* command options. */ + /* -------------------------------------------------------------------- */ GDALAllRegister(); OGRRegisterAll(); argc = GDALGeneralCmdLineProcessor(argc, &argv, 0); + if (argc < 1) + exit(-argc); /* -------------------------------------------------------------------- */ /* Parse arguments. 
*/ /* -------------------------------------------------------------------- */ - for (int i = 1; i < argc; i++) + + if (argc < 2) { - if (EQUAL(argv[i], "--utility_version")) - { - printf("%s was compiled against GDAL %s and " - "is running against GDAL %s\n", - argv[0], GDAL_RELEASE_NAME, GDALVersionInfo("RELEASE_NAME")); - CSLDestroy(argv); - return 0; - } - else if (EQUAL(argv[i], "--help")) - Usage(false); - else if (EQUAL(argv[i], "-a")) - { - CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1); - // coverity[tainted_data] - pszElevAttrib = argv[++i]; - } - else if (EQUAL(argv[i], "-amin")) - { - CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1); - // coverity[tainted_data] - pszElevAttribMin = argv[++i]; - } - else if (EQUAL(argv[i], "-amax")) - { - CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1); - // coverity[tainted_data] - pszElevAttribMax = argv[++i]; - } - else if (EQUAL(argv[i], "-off")) - { - CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1); - // coverity[tainted_data] - dfOffset = CPLAtof(argv[++i]); - } - else if (EQUAL(argv[i], "-i")) - { - CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1); - // coverity[tainted_data] - dfInterval = CPLAtof(argv[++i]); - } - else if (EQUAL(argv[i], "-e")) - { - CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1); - // coverity[tainted_data] - dfExpBase = CPLAtof(argv[++i]); - } - else if (EQUAL(argv[i], "-p")) - { - bPolygonize = true; - } - else if (EQUAL(argv[i], "-fl")) - { - if (i >= argc - 1) - Usage(true, CPLSPrintf("%s option requires at least 1 argument", - argv[i])); - while (i < argc - 1 && - nFixedLevelCount < static_cast<int>(sizeof(adfFixedLevels) / - sizeof(double)) && - ArgIsNumericContour(argv[i + 1])) - // coverity[tainted_data] - adfFixedLevels[nFixedLevelCount++] = CPLAtof(argv[++i]); - } - else if (EQUAL(argv[i], "-b")) - { - CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1); - // coverity[tainted_data] - nBandIn = atoi(argv[++i]); - } - else if (EQUAL(argv[i], "-f") || EQUAL(argv[i], "-of")) - { - CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1); - // coverity[tainted_data] - pszFormat = argv[++i]; - } - else if (EQUAL(argv[i], "-dsco")) - { - CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1); - // coverity[tainted_data] - papszDSCO = CSLAddString(papszDSCO, argv[++i]); - } - else if (EQUAL(argv[i], "-lco")) - { - CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1); - // coverity[tainted_data] - papszLCO = CSLAddString(papszLCO, argv[++i]); - } - else if (EQUAL(argv[i], "-3d")) - { - b3D = true; - } - else if (EQUAL(argv[i], "-snodata")) + try { - CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1); - bNoDataSet = TRUE; - // coverity[tainted_data] - dfNoData = CPLAtof(argv[++i]); + GDALContourOptions sOptions; + auto argParser = GDALContourAppOptionsGetParser(&sOptions); + fprintf(stderr, "%s\n", argParser->usage().c_str()); } - else if (EQUAL(argv[i], "-nln")) + catch (const std::exception &err) { - CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1); - // coverity[tainted_data] - pszNewLayerName = argv[++i]; + CPLError(CE_Failure, CPLE_AppDefined, "Unexpected exception: %s", + err.what()); } - else if (EQUAL(argv[i], "-inodata")) - { - bIgnoreNoData = true; - } - else if (EQUAL(argv[i], "-q") || EQUAL(argv[i], "-quiet")) - { - bQuiet = TRUE; - } - else if (pszSrcFilename == nullptr) - { - pszSrcFilename = argv[i]; - } - else if (pszDstFilename == nullptr) - { - pszDstFilename = argv[i]; - } - else - Usage(true, "Too many command options."); + exit(1); } - if (dfInterval == 0.0 && nFixedLevelCount == 0 && dfExpBase == 0.0) - { - Usage(true, "Neither -i nor -fl nor -e are specified."); - } + GDALContourOptions sOptions; - if (pszSrcFilename == nullptr) + try { - Usage(true, "Missing source 
filename."); + auto argParser = GDALContourAppOptionsGetParser(&sOptions); + argParser->parse_args_without_binary_name(argv + 1); } - - if (pszDstFilename == nullptr) + catch (const std::exception &error) { - Usage(true, "Missing destination filename."); + CPLError(CE_Failure, CPLE_AppDefined, "%s", error.what()); + exit(1); } - if (strcmp(pszDstFilename, "/vsistdout/") == 0 || - strcmp(pszDstFilename, "/dev/stdout") == 0) + if (sOptions.aosSrcFilename.find("/vsistdout/") != std::string::npos || + sOptions.aosDestFilename.find("/vsistdout/") != std::string::npos) { - bQuiet = true; + sOptions.bQuiet = true; } - if (!bQuiet) + if (!sOptions.bQuiet) pfnProgress = GDALTermProgress; /* -------------------------------------------------------------------- */ /* Open source raster file. */ /* -------------------------------------------------------------------- */ - GDALDatasetH hSrcDS = GDALOpen(pszSrcFilename, GA_ReadOnly); + GDALDatasetH hSrcDS = + GDALOpen(sOptions.aosSrcFilename.c_str(), GA_ReadOnly); if (hSrcDS == nullptr) exit(2); - GDALRasterBandH hBand = GDALGetRasterBand(hSrcDS, nBandIn); + GDALRasterBandH hBand = GDALGetRasterBand(hSrcDS, sOptions.nBand); if (hBand == nullptr) { CPLError(CE_Failure, CPLE_AppDefined, - "Band %d does not exist on dataset.", nBandIn); + "Band %d does not exist on dataset.", sOptions.nBand); exit(2); } - if (!bNoDataSet && !bIgnoreNoData) - dfNoData = GDALGetRasterNoDataValue(hBand, &bNoDataSet); + if (!sOptions.bNoDataSet && !sOptions.bIgnoreNoData) + { + int bNoDataSet; + sOptions.dfNoData = GDALGetRasterNoDataValue(hBand, &bNoDataSet); + sOptions.bNoDataSet = bNoDataSet; + } /* -------------------------------------------------------------------- */ /* Try to get a coordinate system from the raster. */ @@ -318,14 +307,14 @@ MAIN_START(argc, argv) /* Create the output file. */ /* -------------------------------------------------------------------- */ CPLString osFormat; - if (pszFormat == nullptr) + if (sOptions.osFormat.empty()) { - const auto aoDrivers = - GetOutputDriversFor(pszDstFilename, GDAL_OF_VECTOR); + const auto aoDrivers = GetOutputDriversFor( + sOptions.aosDestFilename.c_str(), GDAL_OF_VECTOR); if (aoDrivers.empty()) { CPLError(CE_Failure, CPLE_AppDefined, "Cannot guess driver for %s", - pszDstFilename); + sOptions.aosDestFilename.c_str()); exit(10); } else @@ -334,14 +323,15 @@ MAIN_START(argc, argv) { CPLError(CE_Warning, CPLE_AppDefined, "Several drivers matching %s extension. Using %s", - CPLGetExtension(pszDstFilename), aoDrivers[0].c_str()); + CPLGetExtension(sOptions.aosDestFilename.c_str()), + aoDrivers[0].c_str()); } osFormat = aoDrivers[0]; } } else { - osFormat = pszFormat; + osFormat = sOptions.osFormat; } OGRSFDriverH hDriver = OGRGetDriverByName(osFormat.c_str()); @@ -353,16 +343,17 @@ MAIN_START(argc, argv) exit(10); } - OGRDataSourceH hDS = - OGR_Dr_CreateDataSource(hDriver, pszDstFilename, papszDSCO); + OGRDataSourceH hDS = OGR_Dr_CreateDataSource( + hDriver, sOptions.aosDestFilename.c_str(), sOptions.aosCreationOptions); if (hDS == nullptr) exit(1); OGRLayerH hLayer = OGR_DS_CreateLayer( - hDS, pszNewLayerName, hSRS, - bPolygonize ? (b3D ? wkbMultiPolygon25D : wkbMultiPolygon) - : (b3D ? wkbLineString25D : wkbLineString), - papszLCO); + hDS, sOptions.osNewLayerName.c_str(), hSRS, + sOptions.bPolygonize + ? (sOptions.b3D ? wkbMultiPolygon25D : wkbMultiPolygon) + : (sOptions.b3D ? 
wkbLineString25D : wkbLineString), + sOptions.aosCreationOptions); if (hLayer == nullptr) exit(1); @@ -371,11 +362,11 @@ MAIN_START(argc, argv) OGR_L_CreateField(hLayer, hFld, FALSE); OGR_Fld_Destroy(hFld); - if (bPolygonize) + if (sOptions.bPolygonize) { - if (pszElevAttrib) + if (!sOptions.osElevAttrib.empty()) { - pszElevAttrib = nullptr; + sOptions.osElevAttrib.clear(); CPLError(CE_Warning, CPLE_NotSupported, "-a is ignored in polygonal contouring mode. " "Use -amin and/or -amax instead"); @@ -383,88 +374,94 @@ MAIN_START(argc, argv) } else { - if (pszElevAttribMin != nullptr || pszElevAttribMax != nullptr) + if (!sOptions.osElevAttribMin.empty() || + !sOptions.osElevAttribMax.empty()) { - pszElevAttribMin = nullptr; - pszElevAttribMax = nullptr; + sOptions.osElevAttribMin.clear(); + sOptions.osElevAttribMax.clear(); CPLError(CE_Warning, CPLE_NotSupported, "-amin and/or -amax are ignored in line contouring mode. " "Use -a instead"); } } - if (pszElevAttrib) + if (!sOptions.osElevAttrib.empty()) { - CreateElevAttrib(pszElevAttrib, hLayer); + CreateElevAttrib(sOptions.osElevAttrib.c_str(), hLayer); } - if (pszElevAttribMin) + if (!sOptions.osElevAttribMin.empty()) { - CreateElevAttrib(pszElevAttribMin, hLayer); + CreateElevAttrib(sOptions.osElevAttribMin.c_str(), hLayer); } - if (pszElevAttribMax) + if (!sOptions.osElevAttribMax.empty()) { - CreateElevAttrib(pszElevAttribMax, hLayer); + CreateElevAttrib(sOptions.osElevAttribMax.c_str(), hLayer); } /* -------------------------------------------------------------------- */ /* Invoke. */ /* -------------------------------------------------------------------- */ int iIDField = OGR_FD_GetFieldIndex(OGR_L_GetLayerDefn(hLayer), "ID"); - int iElevField = - (pszElevAttrib == nullptr) - ? -1 - : OGR_FD_GetFieldIndex(OGR_L_GetLayerDefn(hLayer), pszElevAttrib); + int iElevField = (sOptions.osElevAttrib.empty()) + ? -1 + : OGR_FD_GetFieldIndex(OGR_L_GetLayerDefn(hLayer), + sOptions.osElevAttrib.c_str()); - int iElevFieldMin = (pszElevAttribMin == nullptr) - ? -1 - : OGR_FD_GetFieldIndex(OGR_L_GetLayerDefn(hLayer), - pszElevAttribMin); + int iElevFieldMin = + (sOptions.osElevAttribMin.empty()) + ? -1 + : OGR_FD_GetFieldIndex(OGR_L_GetLayerDefn(hLayer), + sOptions.osElevAttribMin.c_str()); - int iElevFieldMax = (pszElevAttribMax == nullptr) - ? -1 - : OGR_FD_GetFieldIndex(OGR_L_GetLayerDefn(hLayer), - pszElevAttribMax); + int iElevFieldMax = + (sOptions.osElevAttribMax.empty()) + ? 
-1 + : OGR_FD_GetFieldIndex(OGR_L_GetLayerDefn(hLayer), + sOptions.osElevAttribMax.c_str()); char **options = nullptr; - if (nFixedLevelCount > 0) + if (!sOptions.adfFixedLevels.empty()) { std::string values = "FIXED_LEVELS="; - for (int i = 0; i < nFixedLevelCount; i++) + for (size_t i = 0; i < sOptions.adfFixedLevels.size(); i++) { const int sz = 32; char *newValue = new char[sz + 1]; - if (i == nFixedLevelCount - 1) + if (i == sOptions.adfFixedLevels.size() - 1) { - CPLsnprintf(newValue, sz + 1, "%f", adfFixedLevels[i]); + CPLsnprintf(newValue, sz + 1, "%f", sOptions.adfFixedLevels[i]); } else { - CPLsnprintf(newValue, sz + 1, "%f,", adfFixedLevels[i]); + CPLsnprintf(newValue, sz + 1, "%f,", + sOptions.adfFixedLevels[i]); } values = values + std::string(newValue); delete[] newValue; } options = CSLAddString(options, values.c_str()); } - else if (dfExpBase != 0.0) + else if (sOptions.dfExpBase != 0.0) { - options = CSLAppendPrintf(options, "LEVEL_EXP_BASE=%f", dfExpBase); + options = + CSLAppendPrintf(options, "LEVEL_EXP_BASE=%f", sOptions.dfExpBase); } - else if (dfInterval != 0.0) + else if (sOptions.dfInterval != 0.0) { - options = CSLAppendPrintf(options, "LEVEL_INTERVAL=%f", dfInterval); + options = + CSLAppendPrintf(options, "LEVEL_INTERVAL=%f", sOptions.dfInterval); } - if (dfOffset != 0.0) + if (sOptions.dfOffset != 0.0) { - options = CSLAppendPrintf(options, "LEVEL_BASE=%f", dfOffset); + options = CSLAppendPrintf(options, "LEVEL_BASE=%f", sOptions.dfOffset); } - if (bNoDataSet) + if (sOptions.bNoDataSet) { - options = CSLAppendPrintf(options, "NODATA=%.19g", dfNoData); + options = CSLAppendPrintf(options, "NODATA=%.19g", sOptions.dfNoData); } if (iIDField != -1) { @@ -482,7 +479,7 @@ MAIN_START(argc, argv) { options = CSLAppendPrintf(options, "ELEV_FIELD_MAX=%d", iElevFieldMax); } - if (bPolygonize) + if (sOptions.bPolygonize) { options = CSLAppendPrintf(options, "POLYGONIZE=YES"); } @@ -496,8 +493,6 @@ MAIN_START(argc, argv) GDALClose(hSrcDS); CSLDestroy(argv); - CSLDestroy(papszDSCO); - CSLDestroy(papszLCO); GDALDestroyDriverManager(); OGRCleanupAll(); From c8769da8cbf575485b446785700eaaf1d8bbe2d3 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Wed, 24 Apr 2024 19:43:11 +0200 Subject: [PATCH 169/230] ossfuzz build.sh: update for latest poppler [ci skip] --- fuzzers/build.sh | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/fuzzers/build.sh b/fuzzers/build.sh index 2436f25cf032..a704d4b2f7b2 100755 --- a/fuzzers/build.sh +++ b/fuzzers/build.sh @@ -139,6 +139,11 @@ curl -L https://github.com/Unidata/netcdf-c/archive/refs/tags/v4.7.4.tar.gz > v4 patch -p0 < $SRC/gdal/fuzzers/fix_stack_read_overflow_ncindexlookup.patch && \ cd .. +rm -rf freetype-2.13.2 +curl -L https://download.savannah.gnu.org/releases/freetype/freetype-2.13.2.tar.xz > freetype-2.13.2.tar.xz && \ + tar xJf freetype-2.13.2.tar.xz && \ + rm freetype-2.13.2.tar.xz + rm -rf poppler git clone --depth 1 https://anongit.freedesktop.org/git/poppler/poppler.git poppler @@ -182,6 +187,14 @@ make -j$(nproc) -s make install cd .. +# build freetype +cd freetype-2.13.2 +CFLAGS="$NON_FUZZING_CFLAGS" ./configure --prefix=$SRC/install +make clean -s +make -j$(nproc) -s +make install +cd .. + # build poppler # We *need* to build with the sanitize flags for the address sanitizer, @@ -201,8 +214,10 @@ fi cd poppler mkdir -p build cd build +# -DENABLE_BOOST=OFF because Boost 1.74 is now required. Ubuntu 20.04 only provides 1.71 cmake .. 
\ -DCMAKE_INSTALL_PREFIX=$SRC/install \ + -DCMAKE_PREFIX_PATH=$SRC/install \ -DCMAKE_BUILD_TYPE=debug \ -DCMAKE_C_FLAGS="$POPPLER_C_FLAGS" \ -DCMAKE_CXX_FLAGS="$POPPLER_CXX_FLAGS" \ @@ -222,6 +237,7 @@ cmake .. \ -DENABLE_GPGME=OFF \ -DENABLE_LCMS=OFF \ -DENABLE_UTILS=OFF \ + -DENABLE_BOOST=OFF \ -DWITH_Cairo=OFF \ -DWITH_NSS3=OFF \ -DBUILD_CPP_TESTS=OFF \ From fb4b603d5ed039424ff92d69cf2cf9001dba6004 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Wed, 24 Apr 2024 20:14:53 +0200 Subject: [PATCH 170/230] Update p-ranav/argparse to latest master --- apps/argparse/README.TXT | 2 +- apps/argparse/argparse.hpp | 11 ++++++----- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/apps/argparse/README.TXT b/apps/argparse/README.TXT index 1bf965a98111..62917d1b5ad8 100644 --- a/apps/argparse/README.TXT +++ b/apps/argparse/README.TXT @@ -1,3 +1,3 @@ Provenance: https://github.com/p-ranav/argparse -Retrieved from https://github.com/p-ranav/argparse/blob/a1c41c5537c919c1a56661ec1cdf5a49b9e99af6/include/argparse/argparse.hpp +Retrieved from https://github.com/p-ranav/argparse/blob/e54e4592867cb9ddb86edd1f18f7f9c401abb170/include/argparse/argparse.hpp diff --git a/apps/argparse/argparse.hpp b/apps/argparse/argparse.hpp index 14487b5b4536..118e2145ce1a 100644 --- a/apps/argparse/argparse.hpp +++ b/apps/argparse/argparse.hpp @@ -698,12 +698,13 @@ class Argument { return *this; } - auto &store_into(int &var) { + template <typename T, typename std::enable_if<std::is_integral<T>::value>::type * = nullptr> + auto &store_into(T &var) { if (m_default_value.has_value()) { - var = std::any_cast<int>(m_default_value); + var = std::any_cast<T>(m_default_value); } action([&var](const auto &s) { - var = details::parse_number<int, details::radix_10>()(s); + var = details::parse_number<T, details::radix_10>()(s); }); return *this; } @@ -1771,8 +1772,8 @@ class ArgumentParser { void parse_args(const std::vector<std::string> &arguments) { parse_args_internal(arguments); // Check if all arguments are parsed - for (const auto &iter : m_argument_map) { - iter.second->validate(); + for ([[maybe_unused]] const auto &[unused, argument] : m_argument_map) { + argument->validate(); } // Check each mutually exclusive group and make sure From abb68afc01055b2a177f962903199c6ad10edcdf Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Wed, 24 Apr 2024 20:15:10 +0200 Subject: [PATCH 171/230] ogrinfo and ogr2ogr: update to use store_into(GIntBig&) --- apps/ogr2ogr_lib.cpp | 6 ++---- apps/ogrinfo_lib.cpp | 6 ++---- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/apps/ogr2ogr_lib.cpp b/apps/ogr2ogr_lib.cpp index b7d223e782a6..3650db61ade0 100644 --- a/apps/ogr2ogr_lib.cpp +++ b/apps/ogr2ogr_lib.cpp @@ -7395,8 +7395,7 @@ static std::unique_ptr<GDALArgumentParser> GDALVectorTranslateOptionsGetParser( argParser->add_argument("-fid") .metavar("<FID>") - .action([psOptions](const std::string &s) - { psOptions->nFIDToFetch = CPLAtoGIntBig(s.c_str()); }) + .store_into(psOptions->nFIDToFetch) .help(_("If provided, only the feature with the specified feature id " "will be processed.")); @@ -7445,8 +7444,7 @@ static std::unique_ptr<GDALArgumentParser> GDALVectorTranslateOptionsGetParser( argParser->add_argument("-limit") .metavar("<nb_features>") - .action([psOptions](const std::string &s) - { psOptions->nLimit = CPLAtoGIntBig(s.c_str()); }) + .store_into(psOptions->nLimit) .help(_("Limit the number of features per layer.")); 
argParser->add_argument("-ds_transaction") diff --git a/apps/ogrinfo_lib.cpp b/apps/ogrinfo_lib.cpp index 397a9c3d9d23..a407116421a3 100644 --- a/apps/ogrinfo_lib.cpp +++ b/apps/ogrinfo_lib.cpp @@ -2261,8 +2261,7 @@ static std::unique_ptr<GDALArgumentParser> GDALVectorInfoOptionsGetParser( argParser->add_argument("-fid") .metavar("<FID>") - .action([psOptions](const std::string &s) - { psOptions->nFetchFID = CPLAtoGIntBig(s.c_str()); }) + .store_into(psOptions->nFetchFID) .help(_("Only the feature with this feature id will be reported.")); argParser->add_argument("-spat") @@ -2328,8 +2327,7 @@ static std::unique_ptr<GDALArgumentParser> GDALVectorInfoOptionsGetParser( argParser->add_argument("-limit") .metavar("<nb_features>") - .action([psOptions](const std::string &s) - { psOptions->nLimit = CPLAtoGIntBig(s.c_str()); }) + .store_into(psOptions->nLimit) .help(_("Limit the number of features per layer.")); argParser->add_argument("-fields") From 8e7e11b83fc7da9f7b6c993994b3805e23509b59 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Wed, 24 Apr 2024 19:23:39 +0200 Subject: [PATCH 172/230] GDALArgumentParser: fix dealing with positional arguments that are negative numeric values (needed for gdallocationinfo) --- apps/gdalargumentparser.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/gdalargumentparser.cpp b/apps/gdalargumentparser.cpp index 2dffe4a50780..bd903e6657a8 100644 --- a/apps/gdalargumentparser.cpp +++ b/apps/gdalargumentparser.cpp @@ -399,7 +399,7 @@ void GDALArgumentParser::parse_args(const CPLStringList &aosArgs) argument->consume(it, end, "", /* dry_run = */ true); for (; it != next_it; ++it) { - if ((*it)[0] == '-') + if (!Argument::is_positional(*it, m_prefix_chars)) { next_it = it; break; From a56a2596819fbe11eb82f9abe562eb6504500ee6 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Wed, 24 Apr 2024 19:23:57 +0200 Subject: [PATCH 173/230] gdallocationinfo: use GDALArgumentParser --- apps/gdallocationinfo.cpp | 306 ++++++++++++----------- doc/source/programs/gdallocationinfo.rst | 2 +- 2 files changed, 156 insertions(+), 152 deletions(-) diff --git a/apps/gdallocationinfo.cpp b/apps/gdallocationinfo.cpp index 1ec5ae30b1e8..d317e2599b5c 100644 --- a/apps/gdallocationinfo.cpp +++ b/apps/gdallocationinfo.cpp @@ -33,6 +33,9 @@ #include "gdal.h" #include "commonutils.h" #include "ogr_spatialref.h" +#include "gdalargumentparser.h" + +#include <limits> #include <vector> #include <cctype> @@ -44,49 +47,15 @@ #endif /************************************************************************/ -/* Usage() */ -/************************************************************************/ - -static void Usage(bool bIsError) - -{ - fprintf( - bIsError ? stderr : stdout, - "Usage: gdallocationinfo [--help] [--help-general]\n" - " [-xml] [-lifonly] [-valonly]\n" - " [-E] [-field_sep <sep>] " - "[-ignore_extra_input]\n" - " [-b <band>]... [-overview <overview_level>]\n" - " [-l_srs <srs_def>] [-geoloc] [-wgs84]\n" - " [-oo <NAME>=<VALUE>]... 
<srcfile> [<x> <y>]\n" - "\n"); - exit(1); -} - -/************************************************************************/ -/* SanitizeSRS */ +/* GetSRSAsWKT */ /************************************************************************/ -static char *SanitizeSRS(const char *pszUserInput) +static std::string GetSRSAsWKT(const char *pszUserInput) { - CPLErrorReset(); - - OGRSpatialReferenceH hSRS = OSRNewSpatialReference(nullptr); - - char *pszResult = nullptr; - if (OSRSetFromUserInput(hSRS, pszUserInput) == OGRERR_NONE) - OSRExportToWkt(hSRS, &pszResult); - else - { - CPLError(CE_Failure, CPLE_AppDefined, - "Translating source or target SRS failed:\n%s", pszUserInput); - exit(1); - } - - OSRDestroySpatialReference(hSRS); - - return pszResult; + OGRSpatialReference oSRS; + oSRS.SetFromUserInput(pszUserInput); + return oSRS.exportToWkt(); } /************************************************************************/ @@ -96,14 +65,15 @@ static char *SanitizeSRS(const char *pszUserInput) MAIN_START(argc, argv) { - const char *pszLocX = nullptr, *pszLocY = nullptr; - const char *pszSrcFilename = nullptr; - char *pszSourceSRS = nullptr; + double dfGeoX = std::numeric_limits<double>::quiet_NaN(); + double dfGeoY = std::numeric_limits<double>::quiet_NaN(); + std::string osSrcFilename; + std::string osSourceSRS; std::vector<int> anBandList; bool bAsXML = false, bLIFOnly = false; bool bQuiet = false, bValOnly = false; - int nOverview = -1; - char **papszOpenOptions = nullptr; + int nOverview = 0; + CPLStringList aosOpenOptions; std::string osFieldSep; bool bIgnoreExtraInput = false; bool bEcho = false; @@ -112,101 +82,146 @@ MAIN_START(argc, argv) argc = GDALGeneralCmdLineProcessor(argc, &argv, 0); if (argc < 1) exit(-argc); + CPLStringList aosArgv; + aosArgv.Assign(argv, /* bAssign = */ true); + + GDALArgumentParser argParser(aosArgv[0], /* bForBinary=*/true); + + argParser.add_description(_("Raster query tool.")); + + const char *pszEpilog = + _("For more details, consult " + "https://gdal.org/programs/gdallocationinfo.html"); + argParser.add_epilog(pszEpilog); + + argParser.add_argument("-xml").flag().store_into(bAsXML).help( + _("Format the output report as XML.")); + + argParser.add_argument("-lifonly") + .flag() + .store_into(bLIFOnly) + .help(_("Only outputs filenames from the LocationInfo request against " + "the database.")); + + argParser.add_argument("-valonly") + .flag() + .store_into(bValOnly) + .help(_("Only outputs pixel values of the selected pixel on each of " + "the selected bands.")); + + argParser.add_argument("-E").flag().store_into(bEcho).help( + _("Enable Echo mode, where input coordinates are prepended to the " + "output lines in -valonly mode.")); + + argParser.add_argument("-field_sep") + .metavar("<sep>") + .store_into(osFieldSep) + .help(_("Defines the field separator, used in -valonly mode, to " + "separate different values.")); + + argParser.add_argument("-ignore_extra_input") + .flag() + .store_into(bIgnoreExtraInput) + .help(_("Set this flag to avoid extra non-numeric content at end of " + "input lines.")); + + argParser.add_argument("-b") + .append() + .metavar("<band>") + .store_into(anBandList) + .help(_("Select band(s).")); + + argParser.add_argument("-overview") + .metavar("<overview_level>") + .store_into(nOverview) + .help(_("Query the (overview_level)th overview (overview_level=1 is " + "the 1st overview).")); - /* -------------------------------------------------------------------- */ - /* Parse arguments. 
*/ - /* -------------------------------------------------------------------- */ - for (int i = 1; i < argc; i++) { - if (EQUAL(argv[i], "--utility_version")) - { - printf("%s was compiled against GDAL %s and is running against " - "GDAL %s\n", - argv[0], GDAL_RELEASE_NAME, GDALVersionInfo("RELEASE_NAME")); - GDALDestroyDriverManager(); - CSLDestroy(argv); - return 0; - } - else if (EQUAL(argv[i], "--help")) - { - Usage(false); - } - else if (i < argc - 1 && EQUAL(argv[i], "-b")) - { - anBandList.push_back(atoi(argv[++i])); - } - else if (i < argc - 1 && EQUAL(argv[i], "-overview")) - { - nOverview = atoi(argv[++i]) - 1; - } - else if (i < argc - 1 && EQUAL(argv[i], "-l_srs")) - { - CPLFree(pszSourceSRS); - // coverity[tainted_data] - pszSourceSRS = SanitizeSRS(argv[++i]); - } - else if (EQUAL(argv[i], "-geoloc")) - { - CPLFree(pszSourceSRS); - pszSourceSRS = CPLStrdup("-geoloc"); - } - else if (EQUAL(argv[i], "-wgs84")) - { - CPLFree(pszSourceSRS); - pszSourceSRS = SanitizeSRS("WGS84"); - } - else if (EQUAL(argv[i], "-xml")) - { - bAsXML = true; - } - else if (EQUAL(argv[i], "-lifonly")) - { - bLIFOnly = true; - bQuiet = true; - } - else if (EQUAL(argv[i], "-valonly")) - { - bValOnly = true; - bQuiet = true; - } - else if (i < argc - 1 && EQUAL(argv[i], "-field_sep")) - { - osFieldSep = CPLString(argv[++i]) - .replaceAll("\\t", '\t') - .replaceAll("\\r", '\r') - .replaceAll("\\n", '\n'); - } - else if (EQUAL(argv[i], "-ignore_extra_input")) - { - bIgnoreExtraInput = true; - } - else if (EQUAL(argv[i], "-E")) - { - bEcho = true; - } - else if (i < argc - 1 && EQUAL(argv[i], "-oo")) - { - papszOpenOptions = CSLAddString(papszOpenOptions, argv[++i]); - } - else if (argv[i][0] == '-' && - !isdigit(static_cast<unsigned char>(argv[i][1]))) - Usage(true); + auto &group = argParser.add_mutually_exclusive_group(); + + group.add_argument("-l_srs") + .metavar("<srs_def>") + .store_into(osSourceSRS) + .help(_("Coordinate system of the input x, y location.")); + + group.add_argument("-geoloc") + .flag() + .action([&osSourceSRS](const std::string &) + { osSourceSRS = "-geoloc"; }) + .help(_("Indicates input x,y points are in the georeferencing " + "system of the image.")); + + group.add_argument("-wgs84") + .flag() + .action([&osSourceSRS](const std::string &) + { osSourceSRS = GetSRSAsWKT("WGS84"); }) + .help(_("Indicates input x,y points are WGS84 long, lat.")); + } - else if (pszSrcFilename == nullptr) - pszSrcFilename = argv[i]; + argParser.add_open_options_argument(&aosOpenOptions); - else if (pszLocX == nullptr) - pszLocX = argv[i]; + argParser.add_argument("srcfile") + .metavar("<srcfile>") + .nargs(1) + .store_into(osSrcFilename) + .help(_("The source GDAL raster datasource name.")); - else if (pszLocY == nullptr) - pszLocY = argv[i]; + argParser.add_argument("x") + .metavar("<x>") + .nargs(argparse::nargs_pattern::optional) + .store_into(dfGeoX) + .help(_("X location of target pixel.")); - else - Usage(true); + argParser.add_argument("y") + .metavar("<y>") + .nargs(argparse::nargs_pattern::optional) + .store_into(dfGeoY) + .help(_("Y location of target pixel.")); + + const auto displayUsage = [&argParser]() + { + std::stringstream usageStringStream; + usageStringStream << argParser.usage(); + std::cerr << CPLString(usageStringStream.str()) + .replaceAll("<x> <y>", "[<x> <y>]") + << std::endl + << std::endl; + std::cout << _("Note: ") << "gdallocationinfo" + << _(" --long-usage for full help.") << std::endl; + }; + + try + { + argParser.parse_args(aosArgv); + } + catch (const std::exception 
&err) + { + std::cerr << _("Error: ") << err.what() << std::endl; + displayUsage(); + std::exit(1); } - if (pszSrcFilename == nullptr || (pszLocX != nullptr && pszLocY == nullptr)) - Usage(true); + if (bLIFOnly || bValOnly) + bQuiet = true; + + // User specifies with 1-based index, but internally we use 0-based index + --nOverview; + + // Deal with special characters + osFieldSep = CPLString(osFieldSep) + .replaceAll("\\t", '\t') + .replaceAll("\\r", '\r') + .replaceAll("\\n", '\n'); + + if (!std::isnan(dfGeoX) && std::isnan(dfGeoY)) + { + fprintf(stderr, "<y> should be specified when <x> is specified\n\n"); + displayUsage(); + exit(1); + } + + const bool bIsXYSpecifiedAsArgument = !std::isnan(dfGeoX); if (bEcho && !bValOnly) { @@ -233,9 +248,9 @@ MAIN_START(argc, argv) /* -------------------------------------------------------------------- */ /* Open source file. */ /* -------------------------------------------------------------------- */ - GDALDatasetH hSrcDS = - GDALOpenEx(pszSrcFilename, GDAL_OF_RASTER | GDAL_OF_VERBOSE_ERROR, - nullptr, papszOpenOptions, nullptr); + GDALDatasetH hSrcDS = GDALOpenEx(osSrcFilename.c_str(), + GDAL_OF_RASTER | GDAL_OF_VERBOSE_ERROR, + nullptr, aosOpenOptions.List(), nullptr); if (hSrcDS == nullptr) exit(1); @@ -244,10 +259,10 @@ MAIN_START(argc, argv) /* -------------------------------------------------------------------- */ OGRSpatialReferenceH hSrcSRS = nullptr; OGRCoordinateTransformationH hCT = nullptr; - if (pszSourceSRS != nullptr && !EQUAL(pszSourceSRS, "-geoloc")) + if (!osSourceSRS.empty() && !EQUAL(osSourceSRS.c_str(), "-geoloc")) { - hSrcSRS = OSRNewSpatialReference(pszSourceSRS); + hSrcSRS = OSRNewSpatialReference(osSourceSRS.c_str()); OSRSetAxisMappingStrategy(hSrcSRS, OAMS_TRADITIONAL_GIS_ORDER); auto hTrgSRS = GDALGetSpatialRef(hSrcDS); if (!hTrgSRS) @@ -271,19 +286,17 @@ MAIN_START(argc, argv) /* Turn the location into a pixel and line location. */ /* -------------------------------------------------------------------- */ bool inputAvailable = true; - double dfGeoX = 0; - double dfGeoY = 0; CPLString osXML; char szLine[1024]; int nLine = 0; std::string osExtraContent; - if (pszLocX == nullptr && pszLocY == nullptr) + if (std::isnan(dfGeoX)) { // Is it an interactive terminal ? if (isatty(static_cast<int>(fileno(stdin)))) { - if (pszSourceSRS != nullptr) + if (!osSourceSRS.empty()) { fprintf( stderr, @@ -332,11 +345,6 @@ MAIN_START(argc, argv) inputAvailable = false; } } - else - { - dfGeoX = CPLAtof(pszLocX); - dfGeoY = CPLAtof(pszLocY); - } int nRetCode = 0; while (inputAvailable) @@ -349,7 +357,7 @@ MAIN_START(argc, argv) exit(1); } - if (pszSourceSRS != nullptr) + if (!osSourceSRS.empty()) { double adfGeoTransform[6] = {}; if (GDALGetGeoTransform(hSrcDS, adfGeoTransform) != CE_None) @@ -628,7 +636,7 @@ MAIN_START(argc, argv) printf("\n"); } - if (pszLocX != nullptr && pszLocY != nullptr) + if (bIsXYSpecifiedAsArgument) break; osExtraContent.clear(); @@ -695,10 +703,6 @@ MAIN_START(argc, argv) GDALDumpOpenDatasets(stderr); GDALDestroyDriverManager(); - CPLFree(pszSourceSRS); - CSLDestroy(papszOpenOptions); - - CSLDestroy(argv); return nRetCode; } diff --git a/doc/source/programs/gdallocationinfo.rst b/doc/source/programs/gdallocationinfo.rst index 023711b24165..65569e4a31f9 100644 --- a/doc/source/programs/gdallocationinfo.rst +++ b/doc/source/programs/gdallocationinfo.rst @@ -19,7 +19,7 @@ Synopsis [-xml] [-lifonly] [-valonly] [-E] [-field_sep <sep>] [-ignore_extra_input] [-b <band>]... 
[-overview <overview_level>] - [-l_srs <srs_def>] [-geoloc] [-wgs84] + [[-l_srs <srs_def>] | [-geoloc] | [-wgs84]] [-oo <NAME>=<VALUE>]... <srcfile> [<x> <y>] From 3cbea422dd08327cb2c43e06fa98db09f4a25c1e Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Wed, 24 Apr 2024 21:34:43 +0200 Subject: [PATCH 174/230] Miramon: CMakeLists.txt: tag the driver as PLUGIN_CAPABLE NO_DEPS, instead of just PLUGIN_CAPABLE --- ogr/ogrsf_frmts/miramon/CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ogr/ogrsf_frmts/miramon/CMakeLists.txt b/ogr/ogrsf_frmts/miramon/CMakeLists.txt index 8eccd4e46b07..aac8dccc64cb 100644 --- a/ogr/ogrsf_frmts/miramon/CMakeLists.txt +++ b/ogr/ogrsf_frmts/miramon/CMakeLists.txt @@ -1,7 +1,7 @@ add_gdal_driver( TARGET ogr_MiraMon SOURCES ogrmiramondatasource.cpp ogrmiramondriver.cpp ogrmiramonlayer.cpp mm_wrlayr.c mm_gdal_functions.c mm_rdlayr.c - PLUGIN_CAPABLE) + PLUGIN_CAPABLE NO_DEPS) gdal_standard_includes(ogr_MiraMon) target_include_directories(ogr_MiraMon PRIVATE $<TARGET_PROPERTY:ogrsf_generic,SOURCE_DIR>) From 7a4ab2d0ea57acf209916b16d3bc8c3f0404dab6 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Wed, 24 Apr 2024 21:40:29 +0200 Subject: [PATCH 175/230] GDALOpen(): avoid double '.' in error messages related to plugin drivers --- gcore/gdaldataset.cpp | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/gcore/gdaldataset.cpp b/gcore/gdaldataset.cpp index 45b333a10d69..3c5478ce27f9 100644 --- a/gcore/gdaldataset.cpp +++ b/gcore/gdaldataset.cpp @@ -4008,7 +4008,9 @@ GDALDatasetH CPL_STDCALL GDALOpenEx(const char *pszFilename, { if (VSIStat(pszGDALDriverPath, &sStat) != 0) { - osMsg += ". Directory '"; + if (osMsg.back() != '.') + osMsg += "."; + osMsg += " Directory '"; osMsg += pszGDALDriverPath; osMsg += "' pointed by GDAL_DRIVER_PATH does not exist."; @@ -4016,10 +4018,12 @@ GDALDatasetH CPL_STDCALL GDALOpenEx(const char *pszFilename, } else { + if (osMsg.back() != '.') + osMsg += "."; #ifdef INSTALL_PLUGIN_FULL_DIR if (VSIStat(INSTALL_PLUGIN_FULL_DIR, &sStat) != 0) { - osMsg += ". Directory '"; + osMsg += " Directory '"; osMsg += INSTALL_PLUGIN_FULL_DIR; osMsg += "' hardcoded in the GDAL library does not " "exist and the GDAL_DRIVER_PATH " @@ -4028,7 +4032,7 @@ GDALDatasetH CPL_STDCALL GDALOpenEx(const char *pszFilename, else #endif { - osMsg += ". 
The GDAL_DRIVER_PATH configuration " + osMsg += " The GDAL_DRIVER_PATH configuration " "option is not set."; } } From 323e2498fa21dfee2c04adca04464c7f346fbad2 Mon Sep 17 00:00:00 2001 From: AbelPau <92721356+AbelPau@users.noreply.github.com> Date: Wed, 24 Apr 2024 22:15:24 +0200 Subject: [PATCH 176/230] MiraMonVector fixes (2) tests improvements (+10) (#9744) **Fixes** - Delete of unused function (MMInitHeader) - Use of CPLError() in some explicit return errors **Test improvements** - last_version added in -lco Version - more corrupted files tested - unexisting coordinates or unexpected polygon construction tested datefield added in tests code simplification - multipolygon Z added in tests - multigeometry added in tests - ANSI DBFEncoding test added --- .../CorruptedFiles/BadCycle/SimplePolFile.arc | Bin 0 -> 536 bytes .../CorruptedFiles/BadCycle/SimplePolFile.nod | Bin 0 -> 92 bytes .../CorruptedFiles/BadCycle/SimplePolFile.pol | Bin 0 -> 349 bytes .../BadCycle/SimplePolFileA.dbf | Bin 0 -> 511 bytes .../BadCycle/SimplePolFileA.rel | 89 +++++++ .../BadCycle/SimplePolFileN.dbf | Bin 0 -> 261 bytes .../BadCycle/SimplePolFileN.rel | 64 +++++ .../BadCycle/SimplePolFileP.dbf | Bin 0 -> 729 bytes .../BadCycle/SimplePolFileP.rel | 93 +++++++ .../CorruptedCoordinates.arc | Bin 0 -> 240 bytes .../CorruptedCoordinates.nod | Bin 0 -> 92 bytes .../CorruptedCoordinates.pol | Bin 0 -> 349 bytes .../CorruptedCoordinatesA.dbf | Bin 0 -> 511 bytes .../CorruptedCoordinatesA.rel | 89 +++++++ .../CorruptedCoordinatesN.dbf | Bin 0 -> 261 bytes .../CorruptedCoordinatesN.rel | 64 +++++ .../CorruptedCoordinatesP.dbf | Bin 0 -> 729 bytes .../CorruptedCoordinatesP.rel | 93 +++++++ .../CorruptedCoordinatesPoint.pnt | Bin 0 -> 60 bytes .../CorruptedCoordinatesPointT.dbf | Bin 0 -> 254 bytes .../CorruptedCoordinatesPointT.rel | 58 ++++ .../CorruptedPolygon/Multipolygons.arc | Bin 0 -> 1168 bytes .../CorruptedPolygon/Multipolygons.nod | Bin 0 -> 112 bytes .../CorruptedPolygon/Multipolygons.pol | Bin 0 -> 256 bytes .../CorruptedPolygon/MultipolygonsA.dbf | Bin 0 -> 733 bytes .../CorruptedPolygon/MultipolygonsA.rel | 92 +++++++ .../CorruptedPolygon/MultipolygonsN.dbf | Bin 0 -> 305 bytes .../CorruptedPolygon/MultipolygonsN.rel | 68 +++++ .../CorruptedPolygon/MultipolygonsP.dbf | Bin 0 -> 1055 bytes .../CorruptedPolygon/MultipolygonsP.rel | 108 ++++++++ .../CorruptedFiles/NoArcRel/SimpleArcFile.arc | Bin 0 -> 592 bytes .../CorruptedFiles/NoArcRel/SimpleArcFile.nod | Bin 0 -> 172 bytes .../NoArcRel/SimpleArcFileA.dbf | Bin 0 -> 789 bytes .../NoArcRel/SimpleArcFileN.dbf | Bin 0 -> 481 bytes .../NoArcRel/SimpleArcFileN.rel | 64 +++++ .../CorruptedFiles/NoNode/SimpleArcFile.arc | Bin 0 -> 592 bytes .../CorruptedFiles/NoNode/SimpleArcFileA.dbf | Bin 0 -> 789 bytes .../CorruptedFiles/NoNode/SimpleArcFileA.rel | 102 +++++++ .../CorruptedFiles/NoNode/SimpleArcFileN.dbf | Bin 0 -> 481 bytes .../CorruptedFiles/NoNode/SimpleArcFileN.rel | 64 +++++ .../CorruptedFiles/NoPolRel/SimplePolFile.arc | Bin 0 -> 536 bytes .../CorruptedFiles/NoPolRel/SimplePolFile.nod | Bin 0 -> 92 bytes .../CorruptedFiles/NoPolRel/SimplePolFile.pol | Bin 0 -> 349 bytes .../NoPolRel/SimplePolFileA.dbf | Bin 0 -> 511 bytes .../NoPolRel/SimplePolFileA.rel | 89 +++++++ .../NoPolRel/SimplePolFileN.dbf | Bin 0 -> 261 bytes .../NoPolRel/SimplePolFileN.rel | 64 +++++ .../NoPolRel/SimplePolFileP.dbf | Bin 0 -> 729 bytes autotest/ogr/ogr_miramon_vector.py | 252 +++++++++++++----- ogr/ogrsf_frmts/miramon/mm_wrlayr.c | 74 ----- ogr/ogrsf_frmts/miramon/mm_wrlayr.h | 1 - 
ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp | 20 +- 52 files changed, 1393 insertions(+), 155 deletions(-) create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFile.arc create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFile.nod create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFile.pol create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileA.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileA.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileN.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileN.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileP.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileP.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinates.arc create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinates.nod create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinates.pol create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesA.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesA.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesN.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesN.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesP.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesP.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesPoint.pnt create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesPointT.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesPointT.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/Multipolygons.arc create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/Multipolygons.nod create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/Multipolygons.pol create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsA.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsA.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsN.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsN.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsP.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsP.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoArcRel/SimpleArcFile.arc create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoArcRel/SimpleArcFile.nod create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoArcRel/SimpleArcFileA.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoArcRel/SimpleArcFileN.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoArcRel/SimpleArcFileN.rel create mode 100644 
autotest/ogr/data/miramon/CorruptedFiles/NoNode/SimpleArcFile.arc create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoNode/SimpleArcFileA.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoNode/SimpleArcFileA.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoNode/SimpleArcFileN.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoNode/SimpleArcFileN.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFile.arc create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFile.nod create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFile.pol create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFileA.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFileA.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFileN.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFileN.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFileP.dbf diff --git a/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFile.arc b/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFile.arc new file mode 100644 index 0000000000000000000000000000000000000000..431d702a3a3540257dfdffce360f123848d1eeb8 GIT binary patch literal 536 zcmZ<^a#k?ZGi2E98#`I@Yl*|Y+{;szD@=CS(bD|peQJe+oy>ifmSbHG%nT3!Qx`GM zUGJG~heHqB*RaOxb#QfTKxH?eYC%-nGFH!W^(hYSdv8Sre(Qvr|LB0;fx_u^4xaNr zCY1GcIdA~gS}-y&01+6weNBkr>znA{yGPA9OQp%dR8&BU|5*pzT}L-DR<}-Rasb)4 z1E`4!NQ1DHuuoUCOREFSoy(sY)!yRmaHu{}u-Mq85$;Zy`;T~ST9YW-=1{=1XUjX6 z7Px;txb6*opjzRu;BL<4DeCnY=C!#Uo5sIk0^I$V@=Mw{p7%H?Xf64*P_-TIU%uVd zKAU`+9d=kPniM47=K%Bfnxh;csREN6-p-uN_CLD;9xj3#+rAovwmOK1@3PEPnB>6k zX|B$DufqY|zE6uM1zmpvG#46Auy6`G`>G`2PoqP}O}kqSbK4wX;RFjmfwL6`*4t|x X(gN6zt#oa7FnhbkkNsGM1G;$t@$uhR literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFile.nod b/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFile.nod new file mode 100644 index 0000000000000000000000000000000000000000..e5d310c3c0cee2ef29d314c8b4e269060b4f902e GIT binary patch literal 92 zcmebCcTq6ZGi2E98#`I@Yl%af+p%f<8zwmHXlZ`)KDENZbN<JKvc4_{W(EjgWME?O V05U;z0F;h^%7G|GD4PjL0|5OV62Jfe literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFile.pol b/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFile.pol new file mode 100644 index 0000000000000000000000000000000000000000..0a2e4736fe6478f5c622ee571b3c8a07b4443c76 GIT binary patch literal 349 zcmWIW_fas^GgR2^8#`I@Yl*|Y+{;szD@=CS(bD|peQJe+oy>ifmSbHGEDQ+12xKxL zxD3odHX;2;${9RB2A~0${)l<*de3Y-9D3NkhBaQVgWC_%0>dEvZOd3a%hjhiG>W+N z7MK2ZaNm0?D)3t;-2IOZ=p87WUgzLB|6@W~UzY<+FNh8R5^i4;V)*(dIxIVRca6>b zuMWO@)Qq!KnjB0;1*G_&b-?|9bQ5EB>y#!G{SiR@Qo=r6(JrkHr@l^&Y`%Ke0Tc!x Nzz(ti2$(?u1OVn<W$pj~ literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileA.dbf b/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileA.dbf new file mode 100644 index 0000000000000000000000000000000000000000..22d64985be2349156fb209f77c88885217bf9fc4 GIT binary patch literal 511 zcmZ{fK?=e!5Jd+H-MQ19i<}@dF^%c6*t7&Bg<8a2_Flo$c{5uPzb)e|!soxd3CSM$ zJ|m(h-KTRkTcXgUZm8REB8oHJt**p$^~30Imp}a6$1c^wq<{Q;xghq9Pa%dcUt)Um zcE<es8^KM%l@_Jau9D7L<0_O$tj?A~!9sF~O6h|vl_|7AiNxw`DKspkpFauf3RxOs Kt0OoPtLqo7Pdilr literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileA.rel 
b/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileA.rel new file mode 100644 index 000000000000..355451718486 --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileA.rel @@ -0,0 +1,89 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16204654+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=0e09e6cf-0d31-499e-bcf0-bee4d3e9d87a_SimplePolFileA + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=0e09e6cf-0d31-499e-bcf0-bee4d3e9d87a_SimplePolFileA +codeSpace= +DatasetTitle=Simple Pol File [píxels] + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemDefinition=Local +HorizontalSystemIdentifier=plane +unitats=STB#T_pixels +unitatsY=? + +[EXTENT] +toler_env=0 +MinX=335.3187440533326 +MaxX=1224.163653663228 +MinY=390.371075166458 +MaxY=856.814462416696 + +[OVERVIEW] +CreationDate=20230628 16204653+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:N_VERTEXS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de vèrtexs + +[TAULA_PRINCIPAL:LONG_ARC] +descriptor=Longitud de l'arc + +[TAULA_PRINCIPAL:NODE_INI] +visible=0 +MostrarUnitats=0 +descriptor=Node inicial + +[TAULA_PRINCIPAL:NODE_FI] +visible=0 +MostrarUnitats=0 +descriptor=Node final + +[OVERVIEW:ASPECTES_TECNICS] +Ciclat1=SimplePolFile.pol + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204654+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampLongitudArc=LONG_ARC +NomCampNodeIni=NODE_INI +NomCampNodeFi=NODE_FI diff --git a/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileN.dbf b/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileN.dbf new file mode 100644 index 0000000000000000000000000000000000000000..19847ee7684ceccc4001218201034fe3e646fd67 GIT binary patch literal 261 zcmZRsW|LuNU|?uu&;gQYK!kz8(<R<L$kENynSsF%BrA%h#4*S@INmYd&))^Re28a2 kXfR0H6(Y}wrk_^<8!*5oXlR5@RKXCNC=Rt)O)xY903eGTUH||9 literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileN.rel b/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileN.rel new file mode 100644 index 000000000000..c0f476e1f0de --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileN.rel @@ -0,0 +1,64 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16204653+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=701e2102-a0ba-4cb2-aeb5-bdb329c79868_SimplePolFileN + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=701e2102-a0ba-4cb2-aeb5-bdb329c79868_SimplePolFileN +codeSpace= +DatasetTitle=Simple Pol File [píxels] + +[EXTENT] +toler_env=0 + +[OVERVIEW] +CreationDate=20230628 16204652+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + 
+[TAULA_PRINCIPAL:ARCS_A_NOD] +MostrarUnitats=0 +descriptor=Nombre d'arcs al node + +[TAULA_PRINCIPAL:TIPUS_NODE] +MostrarUnitats=0 +descriptor=Tipus de node + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204653+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampArcsANode=ARCS_A_NOD +NomCampTipusNode=TIPUS_NODE diff --git a/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileP.dbf b/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileP.dbf new file mode 100644 index 0000000000000000000000000000000000000000..5d00a81ae4a44cefb1edfafaf085f49073b42336 GIT binary patch literal 729 zcma)&OHRWu5QYa8b%8)^U~ev9+2cp-rAbW{sf5Zzh+S^L2{=4%<V+rFBxaHM=l|v* zXZO3?-vYqP%}@TW;t8NR^v4w5ni`<J(7RPj+keH>#qZAzevYYmk6ns|Kkt_dLW-d{ z!~QjI`;h9qJjcfU^T(+<Zt~D|a+n6lPXM?t#h5b3q6hvsf|4H?g}9<Fmf&It!dK0$ z>BZEnkfbdsCeeBBtkuqsrj)2F$q}WWOg2r+Fm~qrI@1e@R5?;CuU6bhEl`u`oUw{m p9#Kr@>{?Boj2&irAr)1#w=wGkH(Fa?5mv&O{Cf>5J<a^@+8-ctQn>&C literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileP.rel b/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileP.rel new file mode 100644 index 000000000000..46823207aee6 --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/BadCycle/SimplePolFileP.rel @@ -0,0 +1,93 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16204988+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=00691677-6d15-40f8-9d62-e8df34876e80_SimplePolFileP + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code= +codeSpace= +DatasetTitle=Simple Pol File + +[OVERVIEW:ASPECTES_TECNICS] +ArcSource=NoExistPolFile.arc + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204988+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[EXTENT] +toler_env=0 +MinX=335.318744053333 +MaxX=1224.16365366323 +MinY=390.371075166458 +MaxY=856.814462416696 + +[OVERVIEW] +CreationDate=20230628 16204986+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +descriptor=Identificador Gràfic intern +visible=0 +TractamentVariable=Ordinal + +[TAULA_PRINCIPAL:N_VERTEXS] +descriptor=Nombre de vèrtexs +visible=0 +MostrarUnitats=0 + +[TAULA_PRINCIPAL:PERIMETRE] +descriptor=Perímetre del polígon + +[TAULA_PRINCIPAL:AREA] +descriptor=Àrea del polígon + +[TAULA_PRINCIPAL:N_ARCS] +descriptor=Nombre d'arcs +visible=0 +MostrarUnitats=0 + +[TAULA_PRINCIPAL:N_POLIG] +descriptor=Nombre de polígons elementals +visible=0 +MostrarUnitats=0 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampPerimetre=PERIMETRE +NomCampArea=AREA +NomCampNArcs=N_ARCS +NomCampNPoligons=N_POLIG + +[TAULA_PRINCIPAL:ATT1] +descriptor=atribute1 + +[TAULA_PRINCIPAL:ATT2] +descriptor=atribute2 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinates.arc b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinates.arc new file mode 100644 index 
0000000000000000000000000000000000000000..78bba69d09d68746c4e00b37593f167cd42bf888 GIT binary patch literal 240 zcmZ<^a#k?ZGi2E98#`I@Yl*|Y+{;szD@=CS(bD|peQJe+oy>ifmSbHG%nT3!Qx`GM zUGJG~heHqB*RaOxb#QfTKxH?eYC%-nGFH!W^(hYSdv8Sre(Qvr|LB0;fx_u^4xaNr zCY1GcIdA~gS}-y&01+6weNBkr>znA{yGPA9OQp%dR8&BU|5*pzT}L-DR<}-Rasb)4 m1E`4!NQ1DHuuoUCOREFSoy(sY)!yRmaHu{}u-Mq8(E$KrXj9Vw literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinates.nod b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinates.nod new file mode 100644 index 0000000000000000000000000000000000000000..e5d310c3c0cee2ef29d314c8b4e269060b4f902e GIT binary patch literal 92 zcmebCcTq6ZGi2E98#`I@Yl%af+p%f<8zwmHXlZ`)KDENZbN<JKvc4_{W(EjgWME?O V05U;z0F;h^%7G|GD4PjL0|5OV62Jfe literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinates.pol b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinates.pol new file mode 100644 index 0000000000000000000000000000000000000000..0a2e4736fe6478f5c622ee571b3c8a07b4443c76 GIT binary patch literal 349 zcmWIW_fas^GgR2^8#`I@Yl*|Y+{;szD@=CS(bD|peQJe+oy>ifmSbHGEDQ+12xKxL zxD3odHX;2;${9RB2A~0${)l<*de3Y-9D3NkhBaQVgWC_%0>dEvZOd3a%hjhiG>W+N z7MK2ZaNm0?D)3t;-2IOZ=p87WUgzLB|6@W~UzY<+FNh8R5^i4;V)*(dIxIVRca6>b zuMWO@)Qq!KnjB0;1*G_&b-?|9bQ5EB>y#!G{SiR@Qo=r6(JrkHr@l^&Y`%Ke0Tc!x Nzz(ti2$(?u1OVn<W$pj~ literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesA.dbf b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesA.dbf new file mode 100644 index 0000000000000000000000000000000000000000..22d64985be2349156fb209f77c88885217bf9fc4 GIT binary patch literal 511 zcmZ{fK?=e!5Jd+H-MQ19i<}@dF^%c6*t7&Bg<8a2_Flo$c{5uPzb)e|!soxd3CSM$ zJ|m(h-KTRkTcXgUZm8REB8oHJt**p$^~30Imp}a6$1c^wq<{Q;xghq9Pa%dcUt)Um zcE<es8^KM%l@_Jau9D7L<0_O$tj?A~!9sF~O6h|vl_|7AiNxw`DKspkpFauf3RxOs Kt0OoPtLqo7Pdilr literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesA.rel b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesA.rel new file mode 100644 index 000000000000..ab3cbbf56076 --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesA.rel @@ -0,0 +1,89 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16204654+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=0e09e6cf-0d31-499e-bcf0-bee4d3e9d87a_CorruptedCoordinatesA + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=0e09e6cf-0d31-499e-bcf0-bee4d3e9d87a_CorruptedCoordinatesA +codeSpace= +DatasetTitle=Corruptes coordinates [píxels] + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemDefinition=Local +HorizontalSystemIdentifier=plane +unitats=STB#T_pixels +unitatsY=? 
+ +[EXTENT] +toler_env=0 +MinX=335.3187440533326 +MaxX=1224.163653663228 +MinY=390.371075166458 +MaxY=856.814462416696 + +[OVERVIEW] +CreationDate=20230628 16204653+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:N_VERTEXS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de vèrtexs + +[TAULA_PRINCIPAL:LONG_ARC] +descriptor=Longitud de l'arc + +[TAULA_PRINCIPAL:NODE_INI] +visible=0 +MostrarUnitats=0 +descriptor=Node inicial + +[TAULA_PRINCIPAL:NODE_FI] +visible=0 +MostrarUnitats=0 +descriptor=Node final + +[OVERVIEW:ASPECTES_TECNICS] +Ciclat1=CorruptedCoordinates.pol + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204654+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampLongitudArc=LONG_ARC +NomCampNodeIni=NODE_INI +NomCampNodeFi=NODE_FI diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesN.dbf b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesN.dbf new file mode 100644 index 0000000000000000000000000000000000000000..19847ee7684ceccc4001218201034fe3e646fd67 GIT binary patch literal 261 zcmZRsW|LuNU|?uu&;gQYK!kz8(<R<L$kENynSsF%BrA%h#4*S@INmYd&))^Re28a2 kXfR0H6(Y}wrk_^<8!*5oXlR5@RKXCNC=Rt)O)xY903eGTUH||9 literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesN.rel b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesN.rel new file mode 100644 index 000000000000..c0f476e1f0de --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesN.rel @@ -0,0 +1,64 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16204653+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=701e2102-a0ba-4cb2-aeb5-bdb329c79868_SimplePolFileN + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=701e2102-a0ba-4cb2-aeb5-bdb329c79868_SimplePolFileN +codeSpace= +DatasetTitle=Simple Pol File [píxels] + +[EXTENT] +toler_env=0 + +[OVERVIEW] +CreationDate=20230628 16204652+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:ARCS_A_NOD] +MostrarUnitats=0 +descriptor=Nombre d'arcs al node + +[TAULA_PRINCIPAL:TIPUS_NODE] +MostrarUnitats=0 +descriptor=Tipus de node + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204653+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampArcsANode=ARCS_A_NOD +NomCampTipusNode=TIPUS_NODE diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesP.dbf b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesP.dbf new file mode 100644 index 
0000000000000000000000000000000000000000..5d00a81ae4a44cefb1edfafaf085f49073b42336 GIT binary patch literal 729 zcma)&OHRWu5QYa8b%8)^U~ev9+2cp-rAbW{sf5Zzh+S^L2{=4%<V+rFBxaHM=l|v* zXZO3?-vYqP%}@TW;t8NR^v4w5ni`<J(7RPj+keH>#qZAzevYYmk6ns|Kkt_dLW-d{ z!~QjI`;h9qJjcfU^T(+<Zt~D|a+n6lPXM?t#h5b3q6hvsf|4H?g}9<Fmf&It!dK0$ z>BZEnkfbdsCeeBBtkuqsrj)2F$q}WWOg2r+Fm~qrI@1e@R5?;CuU6bhEl`u`oUw{m p9#Kr@>{?Boj2&irAr)1#w=wGkH(Fa?5mv&O{Cf>5J<a^@+8-ctQn>&C literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesP.rel b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesP.rel new file mode 100644 index 000000000000..aa7c1d652d5b --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesP.rel @@ -0,0 +1,93 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16204988+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=00691677-6d15-40f8-9d62-e8df34876e80_CorruptedCoordinatesP + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code= +codeSpace= +DatasetTitle=Corrupted coordinates + +[OVERVIEW:ASPECTES_TECNICS] +ArcSource=CorruptedCoordinates.arc + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204988+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[EXTENT] +toler_env=0 +MinX=335.318744053333 +MaxX=1224.16365366323 +MinY=390.371075166458 +MaxY=856.814462416696 + +[OVERVIEW] +CreationDate=20230628 16204986+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +descriptor=Identificador Gràfic intern +visible=0 +TractamentVariable=Ordinal + +[TAULA_PRINCIPAL:N_VERTEXS] +descriptor=Nombre de vèrtexs +visible=0 +MostrarUnitats=0 + +[TAULA_PRINCIPAL:PERIMETRE] +descriptor=Perímetre del polígon + +[TAULA_PRINCIPAL:AREA] +descriptor=Àrea del polígon + +[TAULA_PRINCIPAL:N_ARCS] +descriptor=Nombre d'arcs +visible=0 +MostrarUnitats=0 + +[TAULA_PRINCIPAL:N_POLIG] +descriptor=Nombre de polígons elementals +visible=0 +MostrarUnitats=0 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampPerimetre=PERIMETRE +NomCampArea=AREA +NomCampNArcs=N_ARCS +NomCampNPoligons=N_POLIG + +[TAULA_PRINCIPAL:ATT1] +descriptor=atribute1 + +[TAULA_PRINCIPAL:ATT2] +descriptor=atribute2 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesPoint.pnt b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesPoint.pnt new file mode 100644 index 0000000000000000000000000000000000000000..eb2da160ad09cfc23cca3dedc1c012870a17f325 GIT binary patch literal 60 zcmWIW3sErCGh|xgZgJZrwbbG8>DU&wDNPQ!6DphwW7{02T$9KvY3p)eW`KbECRt0K Ib2os+0gG=Hf&c&j literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesPointT.dbf b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesPointT.dbf new file mode 100644 index 0000000000000000000000000000000000000000..57527da74671340fbf8db0f3f2debbbfe2da2835 GIT binary patch literal 254 zcmbPG%_hstz`)SMpavw-fCvMFr%SwhkfWQYGXsMkNR|stiDO8JAyf#c1PXXy3<d_E ze2}M8Xozb(145o3DBlRg+zbr72n9w8aA4pFXDTQ-A+b@IhR#S_7bG?c6UkT<aS#Up 
Dx6B-o literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesPointT.rel b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesPointT.rel new file mode 100644 index 000000000000..69a72d07ba46 --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesPointT.rel @@ -0,0 +1,58 @@ +[VERSIO] +Vers=4 +SubVers=3 +VersMetaDades=5 +SubVersMetaDades=0 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +simbolitzable=0 +MostrarUnitats=0 +descriptor=Identificador Gràfic intern +descriptor_spa=Identificador Gráfico interno +descriptor_eng=Internal Graphic identifier + +[TAULA_PRINCIPAL:ATT1] +descriptor=Atributte1 + +[TAULA_PRINCIPAL:ATTRIBUTE_2] +descriptor=Atributte2 + +[METADADES] +language=cat,spa,eng +MDIdiom=cat,spa,eng +dateStamp=20230628 16344458+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=68ddf845-79e8-4791-bf7a-5459eb951a04_SimplePointsFile + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=68ddf845-79e8-4791-bf7a-5459eb951a04_SimplePointsFile +codeSpace= +DatasetTitle=Simple Points File + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemDefinition=Local +HorizontalSystemIdentifier=plane +unitats=STB#T_pixels + +[EXTENT] +MinX=342.325404376834 +MaxX=594.503182156354 +MinY=715.680304471881 +MaxY=848.806850618409 +toler_env=0 + +[OVERVIEW] +CreationDate=20230628 16351606+0200 +ContentDate=20230629 12064184+0200 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/Multipolygons.arc b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/Multipolygons.arc new file mode 100644 index 0000000000000000000000000000000000000000..5752163a2af3312d2577983523ce3d88269b6454 GIT binary patch literal 1168 zcmah|e@K&26rD>UZKRc{3^fgsr6K-EQRei%_x5QTrIsP;Fj_g(!c3(^3n~<&&{0rJ zlcdxrDq9e022w^L{>TE+j6|2Aq-97gC6W5J`}z#~qp`=w<=yk{z2}@u_AtND;7|vB zVrGuPpoXN*_`W_xJPc-E9B%X?5Hh>Bzi&_rjxzlHFHb3^xjFD;@e}pHG^2!g7`St_ zVrGuQxo)3AO+JIxk><*o5dz;PPHxqmpx`X#n&mQ?4Qe|XbsrL@G_H`x!h9=(1n|7D zp|*BJfa+5o&#fU0J}E}peXE%r2diW<5o$j>TMg5WB?|bMKAvL;0PxqhIS(Hs&{Orr zVAxGysl%_X)<Qrb=D!wk;@^Ji!uIR81bJbd3;?12T1$d6g}F5;HC@&?cxq1kq0&;Y z&p#Op$;EM-DvK#}zV*&Bo{a_di+PPp9EJ3v*6*tK8qOaDrw-FmO-~GXgW_{4P*CQM zrL<pHV;>F--xGPiE0=F<`O4scO^3-iIOJa?lik)LUoKSAq5gPIYI`R|-c0;HP|JDL zB452)#CK~U?ecVt%RJ}J%p`qp1Z6iB3KZ}?Ch7fTF{QC^lptP3U^cj7sL_q$eJOzj zCsl&wC&74uaD6E9TR>6gH~hYo!n@NkR+WyxJ==WA6t+cYmAS7asDB1Uon8X0&&w@$ z_n`RRVZi#U=|6hlJQM;v%{pT~ueXjNK9a|x&rE3@;o*w}>&Gnn?T`O5lO~K^O9brm z`Y`Ozl|rQ3uj$k%2B9|Jr%`~Zn^{IVlYF|ezTKD60@hPA)a$%}{s?Et$HQgc%13+K ze47O*9e8Ca(=o}{_I}HG{8*6uoqeR{e!>|63Kjy7X^NP%4`UG#J1lyh|5tr`zncl- z`w8mZ2k74>g7cLE&i9DGrG{tYWhH?AZQ${C;Qk`g9$|d~w{|&qS#v?!C){hUS1y5K Z<&d~9T+b^!-bYZc9wP1IpFLdJ?{D_0)eisw literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/Multipolygons.nod b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/Multipolygons.nod new file mode 100644 index 0000000000000000000000000000000000000000..76dd01f956adec07460d8ad34b585716a5d6f32c GIT binary patch literal 112 zcmebCcTq6ZGh`I+7M?jX(!rtQglvIrm#f2p4W8dLmN+_057}jGZ0+p8!T<q`3``6G ZKqiQefYJ$2Is-_<03(#a1f`jwGyquE561ui literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/Multipolygons.pol 
b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/Multipolygons.pol new file mode 100644 index 0000000000000000000000000000000000000000..6a36670a33b23f35680489a6f5f9d17750b93bb1 GIT binary patch literal 256 zcmWIW_fas^GxSXIj7m*Qvv%;>=XT}{kDJ4lxQW*nOE@|hybC^i=8B6069WP;LYc@k zj7`WK7NA~`Ss?lX5Kq(Mh@Qvb>o6hapYt!yumhy%e}SZbzLfr5l}VuvY(Oyvc93h> VLDn!agZMxIc0UtX2rLN`2LLMYI{N?s literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsA.dbf b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsA.dbf new file mode 100644 index 0000000000000000000000000000000000000000..74d23798fe8a8ca5b1e2b5be1bbee04ebc74bd9e GIT binary patch literal 733 zcmZ{g!3x4K42A=WH!q_1K0(Qnrfx@ZQwCCDig??L2tKQC;wHMTBlb}Ket+^e?d&mM z%>m#A&u*WrH9%dq$7Xk^_W+@HH<Oak-m2zW-7kOq(>Wa5UDLIH`xj}z&#!*|EcL^= ztlB!%Lw{<+$3OJ8KHt)43l&Iy44t!rm=i_l2n$BC%p74<TBcE<$zoSjMU<UlTcA)f z6L-Y1_4zib{2tUy6`IkTV$$?_5$|O@J~K(?tQR#@%~YY$elJT)y+ubEOB~NgPKe&- L-i_cOwM@frzIa*5 literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsA.rel b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsA.rel new file mode 100644 index 000000000000..f2a70de04e3a --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsA.rel @@ -0,0 +1,92 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20240418 15564880+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=8d979929-160d-4fea-9333-4111bc4af930_MultipolygonsA + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[IDENTIFICATION] +code=8d979929-160d-4fea-9333-4111bc4af930_MultipolygonsA +codeSpace= +DatasetTitle=Multipolygons [Plantill] + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemIdentifier=lat/long-WGS84 + +[EXTENT] +toler_env=0 +MinX=27.399999937575 +MaxX=44.099999937575 +MinY=34.192500062925 +MaxY=41.662500062925 + +[OVERVIEW] +CreationDate=20240418 15564879+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +MostrarUnitats=0 +descriptor=Identificador Gràfic intern + +[TAULA_PRINCIPAL:N_VERTEXS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de vèrtexs + +[TAULA_PRINCIPAL:LONG_ARC] +visible=0 +descriptor=Longitud de l'arc (projecció) + +[TAULA_PRINCIPAL:LONG_ARCE] +unitats=m +descriptor=Longitud de l'arc (el·lipsoide) + +[TAULA_PRINCIPAL:NODE_INI] +visible=0 +MostrarUnitats=0 +descriptor=Node inicial + +[TAULA_PRINCIPAL:NODE_FI] +visible=0 +MostrarUnitats=0 +descriptor=Node final + +[OVERVIEW:ASPECTES_TECNICS] +Ciclat1=Multipolygons.pol + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20240418 15564879+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampLongitudArc=LONG_ARC +NomCampLongitudArcEllipsoidal=LONG_ARCE +NomCampNodeIni=NODE_INI +NomCampNodeFi=NODE_FI diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsN.dbf b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsN.dbf new file mode 100644 index 0000000000000000000000000000000000000000..0baf50ab841870d687cbe3721df9bdb857851309 GIT binary patch literal 305 zcmZRsVG&|sU|?uu&;gQYK!kz8(<R<L$kENynSsF%BrA%h#4*S@INmYd&))^Re28a2 
nXfR0H6(Y}wrk_^<8!*5oXlR5@RKXCNC=Rt)O~9en7`wRua@rya literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsN.rel b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsN.rel new file mode 100644 index 000000000000..a1d8b2f52192 --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsN.rel @@ -0,0 +1,68 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20240418 15564879+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=383b7088-9b01-4fbc-8260-a0334da9a15e_MultipolygonsN + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[IDENTIFICATION] +code=383b7088-9b01-4fbc-8260-a0334da9a15e_MultipolygonsN +codeSpace= +DatasetTitle=Multipolygons [Plantill] [Plantill] + +[EXTENT] +toler_env=0 +MinX=27.399999937575 +MaxX=44.099999937575 +MinY=34.192500062925 +MaxY=41.662500062925 + +[OVERVIEW] +CreationDate=20240418 15564877+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +MostrarUnitats=0 +descriptor=Identificador Gràfic intern + +[TAULA_PRINCIPAL:ARCS_A_NOD] +MostrarUnitats=0 +descriptor=Nombre d'arcs al node + +[TAULA_PRINCIPAL:TIPUS_NODE] +MostrarUnitats=0 +descriptor=Tipus de node + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20240418 15564878+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampArcsANode=ARCS_A_NOD +NomCampTipusNode=TIPUS_NODE diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsP.dbf b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsP.dbf new file mode 100644 index 0000000000000000000000000000000000000000..8d5f513c68a4fc8cbc6fef73b6d714914655dd59 GIT binary patch literal 1055 zcmchUyH3L}6oy?D5JIpZSeX0*wS12=rj4m0rD^4+h^;dc0|RWl8_&X<V8@rFrIk<z z&S3vOpU?4cd;EEJI|cxJkG|}<7k2<u<#uk0$7%*Z<LK4imS+2;O{<^RNBBycYN1=B z1HZr6ZIHP3{r7ocbP+1L^AB;`+r>fNYzs59<|6;?f3>Wu`B84K)^^nZ*e3wsIxOPU zY!*{(x>a}nc?4munpSeJJ8qwJC(7k!TKfxl?mlA>^>Xgs-bL+(ksAh47Hxc=(xop4 z%*h*=0!JleRBjduVayd{At?mmOlC+hfpMy_aH13<LgY3%Q7n@=Q!>jHK^bO*_B(lK z1YxgI>U2S3>52BCl87pIDG2{)fAR79{_+OX$zBl3xR9C3y@<B_OeqpVwG>m1IZ-`K bP5#|JjV8PZ_H*2cEW%{pe)>At&mVpO?$UnN literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsP.rel b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsP.rel new file mode 100644 index 000000000000..4336a8b250d1 --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/CorruptedPolygon/MultipolygonsP.rel @@ -0,0 +1,108 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20240418 15565753+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=1a33d20b-d84d-4f3c-8bb8-02e773d488ed_MultipolygonsP + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[IDENTIFICATION] +code=1a33d20b-d84d-4f3c-8bb8-02e773d488ed_MultipolygonsP +codeSpace= +DatasetTitle=Multipolygons + +[OVERVIEW:ASPECTES_TECNICS] +ArcSource=Multipolygons.arc + +[EXTENT] +toler_env=0 +MinX=27.399999937575 +MaxX=44.099999937575 +MinY=34.192500062925 +MaxY=41.662500062925 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC 
+TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +MostrarUnitats=0 +descriptor=Identificador Gràfic intern + +[TAULA_PRINCIPAL:N_VERTEXS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de vèrtexs + +[TAULA_PRINCIPAL:PERIMETRE] +visible=0 +descriptor=Perímetre del polígon (projecció) + +[TAULA_PRINCIPAL:PERIMETREE] +unitats=m +descriptor=Perímetre del polígon (el·lipsoide) + +[TAULA_PRINCIPAL:AREA] +visible=0 +descriptor=Àrea del polígon (projecció) + +[TAULA_PRINCIPAL:AREAE] +unitats=m² +descriptor=Àrea del polígon (el·lipsoide) + +[TAULA_PRINCIPAL:N_ARCS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre d'arcs + +[TAULA_PRINCIPAL:N_POLIG] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de polígons elementals + +[TAULA_PRINCIPAL:NUMBER] +unitats= + +[TAULA_PRINCIPAL:INT64] +unitats= + +[TAULA_PRINCIPAL:DOUBLE] +unitats= + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20240418 15565753+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic en SIG +OrganisationName=CREAF + +[QUALITY:LINEAGE] +processes=1 + +[OVERVIEW] +CreationDate=20240418 15565750+0200 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampPerimetre=PERIMETRE +NomCampPerimetreEllipsoidal=PERIMETREE +NomCampArea=AREA +NomCampAreaEllipsoidal=AREAE +NomCampNArcs=N_ARCS +NomCampNPoligons=N_POLIG diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoArcRel/SimpleArcFile.arc b/autotest/ogr/data/miramon/CorruptedFiles/NoArcRel/SimpleArcFile.arc new file mode 100644 index 0000000000000000000000000000000000000000..a5084dcc5b725485e9d8e3cb5f8cc69f76680ef6 GIT binary patch literal 592 zcmZ<^a#k?ZGh|3gDSH?GwbVht)#A2E>Qo1pplciC)G{4HUnXja$@e<2FhBshI=MM7 z<lUz<IyC%W@G-%x%YhZBOn?!h7DSy}7szU+G1-CpW{O+ymL><zNTp8mkck-Pvjf#8 z05vcHX=Wf^5%_cp1NVG~1-bIM2ljP4TurUlZ-3wCU>n*v{o3DlhZSolylTJJ<p5HD z0jLk;K9JonV|6UTr4k+f@4FQh_^s06m%%Jn?<I{6M#TwpqMkH5IBmYoS6SQUzy?(N z0jLi|gY2%G@b~Pc!xauN|4G{Qtym(`?9lFX+{=7<t;00dV=G;cbvpQaJ>osOwH@x? zwUq|e-@PX|z|_OS4;C(`o8wHluXQ+t@lBnwYgx6!%GnZ)t8|(j$`&5{FZ8X#!7ySu z^J9?~hm4C7c_nRG4wl@~hkM_3!NVs=^oZj(_X!S{+y!?t9It_g<FbhIMfEj^@Px$^ zEWBXhgC1^oS@&j$ooRrF!`g$K9KG!I4vB6YRTs}RI|!sNojsYY8XjJ-aQwC<Y|7a^ e%?`{p&V{wd>l_Y*K->)rU#B;<3r}xpb^rhvLhHEz literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoArcRel/SimpleArcFile.nod b/autotest/ogr/data/miramon/CorruptedFiles/NoArcRel/SimpleArcFile.nod new file mode 100644 index 0000000000000000000000000000000000000000..2b3fa6ee1e50370394723c3eb9280a167191edf6 GIT binary patch literal 172 zcmebCcTq6ZGh|3gDSH?GwbVht)#A2E>Qsj?zNu4oEvt42eVM2wCg1D8!2kh_49pA# pKqiQ;fYJ?6x&um2fYLLd^a3co0!X6+MyLRcW`gozG&7VBq5*xM6Pf@3 literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoArcRel/SimpleArcFileA.dbf b/autotest/ogr/data/miramon/CorruptedFiles/NoArcRel/SimpleArcFileA.dbf new file mode 100644 index 0000000000000000000000000000000000000000..3f8b628456601b93fa883c7a1b3f869e88524409 GIT binary patch literal 789 zcmb7;O%8%E5Jrb+SZUnoz9%5Gr3IGAUr0<zkQjGd#H|-J9!MaCDWojyqJ90|%(T1L z&fNom<GlNQwax%l)|E|CR4ITO>}6G=x<}r$`SZTvZ?!7Br0Lsrzq=fu>MZXnRn2~> zVf4kI?e8aTOLfce1|0NdI(pAEkNz|XeIAKz`L<ACNEwr!FIXsy5Vf_PCRjkO25%v? 
zi4a2?N74(0FJy!v5<^7dDKg0n-a_jXGo)O45#uy4eul#WL?rR=Cph*ZNkqb9=`kL~ S(hO&B4a0o)gcRS8aU(ydUt84x literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoArcRel/SimpleArcFileN.dbf b/autotest/ogr/data/miramon/CorruptedFiles/NoArcRel/SimpleArcFileN.dbf new file mode 100644 index 0000000000000000000000000000000000000000..93a6c3f4a05e7123170d8498de79943a641d9018 GIT binary patch literal 481 zcmZRsW|QGyU|?uu&;gQYK!kz8(<R<L$kENynSsF%BrA%h#4*S@INmYd&))^Re28a2 zXfR0H6(Y}wrk_^<8!*5oXlRU0RKXCNC=RtoB&fw|8xC_#NHEuw1hr-)s5Qr~7692( BIRgLy literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoArcRel/SimpleArcFileN.rel b/autotest/ogr/data/miramon/CorruptedFiles/NoArcRel/SimpleArcFileN.rel new file mode 100644 index 000000000000..81b6ce35bc31 --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/NoArcRel/SimpleArcFileN.rel @@ -0,0 +1,64 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16235470+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=e4365dc3-82f4-4da8-ae1f-3f73922adc27_SimpleArcFileN + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=e4365dc3-82f4-4da8-ae1f-3f73922adc27_SimpleArcFileN +codeSpace= +DatasetTitle=Simple Arc File [píxels] + +[EXTENT] +toler_env=0 + +[OVERVIEW] +CreationDate=20230628 16235469+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:ARCS_A_NOD] +MostrarUnitats=0 +descriptor=Nombre d'arcs al node + +[TAULA_PRINCIPAL:TIPUS_NODE] +MostrarUnitats=0 +descriptor=Tipus de node + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16235470+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampArcsANode=ARCS_A_NOD +NomCampTipusNode=TIPUS_NODE diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoNode/SimpleArcFile.arc b/autotest/ogr/data/miramon/CorruptedFiles/NoNode/SimpleArcFile.arc new file mode 100644 index 0000000000000000000000000000000000000000..a5084dcc5b725485e9d8e3cb5f8cc69f76680ef6 GIT binary patch literal 592 zcmZ<^a#k?ZGh|3gDSH?GwbVht)#A2E>Qo1pplciC)G{4HUnXja$@e<2FhBshI=MM7 z<lUz<IyC%W@G-%x%YhZBOn?!h7DSy}7szU+G1-CpW{O+ymL><zNTp8mkck-Pvjf#8 z05vcHX=Wf^5%_cp1NVG~1-bIM2ljP4TurUlZ-3wCU>n*v{o3DlhZSolylTJJ<p5HD z0jLk;K9JonV|6UTr4k+f@4FQh_^s06m%%Jn?<I{6M#TwpqMkH5IBmYoS6SQUzy?(N z0jLi|gY2%G@b~Pc!xauN|4G{Qtym(`?9lFX+{=7<t;00dV=G;cbvpQaJ>osOwH@x? zwUq|e-@PX|z|_OS4;C(`o8wHluXQ+t@lBnwYgx6!%GnZ)t8|(j$`&5{FZ8X#!7ySu z^J9?~hm4C7c_nRG4wl@~hkM_3!NVs=^oZj(_X!S{+y!?t9It_g<FbhIMfEj^@Px$^ zEWBXhgC1^oS@&j$ooRrF!`g$K9KG!I4vB6YRTs}RI|!sNojsYY8XjJ-aQwC<Y|7a^ e%?`{p&V{wd>l_Y*K->)rU#B;<3r}xpb^rhvLhHEz literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoNode/SimpleArcFileA.dbf b/autotest/ogr/data/miramon/CorruptedFiles/NoNode/SimpleArcFileA.dbf new file mode 100644 index 0000000000000000000000000000000000000000..3f8b628456601b93fa883c7a1b3f869e88524409 GIT binary patch literal 789 zcmb7;O%8%E5Jrb+SZUnoz9%5Gr3IGAUr0<zkQjGd#H|-J9!MaCDWojyqJ90|%(T1L z&fNom<GlNQwax%l)|E|CR4ITO>}6G=x<}r$`SZTvZ?!7Br0Lsrzq=fu>MZXnRn2~> zVf4kI?e8aTOLfce1|0NdI(pAEkNz|XeIAKz`L<ACNEwr!FIXsy5Vf_PCRjkO25%v? 
zi4a2?N74(0FJy!v5<^7dDKg0n-a_jXGo)O45#uy4eul#WL?rR=Cph*ZNkqb9=`kL~ S(hO&B4a0o)gcRS8aU(ydUt84x literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoNode/SimpleArcFileA.rel b/autotest/ogr/data/miramon/CorruptedFiles/NoNode/SimpleArcFileA.rel new file mode 100644 index 000000000000..9f92d9a62f2c --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/NoNode/SimpleArcFileA.rel @@ -0,0 +1,102 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat,spa,eng +MDIdiom=cat,spa,eng +dateStamp=20230628 16235471+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=16b4eae3-8f74-4145-95db-babb7f0feb0f_SimpleArcFileA + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=16b4eae3-8f74-4145-95db-babb7f0feb0f_SimpleArcFileA +codeSpace= +DatasetTitle=Simple Arc File + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemDefinition=Local +HorizontalSystemIdentifier=plane +unitats=STB#T_pixels +unitatsY=? + +[EXTENT] +toler_env=0 +MinX=351.333967649907 +MaxX=1369.30161750719 +MinY=201.191246431919 +MaxY=931.88582302564 + +[OVERVIEW] +CreationDate=20230628 16235470+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +descriptor_spa=Identificador Gráfico interno +descriptor_eng=Internal Graphic identifier +MostrarUnitats=0 + +[TAULA_PRINCIPAL:N_VERTEXS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de vèrtexs +descriptor_spa=Número de vertices +descriptor_eng=Number of vertices + +[TAULA_PRINCIPAL:LONG_ARC] +descriptor=Longitud de l'arc +descriptor_spa=Longitud del arco +descriptor_eng=Lenght of arc + +[TAULA_PRINCIPAL:NODE_INI] +visible=0 +MostrarUnitats=0 +descriptor=Node inicial +descriptor_spa=Nodo inicial +descriptor_eng=Initial node + +[TAULA_PRINCIPAL:NODE_FI] +visible=0 +MostrarUnitats=0 +descriptor=Node final +descriptor_spa=Nodo final +descriptor_eng=Final node + +[TAULA_PRINCIPAL:ATT1] +descriptor=Atributte1 + +[TAULA_PRINCIPAL:ATT2] +descriptor=Attribute2 + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16235471+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampLongitudArc=LONG_ARC +NomCampNodeIni=NODE_INI +NomCampNodeFi=NODE_FI diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoNode/SimpleArcFileN.dbf b/autotest/ogr/data/miramon/CorruptedFiles/NoNode/SimpleArcFileN.dbf new file mode 100644 index 0000000000000000000000000000000000000000..93a6c3f4a05e7123170d8498de79943a641d9018 GIT binary patch literal 481 zcmZRsW|QGyU|?uu&;gQYK!kz8(<R<L$kENynSsF%BrA%h#4*S@INmYd&))^Re28a2 zXfR0H6(Y}wrk_^<8!*5oXlRU0RKXCNC=RtoB&fw|8xC_#NHEuw1hr-)s5Qr~7692( BIRgLy literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoNode/SimpleArcFileN.rel b/autotest/ogr/data/miramon/CorruptedFiles/NoNode/SimpleArcFileN.rel new file mode 100644 index 000000000000..81b6ce35bc31 --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/NoNode/SimpleArcFileN.rel @@ -0,0 +1,64 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16235470+0200 +characterSet=006 +nOrganismes=1 
+FileIdentifier=e4365dc3-82f4-4da8-ae1f-3f73922adc27_SimpleArcFileN + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=e4365dc3-82f4-4da8-ae1f-3f73922adc27_SimpleArcFileN +codeSpace= +DatasetTitle=Simple Arc File [píxels] + +[EXTENT] +toler_env=0 + +[OVERVIEW] +CreationDate=20230628 16235469+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:ARCS_A_NOD] +MostrarUnitats=0 +descriptor=Nombre d'arcs al node + +[TAULA_PRINCIPAL:TIPUS_NODE] +MostrarUnitats=0 +descriptor=Tipus de node + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16235470+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampArcsANode=ARCS_A_NOD +NomCampTipusNode=TIPUS_NODE diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFile.arc b/autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFile.arc new file mode 100644 index 0000000000000000000000000000000000000000..431d702a3a3540257dfdffce360f123848d1eeb8 GIT binary patch literal 536 zcmZ<^a#k?ZGi2E98#`I@Yl*|Y+{;szD@=CS(bD|peQJe+oy>ifmSbHG%nT3!Qx`GM zUGJG~heHqB*RaOxb#QfTKxH?eYC%-nGFH!W^(hYSdv8Sre(Qvr|LB0;fx_u^4xaNr zCY1GcIdA~gS}-y&01+6weNBkr>znA{yGPA9OQp%dR8&BU|5*pzT}L-DR<}-Rasb)4 z1E`4!NQ1DHuuoUCOREFSoy(sY)!yRmaHu{}u-Mq85$;Zy`;T~ST9YW-=1{=1XUjX6 z7Px;txb6*opjzRu;BL<4DeCnY=C!#Uo5sIk0^I$V@=Mw{p7%H?Xf64*P_-TIU%uVd zKAU`+9d=kPniM47=K%Bfnxh;csREN6-p-uN_CLD;9xj3#+rAovwmOK1@3PEPnB>6k zX|B$DufqY|zE6uM1zmpvG#46Auy6`G`>G`2PoqP}O}kqSbK4wX;RFjmfwL6`*4t|x X(gN6zt#oa7FnhbkkNsGM1G;$t@$uhR literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFile.nod b/autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFile.nod new file mode 100644 index 0000000000000000000000000000000000000000..e5d310c3c0cee2ef29d314c8b4e269060b4f902e GIT binary patch literal 92 zcmebCcTq6ZGi2E98#`I@Yl%af+p%f<8zwmHXlZ`)KDENZbN<JKvc4_{W(EjgWME?O V05U;z0F;h^%7G|GD4PjL0|5OV62Jfe literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFile.pol b/autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFile.pol new file mode 100644 index 0000000000000000000000000000000000000000..0a2e4736fe6478f5c622ee571b3c8a07b4443c76 GIT binary patch literal 349 zcmWIW_fas^GgR2^8#`I@Yl*|Y+{;szD@=CS(bD|peQJe+oy>ifmSbHGEDQ+12xKxL zxD3odHX;2;${9RB2A~0${)l<*de3Y-9D3NkhBaQVgWC_%0>dEvZOd3a%hjhiG>W+N z7MK2ZaNm0?D)3t;-2IOZ=p87WUgzLB|6@W~UzY<+FNh8R5^i4;V)*(dIxIVRca6>b zuMWO@)Qq!KnjB0;1*G_&b-?|9bQ5EB>y#!G{SiR@Qo=r6(JrkHr@l^&Y`%Ke0Tc!x Nzz(ti2$(?u1OVn<W$pj~ literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFileA.dbf b/autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFileA.dbf new file mode 100644 index 0000000000000000000000000000000000000000..22d64985be2349156fb209f77c88885217bf9fc4 GIT binary patch literal 511 zcmZ{fK?=e!5Jd+H-MQ19i<}@dF^%c6*t7&Bg<8a2_Flo$c{5uPzb)e|!soxd3CSM$ zJ|m(h-KTRkTcXgUZm8REB8oHJt**p$^~30Imp}a6$1c^wq<{Q;xghq9Pa%dcUt)Um zcE<es8^KM%l@_Jau9D7L<0_O$tj?A~!9sF~O6h|vl_|7AiNxw`DKspkpFauf3RxOs Kt0OoPtLqo7Pdilr literal 0 HcmV?d00001 diff --git 
a/autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFileA.rel b/autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFileA.rel new file mode 100644 index 000000000000..355451718486 --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFileA.rel @@ -0,0 +1,89 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16204654+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=0e09e6cf-0d31-499e-bcf0-bee4d3e9d87a_SimplePolFileA + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=0e09e6cf-0d31-499e-bcf0-bee4d3e9d87a_SimplePolFileA +codeSpace= +DatasetTitle=Simple Pol File [píxels] + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemDefinition=Local +HorizontalSystemIdentifier=plane +unitats=STB#T_pixels +unitatsY=? + +[EXTENT] +toler_env=0 +MinX=335.3187440533326 +MaxX=1224.163653663228 +MinY=390.371075166458 +MaxY=856.814462416696 + +[OVERVIEW] +CreationDate=20230628 16204653+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:N_VERTEXS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de vèrtexs + +[TAULA_PRINCIPAL:LONG_ARC] +descriptor=Longitud de l'arc + +[TAULA_PRINCIPAL:NODE_INI] +visible=0 +MostrarUnitats=0 +descriptor=Node inicial + +[TAULA_PRINCIPAL:NODE_FI] +visible=0 +MostrarUnitats=0 +descriptor=Node final + +[OVERVIEW:ASPECTES_TECNICS] +Ciclat1=SimplePolFile.pol + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204654+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampLongitudArc=LONG_ARC +NomCampNodeIni=NODE_INI +NomCampNodeFi=NODE_FI diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFileN.dbf b/autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFileN.dbf new file mode 100644 index 0000000000000000000000000000000000000000..19847ee7684ceccc4001218201034fe3e646fd67 GIT binary patch literal 261 zcmZRsW|LuNU|?uu&;gQYK!kz8(<R<L$kENynSsF%BrA%h#4*S@INmYd&))^Re28a2 kXfR0H6(Y}wrk_^<8!*5oXlR5@RKXCNC=Rt)O)xY903eGTUH||9 literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFileN.rel b/autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFileN.rel new file mode 100644 index 000000000000..c0f476e1f0de --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFileN.rel @@ -0,0 +1,64 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16204653+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=701e2102-a0ba-4cb2-aeb5-bdb329c79868_SimplePolFileN + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=701e2102-a0ba-4cb2-aeb5-bdb329c79868_SimplePolFileN +codeSpace= +DatasetTitle=Simple Pol File [píxels] + +[EXTENT] +toler_env=0 + +[OVERVIEW] +CreationDate=20230628 16204652+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 
+descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:ARCS_A_NOD] +MostrarUnitats=0 +descriptor=Nombre d'arcs al node + +[TAULA_PRINCIPAL:TIPUS_NODE] +MostrarUnitats=0 +descriptor=Tipus de node + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204653+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampArcsANode=ARCS_A_NOD +NomCampTipusNode=TIPUS_NODE diff --git a/autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFileP.dbf b/autotest/ogr/data/miramon/CorruptedFiles/NoPolRel/SimplePolFileP.dbf new file mode 100644 index 0000000000000000000000000000000000000000..5d00a81ae4a44cefb1edfafaf085f49073b42336 GIT binary patch literal 729 zcma)&OHRWu5QYa8b%8)^U~ev9+2cp-rAbW{sf5Zzh+S^L2{=4%<V+rFBxaHM=l|v* zXZO3?-vYqP%}@TW;t8NR^v4w5ni`<J(7RPj+keH>#qZAzevYYmk6ns|Kkt_dLW-d{ z!~QjI`;h9qJjcfU^T(+<Zt~D|a+n6lPXM?t#h5b3q6hvsf|4H?g}9<Fmf&It!dK0$ z>BZEnkfbdsCeeBBtkuqsrj)2F$q}WWOg2r+Fm~qrI@1e@R5?;CuU6bhEl`u`oUw{m p9#Kr@>{?Boj2&irAr)1#w=wGkH(Fa?5mv&O{Cf>5J<a^@+8-ctQn>&C literal 0 HcmV?d00001 diff --git a/autotest/ogr/ogr_miramon_vector.py b/autotest/ogr/ogr_miramon_vector.py index d58a52573002..19562018b21c 100644 --- a/autotest/ogr/ogr_miramon_vector.py +++ b/autotest/ogr/ogr_miramon_vector.py @@ -102,43 +102,24 @@ def test_ogr_miramon_write_simple_point_EmptyVersion(tmp_vsimem): check_simple_point(ds) -def test_ogr_miramon_write_simple_point_V11(tmp_vsimem): - - out_filename = str(tmp_vsimem / "out.pnt") - gdal.VectorTranslate( - out_filename, - "data/miramon/Points/SimplePoints/SimplePointsFile.pnt", - format="MiraMonVector", - options="-lco Version=V1.1", - ) - ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) - check_simple_point(ds) - - -def test_ogr_miramon_write_simple_point_V20(tmp_vsimem): - - out_filename = str(tmp_vsimem / "out.pnt") - gdal.VectorTranslate( - out_filename, - "data/miramon/Points/SimplePoints/SimplePointsFile.pnt", - format="MiraMonVector", - options="-lco Version=V2.0", - ) - - ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) - check_simple_point(ds) - - -def test_ogr_miramon_write_simple_point_last_version(tmp_vsimem): +@pytest.mark.parametrize( + "version", + [ + "V1.1", + "V2.0", + "last_version", + "VX.0", + ], +) +def test_ogr_miramon_write_simple_point_V11(tmp_vsimem, version): out_filename = str(tmp_vsimem / "out.pnt") gdal.VectorTranslate( out_filename, "data/miramon/Points/SimplePoints/SimplePointsFile.pnt", format="MiraMonVector", - options="-lco Version=last_version", + options="-lco Version=" + version, ) - ds = gdal.OpenEx(out_filename, gdal.OF_VECTOR) check_simple_point(ds) @@ -865,6 +846,10 @@ def test_ogr_miramon_OpenLanguageArc(Language, expected_description): "Error reading the format in the DBF file", ), ("data/miramon/CorruptedFiles/NoREL/NoREL.pnt", "rel must exist."), + ("data/miramon/CorruptedFiles/NoNode/SimpleArcFile.arc", "Cannot open file"), + ("data/miramon/CorruptedFiles/NoArcRel/SimpleArcFile.arc", "rel must exist"), + ("data/miramon/CorruptedFiles/NoPolRel/SimplePolFile.pol", "rel must exist"), + ("data/miramon/CorruptedFiles/BadCycle/SimplePolFile.pol", "Cannot open file"), ], ) def test_ogr_miramon_corrupted_files(name, message): @@ -875,6 +860,46 @@ def test_ogr_miramon_corrupted_files(name, message): ) +############################################################################### +# features test: unexisting 
coordinates, unexpected polygon construction + + +@pytest.mark.parametrize( + "name,message", + [ + ( + "data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinatesPoint.pnt", + "Wrong file format", + ), + ( + "data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinates.arc", + "Wrong file format", + ), + ( + "data/miramon/CorruptedFiles/CorruptedCoordinates/CorruptedCoordinates.pol", + "Wrong file format", + ), + ( + "data/miramon/CorruptedFiles/CorruptedPolygon/Multipolygons.pol", + "Wrong polygon format", + ), + ], +) +def test_ogr_miramon_corrupted_features_point(name, message): + + ds = gdal.OpenEx( + name, + gdal.OF_VECTOR, + ) + assert ds is not None, "Failed to get dataset" + lyr = ds.GetLayer(0) + + assert lyr is not None, "Failed to get layer" + with pytest.raises(Exception, match=message): + for f in lyr: + pass + + ############################################################################### # multiregister test @@ -931,6 +956,7 @@ def create_common_attributes(lyr): lyr.CreateField(ogr.FieldDefn("intlistfield", ogr.OFTIntegerList)) lyr.CreateField(ogr.FieldDefn("int64listfield", ogr.OFTInteger64List)) lyr.CreateField(ogr.FieldDefn("doulistfield", ogr.OFTRealList)) + lyr.CreateField(ogr.FieldDefn("datefield", ogr.OFTDate)) def assign_common_attributes(f): @@ -942,6 +968,7 @@ def assign_common_attributes(f): f["intlistfield"] = [123456789] f["int64listfield"] = [12345678912345678] f["doulistfield"] = [1.5, 4.2] + f["datefield"] = "2024/04/24" def check_common_attributes(f): @@ -953,6 +980,16 @@ def check_common_attributes(f): assert f["intlistfield"] == [123456789] assert f["int64listfield"] == [12345678912345678] assert f["doulistfield"] == [1.5, 4.2] + assert f["datefield"] == "2024/04/24" + + +def open_ds_lyr_0_feature_0(layername): + ds = ogr.Open(layername) + assert ds is not None, "Failed to get dataset" + lyr = ds.GetLayer(0) + assert lyr is not None, "Failed to get layer" + f = lyr.GetNextFeature() + return ds, lyr, f def test_ogr_miramon_write_basic_polygon(tmp_path): @@ -972,11 +1009,7 @@ def test_ogr_miramon_write_basic_polygon(tmp_path): ds = None layername = filename + "/test.pol" - ds = ogr.Open(layername) - assert ds is not None, "Failed to get dataset" - lyr = ds.GetLayer(0) - assert lyr is not None, "Failed to get layer" - f = lyr.GetNextFeature() + ds, lyr, f = open_ds_lyr_0_feature_0(layername) assert f["ID_GRAFIC"] == [1, 1] assert f["N_VERTEXS"] == [4, 4] @@ -1011,11 +1044,7 @@ def test_ogr_miramon_write_basic_multipolygon(tmp_path): ds = None layername = filename + "/test.pol" - ds = ogr.Open(layername) - assert ds is not None, "Failed to get dataset" - lyr = ds.GetLayer(0) - assert lyr is not None, "Failed to get layer" - f = lyr.GetNextFeature() + ds, lyr, f = open_ds_lyr_0_feature_0(layername) assert f["ID_GRAFIC"] == [1, 1] assert f["N_VERTEXS"] == [20, 20] @@ -1031,6 +1060,44 @@ def test_ogr_miramon_write_basic_multipolygon(tmp_path): ds = None +def test_ogr_miramon_write_basic_multipolygon_3d(tmp_path): + + filename = str(tmp_path / "DataSetMULTIPOL3d") + ds = ogr.GetDriverByName("MiramonVector").CreateDataSource(filename) + srs = osr.SpatialReference() + srs.ImportFromEPSG(32631) + lyr = ds.CreateLayer("test", srs=srs, geom_type=ogr.wkbUnknown) + create_common_attributes(lyr) + f = ogr.Feature(lyr.GetLayerDefn()) + assign_common_attributes(f) + + f.SetGeometry( + ogr.CreateGeometryFromWkt( + "MULTIPOLYGON Z (((0 0 3,0 5 3,5 5 4,5 0 5,0 0 3), (1 1 6,2 1 3,2 2 9.2,1 2 3.14,1 1 6), (3 3 1,4 3 12,4 4 21,3 4 2,3 3 1)),((5 6 2,5 7 2,6 
7 3,6 6 3,5 6 2)))" + ) + ) + + lyr.CreateFeature(f) + f = None + ds = None + + layername = filename + "/test.pol" + ds, lyr, f = open_ds_lyr_0_feature_0(layername) + + assert f["ID_GRAFIC"] == [1, 1] + assert f["N_VERTEXS"] == [20, 20] + assert f["PERIMETRE"] == [32, 32] + assert f["AREA"] == [24, 24] + assert f["N_ARCS"] == [4, 4] + assert f["N_POLIG"] == [4, 4] + check_common_attributes(f) + assert ( + f.GetGeometryRef().ExportToIsoWkt() + == "MULTIPOLYGON Z (((0 0 3,0 5 3,5 5 4,5 0 5,0 0 3),(1 1 6,2 1 3,2 2 9.2,1 2 3.14,1 1 6),(3 3 1,4 3 12,4 4 21,3 4 2,3 3 1)),((5 6 2,5 7 2,6 7 3,6 6 3,5 6 2)))" + ) + ds = None + + def test_ogr_miramon_write_basic_linestring(tmp_path): filename = str(tmp_path / "DataSetLINESTRING") @@ -1048,11 +1115,7 @@ def test_ogr_miramon_write_basic_linestring(tmp_path): ds = None layername = filename + "/test.arc" - ds = ogr.Open(layername) - assert ds is not None, "Failed to get dataset" - lyr = ds.GetLayer(0) - assert lyr is not None, "Failed to get layer" - f = lyr.GetNextFeature() + ds, lyr, f = open_ds_lyr_0_feature_0(layername) assert f["ID_GRAFIC"] == [0, 0] assert f["N_VERTEXS"] == [3, 3] @@ -1081,11 +1144,7 @@ def test_ogr_miramon_write_basic_linestringZ(tmp_path): ds = None layername = filename + "/test.arc" - ds = ogr.Open(layername) - assert ds is not None, "Failed to get dataset" - lyr = ds.GetLayer(0) - assert lyr is not None, "Failed to get layer" - f = lyr.GetNextFeature() + ds, lyr, f = open_ds_lyr_0_feature_0(layername) assert f["ID_GRAFIC"] == [0, 0] assert f["N_VERTEXS"] == [3, 3] @@ -1116,11 +1175,7 @@ def test_ogr_miramon_write_basic_multilinestring(tmp_path): ds = None layername = filename + "/test.arc" - ds = ogr.Open(layername) - assert ds is not None, "Failed to get dataset" - lyr = ds.GetLayer(0) - assert lyr is not None, "Failed to get layer" - f = lyr.GetNextFeature() + ds, lyr, f = open_ds_lyr_0_feature_0(layername) assert f["ID_GRAFIC"] == [0, 0] assert f["N_VERTEXS"] == [3, 3] @@ -1143,13 +1198,20 @@ def test_ogr_miramon_write_basic_multilinestring(tmp_path): ds = None -def test_ogr_miramon_write_basic_point(tmp_path): +@pytest.mark.parametrize( + "DBFEncoding", + [ + "UTF8", + "ANSI", + ], +) +def test_ogr_miramon_write_basic_point(tmp_path, DBFEncoding): filename = str(tmp_path / "DataSetPOINT") ds = ogr.GetDriverByName("MiramonVector").CreateDataSource(filename) srs = osr.SpatialReference() srs.ImportFromEPSG(32631) - options = ["DBFEncoding=UTF8"] + options = ["DBFEncoding=" + DBFEncoding] lyr = ds.CreateLayer("test", srs=srs, geom_type=ogr.wkbUnknown, options=options) create_common_attributes(lyr) f = ogr.Feature(lyr.GetLayerDefn()) @@ -1165,11 +1227,7 @@ def test_ogr_miramon_write_basic_point(tmp_path): ds = None layername = filename + "/test.pnt" - ds = ogr.Open(layername) - assert ds is not None, "Failed to get dataset" - lyr = ds.GetLayer(0) - assert lyr is not None, "Failed to get layer" - f = lyr.GetNextFeature() + ds, lyr, f = open_ds_lyr_0_feature_0(layername) assert f["ID_GRAFIC"] == [0, 0] check_common_attributes(f) @@ -1205,11 +1263,7 @@ def test_ogr_miramon_write_basic_pointZ(tmp_path): ds = None layername = filename + "/test.pnt" - ds = ogr.Open(layername) - assert ds is not None, "Failed to get dataset" - lyr = ds.GetLayer(0) - assert lyr is not None, "Failed to get layer" - f = lyr.GetNextFeature() + ds, lyr, f = open_ds_lyr_0_feature_0(layername) assert f["ID_GRAFIC"] == [0, 0] check_common_attributes(f) @@ -1242,11 +1296,7 @@ def test_ogr_miramon_write_basic_multipoint(tmp_path): ds = None layername = filename 
+ "/test.pnt" - ds = ogr.Open(layername) - assert ds is not None, "Failed to get dataset" - lyr = ds.GetLayer(0) - assert lyr is not None, "Failed to get layer" - f = lyr.GetNextFeature() + ds, lyr, f = open_ds_lyr_0_feature_0(layername) assert f["ID_GRAFIC"] == [0, 0] check_common_attributes(f) @@ -1259,3 +1309,61 @@ def test_ogr_miramon_write_basic_multipoint(tmp_path): assert f.GetGeometryRef().ExportToIsoWkt() == "POINT (1 0)" ds = None + + +def test_ogr_miramon_write_basic_multigeometry(tmp_path): + + filename = str(tmp_path / "DataSetMULTIGEOM") + ds = ogr.GetDriverByName("MiramonVector").CreateDataSource(filename) + srs = osr.SpatialReference() + srs.ImportFromEPSG(32631) + lyr = ds.CreateLayer("test", srs=srs, geom_type=ogr.wkbUnknown) + create_common_attributes(lyr) + f = ogr.Feature(lyr.GetLayerDefn()) + assign_common_attributes(f) + + f.SetGeometry(ogr.CreateGeometryFromWkt("POINT (0 0)")) + lyr.CreateFeature(f) + + f.SetGeometry(ogr.CreateGeometryFromWkt("LINESTRING (0 0,0 1,1 1)")) + lyr.CreateFeature(f) + + f.SetGeometry( + ogr.CreateGeometryFromWkt( + "MULTIPOLYGON (((0 0,0 5,5 5,5 0,0 0), (1 1,2 1,2 2,1 2,1 1), (3 3,4 3,4 4,3 4,3 3)),((5 6,5 7,6 7,6 6,5 6)))" + ) + ) + lyr.CreateFeature(f) + + f = None + ds = None + + layername = filename + "/test.pnt" + ds, lyr, f = open_ds_lyr_0_feature_0(layername) + + assert f["ID_GRAFIC"] == [0, 0] + check_common_attributes(f) + assert f.GetGeometryRef().ExportToIsoWkt() == "POINT (0 0)" + + ds = None + + layername = filename + "/test.arc" + ds, lyr, f = open_ds_lyr_0_feature_0(layername) + + assert f["ID_GRAFIC"] == [0, 0] + check_common_attributes(f) + assert f.GetGeometryRef().ExportToIsoWkt() == "LINESTRING (0 0,0 1,1 1)" + + ds = None + + layername = filename + "/test.pol" + ds, lyr, f = open_ds_lyr_0_feature_0(layername) + + assert f["ID_GRAFIC"] == [1, 1] + check_common_attributes(f) + assert ( + f.GetGeometryRef().ExportToIsoWkt() + == "MULTIPOLYGON (((0 0,0 5,5 5,5 0,0 0),(1 1,2 1,2 2,1 2,1 1),(3 3,4 3,4 4,3 4,3 3)),((5 6,5 7,6 7,6 6,5 6)))" + ) + + ds = None diff --git a/ogr/ogrsf_frmts/miramon/mm_wrlayr.c b/ogr/ogrsf_frmts/miramon/mm_wrlayr.c index adf5c5d295f7..816df6bcb580 100644 --- a/ogr/ogrsf_frmts/miramon/mm_wrlayr.c +++ b/ogr/ogrsf_frmts/miramon/mm_wrlayr.c @@ -369,80 +369,6 @@ static int MMWriteHeader(FILE_TYPE *pF, struct MM_TH *pMMHeader) return 0; } -void MMInitHeader(struct MM_TH *pMMHeader, int layerType, int nVersion) -{ - memset(pMMHeader, 0, sizeof(*pMMHeader)); - switch (nVersion) - { - case MM_32BITS_VERSION: - pMMHeader->aLayerVersion[0] = '0'; - pMMHeader->aLayerVersion[1] = '1'; - pMMHeader->aLayerSubVersion = '1'; - break; - case MM_64BITS_VERSION: - case MM_LAST_VERSION: - default: - pMMHeader->aLayerVersion[0] = '0'; - pMMHeader->aLayerVersion[1] = '2'; - pMMHeader->aLayerSubVersion = '0'; - break; - } - switch (layerType) - { - case MM_LayerType_Point: - pMMHeader->aFileType[0] = 'P'; - pMMHeader->aFileType[1] = 'N'; - pMMHeader->aFileType[2] = 'T'; - break; - case MM_LayerType_Point3d: - pMMHeader->aFileType[0] = 'P'; - pMMHeader->aFileType[1] = 'N'; - pMMHeader->aFileType[2] = 'T'; - pMMHeader->bIs3d = 1; - break; - case MM_LayerType_Arc: - pMMHeader->aFileType[0] = 'A'; - pMMHeader->aFileType[1] = 'R'; - pMMHeader->aFileType[2] = 'C'; - break; - case MM_LayerType_Arc3d: - pMMHeader->aFileType[0] = 'A'; - pMMHeader->aFileType[1] = 'R'; - pMMHeader->aFileType[2] = 'C'; - pMMHeader->bIs3d = 1; - break; - case MM_LayerType_Pol: - pMMHeader->aFileType[0] = 'P'; - pMMHeader->aFileType[1] = 'O'; - 
pMMHeader->aFileType[2] = 'L'; - break; - case MM_LayerType_Pol3d: - pMMHeader->aFileType[0] = 'P'; - pMMHeader->aFileType[1] = 'O'; - pMMHeader->aFileType[2] = 'L'; - pMMHeader->bIs3d = 1; - break; - default: - break; - } - pMMHeader->nElemCount = 0; - pMMHeader->hBB.dfMinX = MM_UNDEFINED_STATISTICAL_VALUE; - pMMHeader->hBB.dfMaxX = -MM_UNDEFINED_STATISTICAL_VALUE; - pMMHeader->hBB.dfMinY = MM_UNDEFINED_STATISTICAL_VALUE; - pMMHeader->hBB.dfMaxY = -MM_UNDEFINED_STATISTICAL_VALUE; - - pMMHeader->Flag = MM_CREATED_USING_MIRAMON; // Created from MiraMon - if (pMMHeader->bIs3d) - pMMHeader->Flag |= MM_LAYER_3D_INFO; // 3D - - if (pMMHeader->bIsMultipolygon) - pMMHeader->Flag |= MM_LAYER_MULTIPOLYGON; // Multipolygon. - - if (pMMHeader->aFileType[0] == 'P' && pMMHeader->aFileType[1] == 'O' && - pMMHeader->aFileType[2] == 'L') - pMMHeader->Flag |= MM_BIT_5_ON; // Explicital polygons -} - int MMWriteEmptyHeader(FILE_TYPE *pF, int layerType, int nVersion) { struct MM_TH pMMHeader; diff --git a/ogr/ogrsf_frmts/miramon/mm_wrlayr.h b/ogr/ogrsf_frmts/miramon/mm_wrlayr.h index a1d7bbea6307..c9c87e8ad765 100644 --- a/ogr/ogrsf_frmts/miramon/mm_wrlayr.h +++ b/ogr/ogrsf_frmts/miramon/mm_wrlayr.h @@ -125,7 +125,6 @@ int MMInitLayerByType(struct MiraMonVectLayerInfo *hMiraMonLayer); int MMDestroyLayer(struct MiraMonVectLayerInfo *hMiraMonLayer); int MMCloseLayer(struct MiraMonVectLayerInfo *hMiraMonLayer); int MMReadHeader(FILE_TYPE *pF, struct MM_TH *pMMHeader); -void MMInitHeader(struct MM_TH *pMMHeader, int layerType, int nVersion); int MMWriteEmptyHeader(FILE_TYPE *pF, int layerType, int nVersion); int MMReadAHArcSection(struct MiraMonVectLayerInfo *hMiraMonLayer); int MMReadPHPolygonSection(struct MiraMonVectLayerInfo *hMiraMonLayer); diff --git a/ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp b/ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp index 503e3633860a..82edbc707c68 100644 --- a/ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp +++ b/ogr/ogrsf_frmts/miramon/ogrmiramonlayer.cpp @@ -788,6 +788,7 @@ OGRFeature *OGRMiraMonLayer::GetFeature(GIntBig nFeatureId) // Get X,Y (z). 
MiraMon has no multipoints if (MMGetGeoFeatureFromVector(phMiraMonLayer, nIElem)) { + CPLError(CE_Failure, CPLE_AppDefined, "Wrong file format."); delete poGeom; return nullptr; } @@ -806,6 +807,7 @@ OGRFeature *OGRMiraMonLayer::GetFeature(GIntBig nFeatureId) // Get X,Y (Z) n times MiraMon has no multilines if (MMGetGeoFeatureFromVector(phMiraMonLayer, nIElem)) { + CPLError(CE_Failure, CPLE_AppDefined, "Wrong file format."); delete poGeom; return nullptr; } @@ -843,17 +845,18 @@ OGRFeature *OGRMiraMonLayer::GetFeature(GIntBig nFeatureId) // Get X,Y (Z) n times MiraMon has no multilines if (MMGetGeoFeatureFromVector(phMiraMonLayer, nIElem)) { + CPLError(CE_Failure, CPLE_AppDefined, + "Wrong file format."); delete poGeom; return nullptr; } nIVrtAcum = 0; - if (!phMiraMonLayer->bIsPolygon && - !(phMiraMonLayer->ReadFeature.flag_VFG[0] & + if (!(phMiraMonLayer->ReadFeature.flag_VFG[0] & MM_EXTERIOR_ARC_SIDE)) { CPLError(CE_Failure, CPLE_NoWriteAccess, - "\nWrong polygon format."); + "Wrong polygon format."); delete poGeom; return nullptr; } @@ -917,6 +920,8 @@ OGRFeature *OGRMiraMonLayer::GetFeature(GIntBig nFeatureId) // Get X,Y (Z) n times because MiraMon has no multilinetrings if (MMGetGeoFeatureFromVector(phMiraMonLayer, nIElem)) { + CPLError(CE_Failure, CPLE_AppDefined, + "Wrong file format."); delete poGeom; return nullptr; } @@ -929,7 +934,7 @@ OGRFeature *OGRMiraMonLayer::GetFeature(GIntBig nFeatureId) MM_EXTERIOR_ARC_SIDE)) { CPLError(CE_Failure, CPLE_AssertionFailed, - "\nWrong polygon format."); + "Wrong polygon format."); delete poGeom; return nullptr; } @@ -1484,9 +1489,8 @@ OGRErr OGRMiraMonLayer::MMProcessMultiGeometry(OGRGeometryH hGeom, if (poGeom == nullptr) { - CPLError( - CE_Failure, CPLE_AppDefined, - "\nFeatures without geometry not supported by MiraMon writer."); + CPLError(CE_Failure, CPLE_AppDefined, + "Features without geometry not supported by MiraMon writer."); return LOG_ACTION(OGRERR_FAILURE); } @@ -2590,7 +2594,7 @@ OGRErr OGRMiraMonLayer::CreateField(const OGRFieldDefn *poField, int bApproxOK) if (!bApproxOK) { CPLError(CE_Failure, CPLE_AppDefined, - "\nField %s is of an unsupported type: %s.", + "Field %s is of an unsupported type: %s.", poField->GetNameRef(), poField->GetFieldTypeName(poField->GetType())); return OGRERR_FAILURE; From acf874c3d9a98469e8fc3345a40c89c4bd91786b Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 25 Apr 2024 03:57:58 +0200 Subject: [PATCH 177/230] CI: add a configuration testing numpy 2.0.0rc1 Refs #9751 This commit should be reverted once alpine:edge switches to numpy 2 --- .github/workflows/alpine_numpy2/Dockerfile.ci | 82 +++++++++++++++++++ .github/workflows/alpine_numpy2/build.sh | 36 ++++++++ .github/workflows/linux_build.yml | 6 ++ 3 files changed, 124 insertions(+) create mode 100644 .github/workflows/alpine_numpy2/Dockerfile.ci create mode 100755 .github/workflows/alpine_numpy2/build.sh diff --git a/.github/workflows/alpine_numpy2/Dockerfile.ci b/.github/workflows/alpine_numpy2/Dockerfile.ci new file mode 100644 index 000000000000..c12ff7c49c76 --- /dev/null +++ b/.github/workflows/alpine_numpy2/Dockerfile.ci @@ -0,0 +1,82 @@ +FROM alpine:edge + +RUN apk add \ + apache-arrow-dev \ + armadillo-dev \ + basisu-dev \ + blosc-dev \ + brunsli-dev \ + ccache \ + cfitsio-dev \ + cmake \ + curl-dev \ + expat-dev \ + freexl-dev \ + gcc \ + g++ \ + geos-dev \ + giflib-dev \ + gnu-libiconv-dev \ + hdf5-dev \ + json-c-dev \ + kealib-dev \ + libaec-dev \ + libarchive-dev \ + libdeflate-dev \ + 
libgeotiff-dev \ + libheif-dev \ + libjpeg-turbo-dev \ + libjxl-dev \ + libkml-dev \ + libpng-dev \ + libpq-dev \ + librasterlite2-dev \ + libspatialite-dev \ + libtirpc-dev \ + libwebp-dev \ + libxml2-dev \ + libxslt-dev \ + linux-headers \ + lz4-dev \ + make \ + mariadb-connector-c-dev \ + netcdf-dev \ + odbc-cpp-wrapper-dev \ + ogdi-dev \ + openexr-dev \ + openjpeg-dev \ + openssl-dev \ + pcre2-dev \ + podofo-dev \ + poppler-dev \ + proj-dev \ + proj-util \ + py3-pip \ + py3-setuptools \ + python3-dev \ + qhull-dev \ + sfcgal-dev \ + snappy-dev \ + sqlite-dev \ + swig \ + tiledb-dev \ + tiff-dev \ + unixodbc-dev \ + xerces-c-dev \ + xz-dev \ + zlib-dev \ + zstd-dev + +# Commenting out those packages to be sure to test numpy 2.0.0rc1 +# py3-numpy \ +# py3-numpy-dev \ +# py3-pyarrow \ +# py3-pyarrow-pyc \ + +# apache-arrow-dev actually comes with an embedded pyarrow version, which is not py3-pyarrow, and is non functional ! +RUN mv /usr/lib/python3.12/site-packages/pyarrow /usr/lib/python3.12/site-packages/pyarrow.disabled + +COPY requirements.txt /tmp/ +RUN python3 -m pip install --break-system-packages numpy==2.0.0rc1 +RUN python3 -m pip install --break-system-packages -U -r /tmp/requirements.txt + diff --git a/.github/workflows/alpine_numpy2/build.sh b/.github/workflows/alpine_numpy2/build.sh new file mode 100755 index 000000000000..9cbdfc622841 --- /dev/null +++ b/.github/workflows/alpine_numpy2/build.sh @@ -0,0 +1,36 @@ +#!/bin/sh + +set -e + +cat << EOF > /tmp/foo.cpp +#include <cstdio> +extern "C" void DeclareDeferredFOO(void); +void DeclareDeferredFOO() +{ + FILE* f = fopen("/tmp/DeclareDeferredFOO_has_been_run.bin", "wb"); + if (f) + fclose(f); +} +EOF + +cmake ${GDAL_SOURCE_DIR:=..} \ + -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_UNITY_BUILD=ON \ + -DUSE_CCACHE=ON \ + -DCMAKE_INSTALL_PREFIX=/usr \ + -DIconv_INCLUDE_DIR=/usr/include/gnu-libiconv \ + -DIconv_LIBRARY=/usr/lib/libiconv.so \ + -DADD_EXTERNAL_DEFERRED_PLUGIN_FOO=/tmp/foo.cpp \ + -DCMAKE_C_FLAGS=-Werror -DCMAKE_CXX_FLAGS="-std=c++23 -Werror" -DWERROR_DEV_FLAG="-Werror=dev" +make -j$(nproc) +make -j$(nproc) install DESTDIR=/tmp/install-gdal + +# To check if DeclareDeferredFOO() is called by GDALAllRegister() +apps/gdalinfo --version + +if test -f /tmp/DeclareDeferredFOO_has_been_run.bin; then + echo "DeclareDeferredFOO() has been run" +else + echo "DeclareDeferredFOO() has NOT been run" + exit 1 +fi diff --git a/.github/workflows/linux_build.yml b/.github/workflows/linux_build.yml index 69f75d555f31..f9e150cd954a 100644 --- a/.github/workflows/linux_build.yml +++ b/.github/workflows/linux_build.yml @@ -65,6 +65,12 @@ jobs: build_script: build.sh os: ubuntu-22.04 + - name: Alpine, numpy 2 + id: alpine_numpy2 + container: alpine_numpy2 + build_script: build.sh + os: ubuntu-22.04 + - name: Alpine, gcc 32-bit id: alpine_32bit container: alpine_32bit From c71f6877d20e6b7a9d3f15c3bc6f40b6f7edab0f Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 25 Apr 2024 04:32:57 +0200 Subject: [PATCH 178/230] Docker: update ubuntu-small to 24.04 [ci skip] --- docker/ubuntu-small/Dockerfile | 45 +++++++++++++++------------ docker/ubuntu-small/bh-set-envvars.sh | 4 +-- 2 files changed, 27 insertions(+), 22 deletions(-) diff --git a/docker/ubuntu-small/Dockerfile b/docker/ubuntu-small/Dockerfile index 1f890b00076a..9f859eb7866c 100644 --- a/docker/ubuntu-small/Dockerfile +++ b/docker/ubuntu-small/Dockerfile @@ -6,8 +6,8 @@ # or licensed under MIT (LICENSE.TXT) Copyright 2019 Even Rouault 
<even.rouault@spatialys.com> ARG PROJ_INSTALL_PREFIX=/usr/local -ARG BASE_IMAGE=ubuntu:22.04 -ARG TARGET_BASE_IMAGE=ubuntu:22.04 +ARG BASE_IMAGE=ubuntu:24.04 +ARG TARGET_BASE_IMAGE=ubuntu:24.04 FROM $BASE_IMAGE as builder @@ -21,18 +21,19 @@ COPY ./bh-set-envvars.sh /buildscripts/bh-set-envvars.sh RUN . /buildscripts/bh-set-envvars.sh \ && if test "${TARGET_ARCH}" != ""; then \ rm -f /etc/apt/sources.list \ - && echo "deb [arch=amd64] http://us.archive.ubuntu.com/ubuntu/ jammy main restricted universe multiverse" >> /etc/apt/sources.list \ - && echo "deb [arch=amd64] http://us.archive.ubuntu.com/ubuntu/ jammy-updates main restricted universe multiverse" >> /etc/apt/sources.list \ - && echo "deb [arch=amd64] http://us.archive.ubuntu.com/ubuntu/ jammy-backports main restricted universe multiverse" >> /etc/apt/sources.list \ - && echo "deb [arch=amd64] http://security.ubuntu.com/ubuntu jammy-security main restricted universe multiverse" >> /etc/apt/sources.list \ - && echo "deb [arch=${TARGET_ARCH}] http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe multiverse" >> /etc/apt/sources.list \ - && echo "deb [arch=${TARGET_ARCH}] http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe multiverse" >> /etc/apt/sources.list \ - && echo "deb [arch=${TARGET_ARCH}] http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse" >> /etc/apt/sources.list \ + rm -f /etc/apt/sources.list.d/ubuntu.sources \ + && echo "deb [arch=amd64] http://us.archive.ubuntu.com/ubuntu/ noble main restricted universe" >> /etc/apt/sources.list \ + && echo "deb [arch=amd64] http://us.archive.ubuntu.com/ubuntu/ noble-updates main restricted universe" >> /etc/apt/sources.list \ + && echo "deb [arch=amd64] http://us.archive.ubuntu.com/ubuntu/ noble-backports main restricted universe" >> /etc/apt/sources.list \ + && echo "deb [arch=amd64] http://security.ubuntu.com/ubuntu noble-security main restricted universe" >> /etc/apt/sources.list \ + && echo "deb [arch=${TARGET_ARCH}] http://ports.ubuntu.com/ubuntu-ports/ noble main restricted universe" >> /etc/apt/sources.list \ + && echo "deb [arch=${TARGET_ARCH}] http://ports.ubuntu.com/ubuntu-ports/ noble-updates main restricted universe" >> /etc/apt/sources.list \ + && echo "deb [arch=${TARGET_ARCH}] http://ports.ubuntu.com/ubuntu-ports/ noble-security main restricted universe" >> /etc/apt/sources.list \ && dpkg --add-architecture ${TARGET_ARCH} \ && apt-get update -y \ - && DEBIAN_FRONTEND=noninteractive apt-get install -y g++-11-${GCC_ARCH}-linux-gnu \ - && ln -s ${GCC_ARCH}-linux-gnu-gcc-11 /usr/bin/${GCC_ARCH}-linux-gnu-gcc \ - && ln -s ${GCC_ARCH}-linux-gnu-g++-11 /usr/bin/${GCC_ARCH}-linux-gnu-g++ \ + && DEBIAN_FRONTEND=noninteractive apt-get install -y g++-13-${GCC_ARCH}-linux-gnu \ + && ln -s ${GCC_ARCH}-linux-gnu-gcc-13 /usr/bin/${GCC_ARCH}-linux-gnu-gcc \ + && ln -s ${GCC_ARCH}-linux-gnu-g++-13 /usr/bin/${GCC_ARCH}-linux-gnu-g++ \ && rm -rf /var/lib/apt/lists/*; \ fi @@ -134,10 +135,12 @@ RUN . 
/buildscripts/bh-set-envvars.sh \ && ln -s libinternalproj.so.${PROJ_SO} /build${PROJ_INSTALL_PREFIX}/lib/libinternalproj.so.${PROJ_SO_FIRST} \ && ln -s libinternalproj.so.${PROJ_SO} /build${PROJ_INSTALL_PREFIX}/lib/libinternalproj.so \ && rm /build${PROJ_INSTALL_PREFIX}/lib/libproj.* \ - && ln -s libinternalproj.so.${PROJ_SO} /build${PROJ_INSTALL_PREFIX}/lib/libproj.so.${PROJ_SO} \ - && ln -s libinternalproj.so.${PROJ_SO} /build${PROJ_INSTALL_PREFIX}/lib/libproj.so.${PROJ_SO_FIRST} \ && ${GCC_ARCH}-linux-gnu-strip -s /build${PROJ_INSTALL_PREFIX}/lib/libinternalproj.so.${PROJ_SO} \ - && for i in /build${PROJ_INSTALL_PREFIX}/bin/*; do ${GCC_ARCH}-linux-gnu-strip -s $i 2>/dev/null || /bin/true; done + && for i in /build${PROJ_INSTALL_PREFIX}/bin/*; do ${GCC_ARCH}-linux-gnu-strip -s $i 2>/dev/null || /bin/true; done \ + && apt-get update -y \ + && DEBIAN_FRONTEND=noninteractive apt-get install -y patchelf \ + && patchelf --set-soname libinternalproj.so.${PROJ_SO_FIRST} /build${PROJ_INSTALL_PREFIX}/lib/libinternalproj.so.${PROJ_SO} \ + && for i in /build${PROJ_INSTALL_PREFIX}/bin/*; do patchelf --replace-needed libproj.so.${PROJ_SO_FIRST} libinternalproj.so.${PROJ_SO_FIRST} $i; done # Build GDAL ARG GDAL_VERSION=master @@ -172,9 +175,11 @@ RUN . /buildscripts/bh-set-envvars.sh \ fi \ && mkdir build \ && cd build \ - && CFLAGS='-DPROJ_RENAME_SYMBOLS -O2' CXXFLAGS='-DPROJ_RENAME_SYMBOLS -DPROJ_INTERNAL_CPP_NAMESPACE -O2' \ + # -Wno-psabi avoid 'note: parameter passing for argument of type 'std::pair<double, double>' when C++17 is enabled changed to match C++14 in GCC 10.1' on arm64 + && CFLAGS='-DPROJ_RENAME_SYMBOLS -O2' CXXFLAGS='-DPROJ_RENAME_SYMBOLS -DPROJ_INTERNAL_CPP_NAMESPACE -O2 -Wno-psabi' \ cmake .. \ -DCMAKE_INSTALL_PREFIX=/usr \ + -DGDAL_FIND_PACKAGE_PROJ_MODE=MODULE \ -DPROJ_INCLUDE_DIR="/build${PROJ_INSTALL_PREFIX-/usr/local}/include" \ -DPROJ_LIBRARY="/build${PROJ_INSTALL_PREFIX-/usr/local}/lib/libinternalproj.so" \ -DGDAL_USE_TIFF_INTERNAL=ON \ @@ -219,19 +224,19 @@ RUN apt-get update -y && apt-get upgrade -y \ RUN apt-get update \ # PROJ dependencies && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ - libsqlite3-0 libtiff5 libcurl4 \ + libsqlite3-0 libtiff6 libcurl4 \ curl unzip ca-certificates \ && rm -rf /var/lib/apt/lists/* # GDAL dependencies RUN apt-get update -y \ && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ - python3-numpy libpython3.10 \ - libjpeg-turbo8 libgeos3.10.2 libgeos-c1v5 \ + python3-numpy libpython3.12 \ + libjpeg-turbo8 libgeos3.12.1 libgeos-c1v5 \ libexpat1 \ libxerces-c3.2 \ libwebp7 libpng16-16 \ - libzstd1 bash libpq5 libssl3 libopenjp2-7 libspatialite7 \ + libzstd1 bash libpq5 libssl3 libopenjp2-7 libspatialite8 \ # pil for antialias option of gdal2tiles python3-pil \ && rm -rf /var/lib/apt/lists/* diff --git a/docker/ubuntu-small/bh-set-envvars.sh b/docker/ubuntu-small/bh-set-envvars.sh index 1d5dd1c97a92..36d8f5dd1906 100644 --- a/docker/ubuntu-small/bh-set-envvars.sh +++ b/docker/ubuntu-small/bh-set-envvars.sh @@ -9,8 +9,8 @@ if test "${TARGET_ARCH:-}" != ""; then exit 0 fi export APT_ARCH_SUFFIX=":${TARGET_ARCH}" - export CC=${GCC_ARCH}-linux-gnu-gcc-11 - export CXX=${GCC_ARCH}-linux-gnu-g++-11 + export CC=${GCC_ARCH}-linux-gnu-gcc-13 + export CXX=${GCC_ARCH}-linux-gnu-g++-13 export WITH_HOST="--host=${GCC_ARCH}-linux-gnu" else export APT_ARCH_SUFFIX="" From acbaefd63d6c5db3724053a7bec16d0137e3925a Mon Sep 17 00:00:00 2001 From: Peter Johnson <peter@geocode.earth> Date: Thu, 25 Apr 2024 
10:24:00 +0200 Subject: [PATCH 179/230] docs: pmtiles.rst - casing Update casing of `JSon` -> `JSON` --- doc/source/drivers/vector/pmtiles.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/source/drivers/vector/pmtiles.rst b/doc/source/drivers/vector/pmtiles.rst index 02e1a9717aac..d32325c36bf8 100644 --- a/doc/source/drivers/vector/pmtiles.rst +++ b/doc/source/drivers/vector/pmtiles.rst @@ -132,8 +132,8 @@ Dataset creation options - .. co:: CONF :choices: <json>, <filename> - Layer configuration as a JSon serialized string. - Or filename containing the configuration as JSon. + Layer configuration as a JSON serialized string. + Or filename containing the configuration as JSON. - .. co:: SIMPLIFICATION :choices: float @@ -186,7 +186,7 @@ Layer configuration ------------------- The above mentioned CONF dataset creation option can be set to a string -whose value is a JSon serialized document such as the below one: +whose value is a JSON serialized document such as the below one: .. code-block:: json From f841cf67219060c7a219f42164a369be7d8fb4e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=A9=8D=E4=B8=B9=E5=B0=BC=20Dan=20Jacobson?= <jidanni@jidanni.org> Date: Thu, 25 Apr 2024 16:32:41 +0800 Subject: [PATCH 180/230] Update ogr2ogr.rst to stay on screen in Linux man page --- doc/source/programs/ogr2ogr.rst | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/doc/source/programs/ogr2ogr.rst b/doc/source/programs/ogr2ogr.rst index 59953fc16bba..d840083a1435 100644 --- a/doc/source/programs/ogr2ogr.rst +++ b/doc/source/programs/ogr2ogr.rst @@ -34,11 +34,13 @@ Synopsis [-dim layer_dim|2|XY|3|XYZ|XYM|XYZM] [-s_coord_epoch <epoch>] [-a_coord_epoch <epoch>] [-t_coord_epoch <epoch>] [-ct <pipeline_def>] [-spat_srs <srs_def>] [-geomfield <name>] [-segmentize <max_dist>] [-simplify <tolerance>] [-makevalid] [-wrapdateline] - [-datelineoffset <val_in_degree>] [-clipsrc [<xmin> <ymin> <xmax> <ymax>]|<WKT>|<datasource>|spat_extent] + [-datelineoffset <val_in_degree>] + [-clipsrc [<xmin> <ymin> <xmax> <ymax>]|<WKT>|<datasource>|spat_extent] [-clipsrcsql <sql_statement>] [-clipsrclayer <layername>] [-clipsrcwhere <expression>] [-clipdst [<xmin> <ymin> <xmax> <ymax>]|<WKT>|<datasource>] [-clipdstsql <sql_statement>] [-clipdstlayer <layername>] [-clipdstwhere <expression>] [-explodecollections] [-zfield <name>] - [-gcp <ungeoref_x> <ungeoref_y> <georef_x> <georef_y> [<elevation>]]... [-tps] [-order 1|2|3] + [-gcp <ungeoref_x> <ungeoref_y> <georef_x> <georef_y> [<elevation>]]... + [-tps] [-order 1|2|3] [-xyRes <val>[ m|mm|deg]] [-zRes <val>[ m|mm]] [-mRes <val>] [-unsetCoordPrecision] Other options: From 96c0ceeaf46dbd1f18d1ce4d40ab719e36f33c5c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=A9=8D=E4=B8=B9=E5=B0=BC=20Dan=20Jacobson?= <jidanni@jidanni.org> Date: Thu, 25 Apr 2024 17:01:32 +0800 Subject: [PATCH 181/230] Update 10_bug_report.yml to make clear not talking about all of GitHub --- .github/ISSUE_TEMPLATE/10_bug_report.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/10_bug_report.yml b/.github/ISSUE_TEMPLATE/10_bug_report.yml index e87303029ac9..005f8b901a80 100644 --- a/.github/ISSUE_TEMPLATE/10_bug_report.yml +++ b/.github/ISSUE_TEMPLATE/10_bug_report.yml @@ -6,7 +6,7 @@ body: - type: markdown attributes: value: | - Thanks for taking the time to fill out this bug report correctly. Do NOT use GitHub to post any questions or support requests! They will be closed immediately and ignored. 
+ Thanks for taking the time to fill out this bug report correctly. Do NOT use this form to post any questions or support requests! They will be closed immediately and ignored. The GDAL project is made of contributions from various individuals and organizations, each with their own focus. The issue you are facing is not necessarily in the priority list of those contributors and consequently there is no guarantee that it will be addressed in a timely manner. If this bug report or feature request is high-priority for you, and you cannot address it yourself, we suggest engaging a GDAL developer or support organisation and financially sponsoring a fix. From 0efd3659cf5d36c1f0eedf34b99fb1c0e9b9d209 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 25 Apr 2024 14:51:44 +0200 Subject: [PATCH 182/230] ogr2ogr.rst: clarify -gcp --- doc/source/programs/ogr2ogr.rst | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/doc/source/programs/ogr2ogr.rst b/doc/source/programs/ogr2ogr.rst index d840083a1435..c1925fa2e756 100644 --- a/doc/source/programs/ogr2ogr.rst +++ b/doc/source/programs/ogr2ogr.rst @@ -511,8 +511,13 @@ output coordinate system or even reprojecting the features during translation. .. option:: -gcp <ungeoref_x> <ungeoref_y> <georef_x> <georef_y> [<elevation>] - Add the indicated ground control point. This option may be provided - multiple times to provide a set of GCPs. + Use the indicated ground control point to compute a coordinate transformation. + The transformation method can be selected by specifying the :option:`-order` + or :option:`-tps` options. + Note that unlike raster tools such as gdal_edit or gdal_translate, GCPs + are not added to the output dataset. + This option may be provided multiple times to provide a set of GCPs (at + least 2 GCPs are needed). .. 
option:: -order <n> From 35d5c9d17cd817ee7f26f6b9718318a6bf635c2a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=A9=8D=E4=B8=B9=E5=B0=BC=20Dan=20Jacobson?= <jidanni@jidanni.org> Date: Thu, 25 Apr 2024 16:55:10 +0800 Subject: [PATCH 183/230] --formats option: detail abbreviation codes --- gcore/gdal_misc.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/gcore/gdal_misc.cpp b/gcore/gdal_misc.cpp index a00be33fda88..bedc056a4e8c 100644 --- a/gcore/gdal_misc.cpp +++ b/gcore/gdal_misc.cpp @@ -3434,7 +3434,9 @@ int CPL_STDCALL GDALGeneralCmdLineProcessor(int nArgc, char ***ppapszArgv, if (nOptions == 0) nOptions = GDAL_OF_RASTER; - printf("Supported Formats:\n"); /*ok*/ + printf(/*ok*/ + "Supported Formats: (ro:read-only, rw:read-write, +:update, " + "v:virtual-I/O s:subdatasets)\n"); for (int iDr = 0; iDr < GDALGetDriverCount(); iDr++) { GDALDriverH hDriver = GDALGetDriver(iDr); From 09003d3ae688205396c86cb87c7354ab82fee162 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 25 Apr 2024 15:45:38 +0200 Subject: [PATCH 184/230] pyproject.toml: use numpy>=2.0.0rc1 for python >=3.9 Refs #9751 --- swig/python/pyproject.toml | 5 ++++- swig/python/setup.py.in | 1 + 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/swig/python/pyproject.toml b/swig/python/pyproject.toml index c0206bdd76cd..07ce63e756b7 100644 --- a/swig/python/pyproject.toml +++ b/swig/python/pyproject.toml @@ -1,5 +1,8 @@ [build-system] -requires = ["setuptools>=67.0.0", "oldest-supported-numpy", "wheel"] +requires = ["setuptools>=67.0.0", + "wheel", + "oldest-supported-numpy; python_version=='3.8'", + "numpy >=2.0.0rc1; python_version>='3.9'"] build-backend = "setuptools.build_meta" [project] diff --git a/swig/python/setup.py.in b/swig/python/setup.py.in index d48f46045003..ee28826b4ea2 100644 --- a/swig/python/setup.py.in +++ b/swig/python/setup.py.in @@ -73,6 +73,7 @@ def get_numpy_include(): _set_builtin("__NUMPY_SETUP__", False) import numpy + print('Using numpy ' + numpy.__version__) return numpy.get_include() From 5cd7def6e6a30ed47917e9f30b3c6f8e356012e2 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 25 Apr 2024 04:33:04 +0200 Subject: [PATCH 185/230] Docker: update ubuntu-full to 24.04 [ci skip] --- docker/README.md | 1 + docker/ubuntu-full/Dockerfile | 81 +++++++++++++++------------- docker/ubuntu-full/bh-gdal.sh | 4 +- docker/ubuntu-full/bh-proj.sh | 9 +++- docker/ubuntu-full/bh-set-envvars.sh | 4 +- 5 files changed, 58 insertions(+), 41 deletions(-) diff --git a/docker/README.md b/docker/README.md index 0d26f4c0d663..14aac61caa96 100644 --- a/docker/README.md +++ b/docker/README.md @@ -46,6 +46,7 @@ See [alpine-normal/Dockerfile](alpine-normal/Dockerfile) # Ubuntu based Ubuntu version: +* 24.04 for GDAL 3.9 * 22.04 for GDAL 3.6, 3.7 and 3.8 * 20.04 for GDAL 3.4 and 3.5 diff --git a/docker/ubuntu-full/Dockerfile b/docker/ubuntu-full/Dockerfile index f7ea5ff14948..306663f48611 100644 --- a/docker/ubuntu-full/Dockerfile +++ b/docker/ubuntu-full/Dockerfile @@ -5,8 +5,8 @@ # Public domain # or licensed under MIT (LICENSE.TXT) Copyright 2019 Even Rouault <even.rouault@spatialys.com> -ARG BASE_IMAGE=ubuntu:22.04 -ARG TARGET_BASE_IMAGE=ubuntu:22.04 +ARG BASE_IMAGE=ubuntu:24.04 +ARG TARGET_BASE_IMAGE=ubuntu:24.04 FROM $BASE_IMAGE as builder @@ -20,21 +20,23 @@ COPY ./bh-set-envvars.sh /buildscripts/bh-set-envvars.sh RUN . 
/buildscripts/bh-set-envvars.sh \ && if test "${TARGET_ARCH}" != ""; then \ rm -f /etc/apt/sources.list \ - && echo "deb [arch=amd64] http://us.archive.ubuntu.com/ubuntu/ jammy main restricted universe multiverse" >> /etc/apt/sources.list \ - && echo "deb [arch=amd64] http://us.archive.ubuntu.com/ubuntu/ jammy-updates main restricted universe multiverse" >> /etc/apt/sources.list \ - && echo "deb [arch=amd64] http://us.archive.ubuntu.com/ubuntu/ jammy-backports main restricted universe multiverse" >> /etc/apt/sources.list \ - && echo "deb [arch=amd64] http://security.ubuntu.com/ubuntu jammy-security main restricted universe multiverse" >> /etc/apt/sources.list \ - && echo "deb [arch=${TARGET_ARCH}] http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe multiverse" >> /etc/apt/sources.list \ - && echo "deb [arch=${TARGET_ARCH}] http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe multiverse" >> /etc/apt/sources.list \ - && echo "deb [arch=${TARGET_ARCH}] http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse" >> /etc/apt/sources.list \ + rm -f /etc/apt/sources.list.d/ubuntu.sources \ + && echo "deb [arch=amd64] http://us.archive.ubuntu.com/ubuntu/ noble main restricted universe" >> /etc/apt/sources.list \ + && echo "deb [arch=amd64] http://us.archive.ubuntu.com/ubuntu/ noble-updates main restricted universe" >> /etc/apt/sources.list \ + && echo "deb [arch=amd64] http://us.archive.ubuntu.com/ubuntu/ noble-backports main restricted universe" >> /etc/apt/sources.list \ + && echo "deb [arch=amd64] http://security.ubuntu.com/ubuntu noble-security main restricted universe" >> /etc/apt/sources.list \ + && echo "deb [arch=${TARGET_ARCH}] http://ports.ubuntu.com/ubuntu-ports/ noble main restricted universe" >> /etc/apt/sources.list \ + && echo "deb [arch=${TARGET_ARCH}] http://ports.ubuntu.com/ubuntu-ports/ noble-updates main restricted universe" >> /etc/apt/sources.list \ + && echo "deb [arch=${TARGET_ARCH}] http://ports.ubuntu.com/ubuntu-ports/ noble-security main restricted universe" >> /etc/apt/sources.list \ && dpkg --add-architecture ${TARGET_ARCH} \ && apt-get update -y \ - && DEBIAN_FRONTEND=noninteractive apt-get install -y g++-11-${GCC_ARCH}-linux-gnu \ - && ln -s ${GCC_ARCH}-linux-gnu-gcc-11 /usr/bin/${GCC_ARCH}-linux-gnu-gcc \ - && ln -s ${GCC_ARCH}-linux-gnu-g++-11 /usr/bin/${GCC_ARCH}-linux-gnu-g++ \ + && DEBIAN_FRONTEND=noninteractive apt-get install -y g++-13-${GCC_ARCH}-linux-gnu \ + && ln -s ${GCC_ARCH}-linux-gnu-gcc-13 /usr/bin/${GCC_ARCH}-linux-gnu-gcc \ + && ln -s ${GCC_ARCH}-linux-gnu-g++-13 /usr/bin/${GCC_ARCH}-linux-gnu-g++ \ && rm -rf /var/lib/apt/lists/*; \ fi + # Setup build env for PROJ USER root RUN . /buildscripts/bh-set-envvars.sh \ @@ -43,7 +45,7 @@ RUN . /buildscripts/bh-set-envvars.sh \ build-essential ca-certificates \ git make cmake wget unzip libtool automake \ zlib1g-dev${APT_ARCH_SUFFIX} libsqlite3-dev${APT_ARCH_SUFFIX} pkg-config sqlite3 libcurl4-openssl-dev${APT_ARCH_SUFFIX} \ - libtiff5-dev${APT_ARCH_SUFFIX} \ + libtiff-dev${APT_ARCH_SUFFIX} \ && rm -rf /var/lib/apt/lists/* ARG JAVA_VERSION=17 @@ -137,7 +139,11 @@ RUN . /buildscripts/bh-set-envvars.sh \ && cd tiledb \ && mkdir build_cmake \ && cd build_cmake \ - && ../bootstrap --prefix=/usr --disable-werror --disable-tests --disable-avx2 \ + && ../bootstrap --prefix=/usr --disable-werror --disable-tests --disable-avx2 --disable-webp \ + && cmake .. 
-DOPENSSL_INCLUDE_DIR=/usr/include -DOPENSSL_CRYPTO_LIBRARY=/usr/lib/${GCC_ARCH}-linux-gnu/libcrypto.so -DOPENSSL_SSL_LIBRARY=/usr/lib/${GCC_ARCH}-linux-gnu/libssl.so \ + && cd tiledb \ + && cmake ../.. -DOPENSSL_INCLUDE_DIR=/usr/include -DOPENSSL_CRYPTO_LIBRARY=/usr/lib/${GCC_ARCH}-linux-gnu/libcrypto.so -DOPENSSL_SSL_LIBRARY=/usr/lib/${GCC_ARCH}-linux-gnu/libssl.so \ + && cd .. \ && make -j$(nproc) \ && make install-tiledb DESTDIR="/build_thirdparty" \ && make install-tiledb \ @@ -182,18 +188,19 @@ RUN . /buildscripts/bh-set-envvars.sh \ && rm -rf FileGDB_API-RHEL7-64gcc83.tar.gz \ ) ; fi +# DISABLED: cf https://github.com/lucianpls/QB3/issues/16 # Build libqb3 -RUN . /buildscripts/bh-set-envvars.sh \ - && git clone https://github.com/lucianpls/QB3.git \ - && cd QB3/QB3lib \ - && mkdir build \ - && cd build \ - && cmake -DCMAKE_INSTALL_PREFIX=/usr -DCMAKE_BUILD_TYPE=Release .. \ - && make -j$(nproc) \ - && make -j$(nproc) install \ - && make install DESTDIR="/build_thirdparty" \ - && cd ../../.. \ - && rm -rf QB3 +#RUN . /buildscripts/bh-set-envvars.sh \ +# && git clone https://github.com/lucianpls/QB3.git \ +# && cd QB3/QB3lib \ +# && mkdir build \ +# && cd build \ +# && cmake -DCMAKE_INSTALL_PREFIX=/usr -DCMAKE_BUILD_TYPE=Release .. \ +# && make -j$(nproc) \ +# && make -j$(nproc) install \ +# && make install DESTDIR="/build_thirdparty" \ +# && cd ../../.. \ +# && rm -rf QB3 ARG WITH_PDFIUM=yes RUN if echo "$WITH_PDFIUM" | grep -Eiq "^(y(es)?|1|true)$" ; then ( \ @@ -227,9 +234,9 @@ RUN . /buildscripts/bh-set-envvars.sh \ && rm -rf /var/lib/apt/lists/* # Install Arrow C++ -ARG ARROW_VERSION=15.0.2-1 +ARG ARROW_VERSION=16.0.0-1 # ARROW_SOVERSION to be updated in the "Build final image" section too -ARG ARROW_SOVERSION=1500 +ARG ARROW_SOVERSION=1600 RUN . 
/buildscripts/bh-set-envvars.sh \ && apt-get update -y \ && DEBIAN_FRONTEND=noninteractive apt-get install -y -V ca-certificates lsb-release wget \ @@ -290,7 +297,7 @@ FROM $TARGET_BASE_IMAGE as runner USER root RUN date ARG JAVA_VERSION=17 -ARG ARROW_SOVERSION=1500 +ARG ARROW_SOVERSION=1600 # Update distro RUN apt-get update -y && apt-get upgrade -y \ @@ -300,28 +307,30 @@ ARG TARGET_ARCH= RUN apt-get update \ # PROJ dependencies && DEBIAN_FRONTEND=noninteractive apt-get install -y \ - libsqlite3-0 libtiff5 libcurl4 \ + libsqlite3-0 libtiff6 libcurl4 \ wget curl unzip ca-certificates \ # GDAL dependencies && DEBIAN_FRONTEND=noninteractive apt-get install -y \ libopenjp2-7 libcairo2 python3-numpy \ - libpng16-16 libjpeg-turbo8 libgif7 liblzma5 libgeos3.10.2 libgeos-c1v5 \ + libpng16-16 libjpeg-turbo8 libgif7 liblzma5 libgeos3.12.1 libgeos-c1v5 \ libxml2 libexpat1 \ - libxerces-c3.2 libnetcdf-c++4 netcdf-bin libpoppler118 libspatialite7 librasterlite2-1 gpsbabel \ - libhdf4-0-alt libhdf5-103 libhdf5-cpp-103 poppler-utils libfreexl1 unixodbc mdbtools libwebp7 \ + libxerces-c3.2 libnetcdf-c++4-1 netcdf-bin libpoppler134 libspatialite8 librasterlite2-1 gpsbabel \ + libhdf4-0-alt libhdf5-103-1 libhdf5-cpp-103-1 poppler-utils libfreexl1 unixodbc mdbtools libwebp7 \ liblcms2-2 libpcre3 libcrypto++8 libfyba0 \ libkmlbase1 libkmlconvenience1 libkmldom1 libkmlengine1 libkmlregionator1 libkmlxsd1 \ - libmysqlclient21 libogdi4.1 libcfitsio9 openjdk-"$JAVA_VERSION"-jre \ - libzstd1 bash bash-completion libpq5 libssl3 \ - libarmadillo10 libpython3.10 libopenexr25 libheif1 \ + libmysqlclient21 libogdi4.1 libcfitsio10 \ + libzstd1 bash bash-completion libpq5 \ + libarmadillo12 libpython3.12 libopenexr-3-1-30 libheif1 \ libdeflate0 libblosc1 liblz4-1 \ libbrotli1 \ libarchive13 \ libaec0 \ - libspdlog1 \ + libspdlog1.12 \ python-is-python3 \ # pil for antialias option of gdal2tiles python3-pil \ + # Install JRE with --no-install-recommends, otherwise it draws default-jre, which draws systemd, which fails to install when running the arm64v8/ubuntu:24.04 image on a 64bit host + && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends openjdk-"$JAVA_VERSION"-jre \ # Workaround bug in ogdi packaging && ln -s /usr/lib/ogdi/libvrf.so /usr/lib \ # Install Arrow C++ diff --git a/docker/ubuntu-full/bh-gdal.sh b/docker/ubuntu-full/bh-gdal.sh index 5d12fbf87c35..0c5493445441 100755 --- a/docker/ubuntu-full/bh-gdal.sh +++ b/docker/ubuntu-full/bh-gdal.sh @@ -38,7 +38,8 @@ wget -q "https://github.com/${GDAL_REPOSITORY}/archive/${GDAL_VERSION}.tar.gz" \ fi export CFLAGS="-DPROJ_RENAME_SYMBOLS -O2 -g" - export CXXFLAGS="-DPROJ_RENAME_SYMBOLS -DPROJ_INTERNAL_CPP_NAMESPACE -O2 -g" + # -Wno-psabi avoid 'note: parameter passing for argument of type 'std::pair<double, double>' when C++17 is enabled changed to match C++14 in GCC 10.1' on arm64 + export CXXFLAGS="-DPROJ_RENAME_SYMBOLS -DPROJ_INTERNAL_CPP_NAMESPACE -O2 -g -Wno-psabi" mkdir build cd build @@ -64,6 +65,7 @@ wget -q "https://github.com/${GDAL_REPOSITORY}/archive/${GDAL_VERSION}.tar.gz" \ echo "${GDAL_CMAKE_EXTRA_OPTS}" cmake .. 
\ -DCMAKE_INSTALL_PREFIX=/usr \ + -DGDAL_FIND_PACKAGE_PROJ_MODE=MODULE \ -DBUILD_TESTING=OFF \ -DPROJ_INCLUDE_DIR="/build${PROJ_INSTALL_PREFIX-/usr/local}/include" \ -DPROJ_LIBRARY="/build${PROJ_INSTALL_PREFIX-/usr/local}/lib/libinternalproj.so" \ diff --git a/docker/ubuntu-full/bh-proj.sh b/docker/ubuntu-full/bh-proj.sh index 4e99b99b38df..a0216409d2e3 100755 --- a/docker/ubuntu-full/bh-proj.sh +++ b/docker/ubuntu-full/bh-proj.sh @@ -71,8 +71,6 @@ ln -s "libinternalproj.so.${PROJ_SO}" "${DESTDIR}${PROJ_INSTALL_PREFIX}/lib/libi ln -s "libinternalproj.so.${PROJ_SO}" "${DESTDIR}${PROJ_INSTALL_PREFIX}/lib/libinternalproj.so" rm "${DESTDIR}${PROJ_INSTALL_PREFIX}/lib"/libproj.* -ln -s "libinternalproj.so.${PROJ_SO}" "${DESTDIR}${PROJ_INSTALL_PREFIX}/lib/libproj.so.${PROJ_SO}" -ln -s "libinternalproj.so.${PROJ_SO}" "${DESTDIR}${PROJ_INSTALL_PREFIX}/lib/libproj.so.${PROJ_SO_FIRST}" if [ "${WITH_DEBUG_SYMBOLS}" = "yes" ]; then # separate debug symbols @@ -98,3 +96,10 @@ else ${GCC_ARCH}-linux-gnu-strip -s "$P" 2>/dev/null || /bin/true; done; fi + +apt-get update -y +DEBIAN_FRONTEND=noninteractive apt-get install -y patchelf +patchelf --set-soname libinternalproj.so.${PROJ_SO_FIRST} ${DESTDIR}${PROJ_INSTALL_PREFIX}/lib/libinternalproj.so.${PROJ_SO} +for i in ${DESTDIR}${PROJ_INSTALL_PREFIX}/bin/*; do + patchelf --replace-needed libproj.so.${PROJ_SO_FIRST} libinternalproj.so.${PROJ_SO_FIRST} $i; +done diff --git a/docker/ubuntu-full/bh-set-envvars.sh b/docker/ubuntu-full/bh-set-envvars.sh index 1d5dd1c97a92..36d8f5dd1906 100644 --- a/docker/ubuntu-full/bh-set-envvars.sh +++ b/docker/ubuntu-full/bh-set-envvars.sh @@ -9,8 +9,8 @@ if test "${TARGET_ARCH:-}" != ""; then exit 0 fi export APT_ARCH_SUFFIX=":${TARGET_ARCH}" - export CC=${GCC_ARCH}-linux-gnu-gcc-11 - export CXX=${GCC_ARCH}-linux-gnu-g++-11 + export CC=${GCC_ARCH}-linux-gnu-gcc-13 + export CXX=${GCC_ARCH}-linux-gnu-g++-13 export WITH_HOST="--host=${GCC_ARCH}-linux-gnu" else export APT_ARCH_SUFFIX="" From 3c4041b073435bf75fe899d315563ed7cc27677e Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 25 Apr 2024 17:15:31 +0200 Subject: [PATCH 186/230] GDALVectorInfo(): fix 3.9.0beta1 regression regarding passing layer names Fixes https://github.com/USDAForestService/gdalraster/pull/324 --- apps/ogrinfo_lib.cpp | 26 ++++++++++++++++-------- autotest/utilities/test_ogrinfo_lib.py | 28 ++++++++++++++++++++++++++ 2 files changed, 46 insertions(+), 8 deletions(-) diff --git a/apps/ogrinfo_lib.cpp b/apps/ogrinfo_lib.cpp index a407116421a3..9aa19640505a 100644 --- a/apps/ogrinfo_lib.cpp +++ b/apps/ogrinfo_lib.cpp @@ -52,7 +52,6 @@ typedef enum struct GDALVectorInfoOptions { GDALVectorInfoFormat eFormat = FORMAT_TEXT; - std::string osFilename{}; std::string osWHERE{}; CPLStringList aosLayers{}; std::unique_ptr<OGRGeometry> poSpatialFilter; @@ -1843,9 +1842,7 @@ char *GDALVectorInfo(GDALDatasetH hDataset, CPLString osRet; CPLJSONObject oRoot; - const std::string osFilename(!psOptions->osFilename.empty() - ? psOptions->osFilename - : std::string(poDS->GetDescription())); + const std::string osFilename(poDS->GetDescription()); const bool bJson = psOptions->eFormat == FORMAT_JSON; CPLJSONArray oLayerArray; @@ -2430,8 +2427,13 @@ static std::unique_ptr<GDALArgumentParser> GDALVectorInfoOptionsGetParser( .help(_("Format/driver name(s) to try when opening the input file.")); argParser->add_argument("filename") - .nargs(psOptionsForBinary ? 
1 : 0) - .store_into(psOptions->osFilename) + .nargs(argparse::nargs_pattern::optional) + .action( + [psOptionsForBinary](const std::string &s) + { + if (psOptionsForBinary) + psOptionsForBinary->osFilename = s; + }) .help(_("The data source to open.")); argParser->add_argument("layer") @@ -2471,12 +2473,21 @@ std::string GDALVectorInfoGetParserUsage() /** * Allocates a GDALVectorInfoOptions struct. * + * Note that when this function is used a library function, and not from the + * ogrinfo utility, a dataset name must be specified if any layer names(s) are + * specified (if no layer name is specific, passing a dataset name is not + * needed). That dataset name may be a dummy one, as the dataset taken into + * account is the hDS parameter passed to GDALVectorInfo(). + * Similarly the -oo switch in a non-ogrinfo context will be ignored, and it + * is the responsibility of the user to apply them when opening the hDS parameter + * passed to GDALVectorInfo(). + * * @param papszArgv NULL terminated list of options (potentially including * filename and open options too), or NULL. The accepted options are the ones of * the <a href="/programs/ogrinfo.html">ogrinfo</a> utility. * @param psOptionsForBinary (output) may be NULL (and should generally be * NULL), otherwise (ogrinfo_bin.cpp use case) must be allocated with - * GDALVectorInfoOptionsForBinaryNew() prior to this + * GDALVectorInfoOptionsForBinaryNew() prior to this * function. Will be filled with potentially present filename, open options, * subdataset number... * @return pointer to the allocated GDALVectorInfoOptions struct. Must be freed @@ -2594,7 +2605,6 @@ GDALVectorInfoOptionsNew(char **papszArgv, if (psOptionsForBinary) { psOptions->bStdoutOutput = true; - psOptionsForBinary->osFilename = psOptions->osFilename; psOptionsForBinary->osSQLStatement = psOptions->osSQLStatement; } diff --git a/autotest/utilities/test_ogrinfo_lib.py b/autotest/utilities/test_ogrinfo_lib.py index e46db5212df0..055e69ed2c40 100755 --- a/autotest/utilities/test_ogrinfo_lib.py +++ b/autotest/utilities/test_ogrinfo_lib.py @@ -609,3 +609,31 @@ def test_ogrinfo_lib_json_features_resolution(): s = gdal.VectorInfo(content, dumpFeatures=True) assert "POINT Z (1.2 1.2 1.23)" in s + + +############################################################################### +# Test layers option + + +def test_ogrinfo_lib_layers(): + + ds = gdal.GetDriverByName("Memory").Create("dummy", 0, 0, 0, gdal.GDT_Unknown) + ds.CreateLayer("foo") + ds.CreateLayer("bar") + + j = gdal.VectorInfo(ds, format="json", layers=[]) + assert len(j["layers"]) == 2 + assert j["layers"][0]["name"] == "foo" + assert j["layers"][1]["name"] == "bar" + + j = gdal.VectorInfo(ds, format="json", layers=["foo"]) + assert len(j["layers"]) == 1 + assert j["layers"][0]["name"] == "foo" + + j = gdal.VectorInfo(ds, format="json", layers=["foo", "bar"]) + assert len(j["layers"]) == 2 + assert j["layers"][0]["name"] == "foo" + assert j["layers"][1]["name"] == "bar" + + with pytest.raises(Exception, match="Couldn't fetch requested layer"): + gdal.VectorInfo(ds, format="json", layers=["invalid"]) From fb549c6a88fe79b60b90fbfb53337b35f1df3789 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 25 Apr 2024 17:25:17 +0200 Subject: [PATCH 187/230] Doc: JSon -> JSON --- .../development/rfc/rfc67_nullfieldvalues.rst | 4 +-- doc/source/drivers/raster/gpkg.rst | 2 +- doc/source/drivers/raster/isis3.rst | 4 +-- doc/source/drivers/raster/jp2ecw.rst | 6 ++-- doc/source/drivers/raster/jp2kak.rst | 
6 ++-- doc/source/drivers/raster/jp2lura.rst | 6 ++-- doc/source/drivers/raster/jp2openjpeg.rst | 6 ++-- doc/source/drivers/raster/mbtiles.rst | 4 +-- doc/source/drivers/raster/vicar.rst | 2 +- doc/source/drivers/vector/csv.rst | 2 +- doc/source/drivers/vector/eeda.rst | 2 +- doc/source/drivers/vector/elasticsearch.rst | 18 ++++++------ doc/source/drivers/vector/geojson.rst | 28 +++++++++---------- doc/source/drivers/vector/geojsonseq.rst | 4 +-- doc/source/drivers/vector/mongodbv3.rst | 8 +++--- doc/source/drivers/vector/mvt.rst | 6 ++-- doc/source/drivers/vector/sqlite.rst | 2 +- 17 files changed, 55 insertions(+), 55 deletions(-) diff --git a/doc/source/development/rfc/rfc67_nullfieldvalues.rst b/doc/source/development/rfc/rfc67_nullfieldvalues.rst index 363860e590e1..8f8bef656d24 100644 --- a/doc/source/development/rfc/rfc67_nullfieldvalues.rst +++ b/doc/source/development/rfc/rfc67_nullfieldvalues.rst @@ -24,7 +24,7 @@ Rationale Currently, OGR supports one single concept to indicate that a field value is missing : the concept of unset field. -So assuming a JSon feature collection with 2 features would properties +So assuming a JSON feature collection with 2 features would properties would be { "foo": "bar" } and { "foo": "bar", "other_field": null }, OGR currently returns that the other_field is unset in both cases. @@ -179,7 +179,7 @@ On the write side, for the GeoJSON driver, in GDAL 2.1 or before, a unset field was written as field_name: null. Starting with GDAL 2.2, only fields explicitly set as null with OGR_F_SetFieldNull() will be written with a null value. Unset fields of a feature will not be present -in the corresponding JSon feature element. +in the corresponding JSON feature element. MIGRATION_GUIDE.TXT is updated to discuss those compatibility issues. diff --git a/doc/source/drivers/raster/gpkg.rst b/doc/source/drivers/raster/gpkg.rst index 588c6efef22d..2da9a922e8dd 100644 --- a/doc/source/drivers/raster/gpkg.rst +++ b/doc/source/drivers/raster/gpkg.rst @@ -362,7 +362,7 @@ In all the above tiling schemes, consecutive zoom levels defer by a resolution of a factor of two. Starting with GDAL 3.2, it is also possible to use a Tile Matrix Set definition, -encoded as a JSon file, according to the `OGC Two Dimensional Tile Matrix Set standard`_ +encoded as a JSON file, according to the `OGC Two Dimensional Tile Matrix Set standard`_ Examples of such files can be found at http://schemas.opengis.net/tms/1.0/json/examples/ The GDAL data directory also contains files prefixed with ``tms_`` and with a ``.json`` extension. If there is a ``tms_FOO.json`` file, then ``FOO`` can be used as the diff --git a/doc/source/drivers/raster/isis3.rst b/doc/source/drivers/raster/isis3.rst index 7689dc96011f..d33d74219097 100644 --- a/doc/source/drivers/raster/isis3.rst +++ b/doc/source/drivers/raster/isis3.rst @@ -46,7 +46,7 @@ Metadata -------- Starting with GDAL 2.2, the ISIS3 label can be retrieved as -JSon-serialized content in the json:ISIS3 metadata domain. +JSON-serialized content in the json:ISIS3 metadata domain. 
For example: @@ -317,7 +317,7 @@ Python : from osgeo import gdal src_ds = gdal.Open('in.lbl') - # Load source label as JSon + # Load source label as JSON label = json.loads( src_ds.GetMetadata_List('json:ISIS3')[0] ) # Update parameter label["IsisCube"]["Mapping"]["TargetName"] = "Moon" diff --git a/doc/source/drivers/raster/jp2ecw.rst b/doc/source/drivers/raster/jp2ecw.rst index 30f741ee574f..8195dcb83020 100644 --- a/doc/source/drivers/raster/jp2ecw.rst +++ b/doc/source/drivers/raster/jp2ecw.rst @@ -152,11 +152,11 @@ impact decoding speed and compatibility with other JPEG2000 toolkits. a GML box conforming to the `OGC GML in JPEG2000, version 2 <http://docs.opengeospatial.org/is/08-085r4/08-085r4.html>`__ specification should be included in the file. *filename* must point - to a file with a JSon content that defines how the GMLJP2 v2 box + to a file with a JSON content that defines how the GMLJP2 v2 box should be built. See :ref:`GMLJP2v2 definition file section <gmjp2v2def>` in documentation of - the JP2OpenJPEG driver for the syntax of the JSon configuration file. - It is also possible to directly pass the JSon content inlined as a + the JP2OpenJPEG driver for the syntax of the JSON configuration file. + It is also possible to directly pass the JSON content inlined as a string. If filename is just set to YES, a minimal instance will be built. diff --git a/doc/source/drivers/raster/jp2kak.rst b/doc/source/drivers/raster/jp2kak.rst index 2ca54565bfbe..48c4d6fc8f09 100644 --- a/doc/source/drivers/raster/jp2kak.rst +++ b/doc/source/drivers/raster/jp2kak.rst @@ -194,11 +194,11 @@ Creation Options: a GML box conforming to the `OGC GML in JPEG2000, version 2 <http://docs.opengeospatial.org/is/08-085r4/08-085r4.html>`__ specification should be included in the file. *filename* must point - to a file with a JSon content that defines how the GMLJP2 v2 box + to a file with a JSON content that defines how the GMLJP2 v2 box should be built. See :ref:`GMLJP2v2 definition file section <gmjp2v2def>` in documentation of - the JP2OpenJPEG driver for the syntax of the JSon configuration file. - It is also possible to directly pass the JSon content inlined as a + the JP2OpenJPEG driver for the syntax of the JSON configuration file. + It is also possible to directly pass the JSON content inlined as a string. If filename is just set to YES, a minimal instance will be built. diff --git a/doc/source/drivers/raster/jp2lura.rst b/doc/source/drivers/raster/jp2lura.rst index 6fbbd5511ed1..95c535adee44 100644 --- a/doc/source/drivers/raster/jp2lura.rst +++ b/doc/source/drivers/raster/jp2lura.rst @@ -96,12 +96,12 @@ Creation Options the `OGC GML in JPEG2000, version 2.0.1 <http://docs.opengeospatial.org/is/08-085r5/08-085r5.html>`__ specification should be included in the file. *filename* must point - to a file with a JSon content that defines how the GMLJP2 v2 box + to a file with a JSON content that defines how the GMLJP2 v2 box should be built. See :ref:`GMLJP2v2 definition file section <gmjp2v2def>` in documentation of - the JP2OpenJPEG driver for the syntax of the JSon configuration file. + the JP2OpenJPEG driver for the syntax of the JSON configuration file. It is also possible to - directly pass the JSon content inlined as a string. If filename is + directly pass the JSON content inlined as a string. If filename is just set to YES, a minimal instance will be built. - .. 
co:: GeoJP2 diff --git a/doc/source/drivers/raster/jp2openjpeg.rst b/doc/source/drivers/raster/jp2openjpeg.rst index 0bcf95c62cd0..981eb6d4c94d 100644 --- a/doc/source/drivers/raster/jp2openjpeg.rst +++ b/doc/source/drivers/raster/jp2openjpeg.rst @@ -144,9 +144,9 @@ Creation Options a GML box conforming to the `OGC GML in JPEG2000, version 2.0.1 <http://docs.opengeospatial.org/is/08-085r5/08-085r5.html>`__ specification should be included in the file. *filename* must point - to a file with a JSon content that defines how the GMLJP2 v2 box - should be built. See below section for the syntax of the JSon - configuration file. It is also possible to directly pass the JSon + to a file with a JSON content that defines how the GMLJP2 v2 box + should be built. See below section for the syntax of the JSON + configuration file. It is also possible to directly pass the JSON content inlined as a string. If filename is just set to YES, a minimal instance will be built. Note: GDAL 2.0 and 2.1 use the older `OGC GML in JPEG2000, version diff --git a/doc/source/drivers/raster/mbtiles.rst b/doc/source/drivers/raster/mbtiles.rst index a0b01bfbc8ea..7f2ca03a4016 100644 --- a/doc/source/drivers/raster/mbtiles.rst +++ b/doc/source/drivers/raster/mbtiles.rst @@ -355,7 +355,7 @@ The following creation options are available: - .. co:: CONF - Layer configuration as a JSon serialized string. + Layer configuration as a JSON serialized string. - .. co:: SIMPLIFICATION :choices: <float> @@ -439,7 +439,7 @@ Layer configuration (vector) ---------------------------- The above mentioned CONF dataset creation option can be set to a string -whose value is a JSon serialized document such as the below one: +whose value is a JSON serialized document such as the below one: :: diff --git a/doc/source/drivers/raster/vicar.rst b/doc/source/drivers/raster/vicar.rst index 8f98d1deaaac..90a367da41a4 100644 --- a/doc/source/drivers/raster/vicar.rst +++ b/doc/source/drivers/raster/vicar.rst @@ -30,7 +30,7 @@ Metadata -------- Starting with GDAL 3.1, the VICAR label can be retrieved as -JSon-serialized content in the json:VICAR metadata domain. +JSON-serialized content in the json:VICAR metadata domain. For example: diff --git a/doc/source/drivers/vector/csv.rst b/doc/source/drivers/vector/csv.rst index 21956a81d157..ec4ae49abd36 100644 --- a/doc/source/drivers/vector/csv.rst +++ b/doc/source/drivers/vector/csv.rst @@ -54,7 +54,7 @@ for geometries encoded in WKT Starting with GDAL 2.2, the "JSonStringList", "JSonIntegerList", "JSonInteger64List" and "JSonRealList" types can be used in .csvt to map to the corresponding OGR StringList, IntegerList, Integer64List and -RealList types. The field values are then encoded as JSon arrays, with +RealList types. The field values are then encoded as JSON arrays, with proper CSV escaping. 
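A minimal sketch of the JSON-array encoding described above (the ``demo.csv``/``demo.csvt`` file names and the ``tags`` field are invented for illustration): a CSV row holding ``1,"[""a"",""b""]"`` together with a sidecar ``.csvt`` declaring ``Integer,JSonStringList`` should come back as an OGR StringList:

::

    # Minimal sketch; file and field names are hypothetical.
    # demo.csv  : id,tags
    #             1,"[""a"",""b""]"
    # demo.csvt : Integer,JSonStringList
    from osgeo import ogr

    ds = ogr.Open("demo.csv")
    lyr = ds.GetLayer(0)
    f = lyr.GetNextFeature()
    print(f.GetField("tags"))  # expected: ['a', 'b'], decoded from the JSON array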
Automatic field type guessing can also be done diff --git a/doc/source/drivers/vector/eeda.rst b/doc/source/drivers/vector/eeda.rst index e0cbbe457c2f..a344bd1a7415 100644 --- a/doc/source/drivers/vector/eeda.rst +++ b/doc/source/drivers/vector/eeda.rst @@ -141,7 +141,7 @@ The following attributes will always be present: +---------------------+-----------+--------------------------------------------------------------+-------------------------------+ | band_crs | String | CRS as EPSG:XXXX or WKT (only set if equal among all bands) | No | +---------------------+-----------+--------------------------------------------------------------+-------------------------------+ - | other_properties | String | Serialized JSon dictionary with key/value pairs where key is | No | + | other_properties | String | Serialized JSON dictionary with key/value pairs where key is | No | | | | not a standalone field | | +---------------------+-----------+--------------------------------------------------------------+-------------------------------+ diff --git a/doc/source/drivers/vector/elasticsearch.rst b/doc/source/drivers/vector/elasticsearch.rst index d30cc5dd443a..6bb78aab2832 100644 --- a/doc/source/drivers/vector/elasticsearch.rst +++ b/doc/source/drivers/vector/elasticsearch.rst @@ -191,16 +191,16 @@ syntax <https://www.elastic.co/guide/en/elasticsearch/reference/current/query-ds They will be combined with the potentially defined spatial filter. It is also possible to directly use a Elasticsearch filter by setting -the string passed to SetAttributeFilter() as a JSon serialized object, +the string passed to SetAttributeFilter() as a JSON serialized object, e.g. .. code-block:: json { "post_filter": { "term": { "properties.EAS_ID": 169 } } } -Note: if defining directly an Elastic Search JSon filter, the spatial +Note: if defining directly an Elastic Search JSON filter, the spatial filter specified through SetSpatialFilter() will be ignored, and must -thus be included in the JSon filter if needed. +thus be included in the JSON filter if needed. Paging ------ @@ -221,7 +221,7 @@ and build the schema that best fit to the found fields and values. It is also possible to set the :oo:`JSON_FIELD=YES` open option so that a \_json special field is added to the OGR schema. When reading Elastic -Search documents as OGR features, the full JSon version of the document +Search documents as OGR features, the full JSON version of the document will be stored in the \_json field. This might be useful in case of complex documents or with data types that do not translate well in OGR data types. On creation/update of documents, if the \_json field is @@ -247,7 +247,7 @@ Starting with GDAL 2.2, SQL requests, involving a single layer, with WHERE and ORDER BY statements will be translated as Elasticsearch queries. -Otherwise, if specifying "ES" as the dialect of ExecuteSQL(), a JSon +Otherwise, if specifying "ES" as the dialect of ExecuteSQL(), a JSON string with a serialized `Elastic Search filter <https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-filters.html>`__ can be passed. The search will be done on all indices and types, unless @@ -325,7 +325,7 @@ members are: geo_shape geometries is only supported since Elasticsearch 7 and may require a non-free license. 
-- ``geohash_grid`` (optional): a JSon object, describing a few characteristics of +- ``geohash_grid`` (optional): a JSON object, describing a few characteristics of the geohash_grid, that can have the following members: * ``size`` (optional): maximum number of geohash buckets to return per query. The @@ -340,7 +340,7 @@ members are: taking into account the ``size`` parameter and the spatial filter, so that the theoretical number of buckets returned does not exceed ``size``. -- ``fields`` (optional): a JSon object, describing which additional statistical +- ``fields`` (optional): a JSON object, describing which additional statistical fields should be added, that can have the following members: * ``min`` (optional): array with the paths to index properties on which @@ -455,7 +455,7 @@ options: :since: 2.4 Filename from which to read a user-defined index definition, or inlined index - definition as serialized JSon. + definition as serialized JSON . - .. lco:: MAPPING_NAME @@ -469,7 +469,7 @@ options: - .. lco:: MAPPING :choices: <filename>, <json> - Filename from which to read a user-defined mapping, or mapping as serialized JSon. + Filename from which to read a user-defined mapping, or mapping as serialized JSON . - .. lco:: WRITE_MAPPING :choices: <filename> diff --git a/doc/source/drivers/vector/geojson.rst b/doc/source/drivers/vector/geojson.rst index 08fd7d5d9332..1779832e2fb1 100644 --- a/doc/source/drivers/vector/geojson.rst +++ b/doc/source/drivers/vector/geojson.rst @@ -83,7 +83,7 @@ If a top-level member of GeoJSON data is of any other type than feature. Otherwise, a layer will consists of a set of features. If the :oo:`NATIVE_DATA` open option is set to YES, members at the level of -the FeatureCollection will be stored as a serialized JSon object in the +the FeatureCollection will be stored as a serialized JSON object in the NATIVE_DATA item of the NATIVE_DATA metadata domain of the layer object (and "application/vnd.geo+json" in the NATIVE_MEDIA_TYPE of the NATIVE_DATA metadata domain). @@ -116,15 +116,15 @@ Default behavior is to preserve all attributes (as an union, see previous paragraph), what is equal to setting :config:`ATTRIBUTES_SKIP=NO`. -If the :oo:`NATIVE_DATA` open option is set to YES, the Feature JSon object -will be stored as a serialized JSon object in the NativeData property of +If the :oo:`NATIVE_DATA` open option is set to YES, the Feature JSON object +will be stored as a serialized JSON object in the NativeData property of the OGRFeature object (and "application/vnd.geo+json" in the NativeMediaType property). On write, if a OGRFeature to be written has its NativeMediaType property set to "application/vnd.geo+json" and its -NativeData property set to a string that is a serialized JSon object, +NativeData property set to a string that is a serialized JSON object, then extra members of this object (i.e. not the "property" dictionary, nor the first 3 dimensions of geometry coordinates) will be used to -enhance the created JSon object from the OGRFeature. See :ref:`rfc-60` +enhance the created JSON object from the OGRFeature. See :ref:`rfc-60` for more details. Geometry @@ -205,9 +205,9 @@ Open options :since: 2.1 Whether to store the native - JSon representation at FeatureCollection and Feature level. + JSON representation at FeatureCollection and Feature level. 
This option can be used to improve round-tripping from GeoJSON - to GeoJSON by preserving some extra JSon objects that would otherwise + to GeoJSON by preserving some extra JSON objects that would otherwise be ignored by the OGR abstraction. Note that ogr2ogr by default enable this option, unless you specify its -noNativeData switch. @@ -215,7 +215,7 @@ Open options :choices: YES, NO :since: 2.1 - Whether to expose JSon + Whether to expose JSON arrays of strings, integers or reals as a OGR String. Default is NO. Can also be set with the :config:`OGR_GEOJSON_ARRAY_AS_STRING` configuration option. @@ -300,7 +300,7 @@ Layer creation options - .. lco:: NATIVE_DATA :since: 2.1 - Serialized JSon object that + Serialized JSON object that contains extra properties to store at FeatureCollection level. - .. lco:: NATIVE_MEDIA_TYPE @@ -369,8 +369,8 @@ Layer creation options :since: 2.4 Whether to write - NaN / Infinity values. Such values are not allowed in strict JSon - mode, but some JSon parsers (libjson-c >= 0.12 for example) can + NaN / Infinity values. Such values are not allowed in strict JSON + mode, but some JSON parsers (libjson-c >= 0.12 for example) can understand them as they are allowed by ECMAScript. - .. lco:: AUTODETECT_JSON_STRINGS @@ -411,12 +411,12 @@ domains. Writing to /dev/stdout or /vsistdout/ is also supported. -Round-tripping of extra JSon members +Round-tripping of extra JSON members ------------------------------------ See :ref:`rfc-60` for more details. -Starting with GDAL 2.1, extra JSon members at the FeatureCollection, +Starting with GDAL 2.1, extra JSON members at the FeatureCollection, Feature or geometry levels that are not normally reflected in the OGR abstraction, such as the ones called "extra_XXXXX_member" in the below snippet, are by default preserved when executing ogr2ogr with GeoJSON @@ -547,5 +547,5 @@ See Also - `JSON <http://json.org/>`__ - JavaScript Object Notation - :ref:`GeoJSON sequence driver <vector.geojsonseq>` - :ref:`OGC Features and Geometries JSON (JSON-FG) driver <vector.jsonfg>` -- :ref:`ESRI JSon / FeatureService driver <vector.esrijson>` +- :ref:`ESRI JSON / FeatureService driver <vector.esrijson>` - :ref:`TopoJSON driver <vector.topojson>` diff --git a/doc/source/drivers/vector/geojsonseq.rst b/doc/source/drivers/vector/geojsonseq.rst index a03302e82212..ffdfbfc2c3dd 100644 --- a/doc/source/drivers/vector/geojsonseq.rst +++ b/doc/source/drivers/vector/geojsonseq.rst @@ -100,8 +100,8 @@ Layer creation options :since: 3.8 Whether to write - NaN / Infinity values. Such values are not allowed in strict JSon - mode, but some JSon parsers (libjson-c >= 0.12 for example) can + NaN / Infinity values. Such values are not allowed in strict JSON + mode, but some JSON parsers (libjson-c >= 0.12 for example) can understand them as they are allowed by ECMAScript. - .. lco:: AUTODETECT_JSON_STRINGS diff --git a/doc/source/drivers/vector/mongodbv3.rst b/doc/source/drivers/vector/mongodbv3.rst index 4fd297d15c09..d421502f0919 100644 --- a/doc/source/drivers/vector/mongodbv3.rst +++ b/doc/source/drivers/vector/mongodbv3.rst @@ -157,7 +157,7 @@ the geometry field. However, in the current state, SQL attribute filters set with SetAttributeFilter() are evaluated only on client-side. To enable server-side filtering, the string passed to SetAttributeFilter() must be -a JSon object in the `MongoDB filter +a JSON object in the `MongoDB filter syntax <https://docs.mongodb.com/manual/reference/method/db.collection.find/index.html>`__. 
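A minimal sketch of such a server-side filter, assuming an already opened MongoDB layer ``lyr`` and an invented ``properties.population`` attribute:

::

    # Hypothetical layer and attribute names; the filter string follows the
    # MongoDB find() filter syntax linked above.
    lyr.SetAttributeFilter('{ "properties.population": { "$gte": 100000 } }')
    for f in lyr:
        print(f.GetFID())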
Paging @@ -187,7 +187,7 @@ option. It is also possible to set the JSON_FIELD=YES open option so that a \_json special field is added to the OGR schema. When reading MongoDB -documents as OGR features, the full JSon version of the document will be +documents as OGR features, the full JSON version of the document will be stored in the \_json field. This might be useful in case of complex documents or with data types that do not translate well in OGR data types. On creation/update of documents, if the \_json field is present @@ -209,10 +209,10 @@ at all. ExecuteSQL() interface ---------------------- -If specifying "MongoDB" as the dialect of ExecuteSQL(), a JSon string +If specifying "MongoDB" as the dialect of ExecuteSQL(), a JSON string with a serialized `MongoDB command <https://docs.mongodb.com/manual/reference/command/index.html>`__ -can be passed. The result will be returned as a JSon string in a single +can be passed. The result will be returned as a JSON string in a single OGR feature. Standard SQL requests will be executed on client-side. diff --git a/doc/source/drivers/vector/mvt.rst b/doc/source/drivers/vector/mvt.rst index 5c665678df1e..39ba4ad33bd6 100644 --- a/doc/source/drivers/vector/mvt.rst +++ b/doc/source/drivers/vector/mvt.rst @@ -268,8 +268,8 @@ Dataset creation options - .. co:: CONF :choices: <json>, <filename> - Layer configuration as a JSon serialized string. - Or, starting with GDAL 3.0.1, filename containing the configuration as JSon. + Layer configuration as a JSON serialized string. + Or, starting with GDAL 3.0.1, filename containing the configuration as JSON . - .. co:: SIMPLIFICATION :choices: float @@ -366,7 +366,7 @@ Layer configuration ------------------- The above mentioned CONF dataset creation option can be set to a string -whose value is a JSon serialized document such as the below one: +whose value is a JSON serialized document such as the below one: .. code-block:: json diff --git a/doc/source/drivers/vector/sqlite.rst b/doc/source/drivers/vector/sqlite.rst index 257ed243d00d..4e16be0f1956 100644 --- a/doc/source/drivers/vector/sqlite.rst +++ b/doc/source/drivers/vector/sqlite.rst @@ -28,7 +28,7 @@ are also handled. Starting with GDAL 2.2, the "JSonStringList", "JSonIntegerList", "JSonInteger64List" and "JSonRealList" SQLite declaration types are used to map the corresponding OGR StringList, IntegerList, Integer64List and -RealList types. The field values are then encoded as JSon arrays, with +RealList types. The field values are then encoded as JSON arrays, with proper CSV escaping. 
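A minimal sketch of the mapping described above, assuming an SQLite table whose ``tags`` column was declared with the ``JSonStringList`` declaration type (the ``demo.sqlite`` database, ``demo`` table and ``tags`` column are invented for illustration):

::

    # Hypothetical database, table and column names.
    from osgeo import ogr

    ds = ogr.Open("demo.sqlite")
    lyr = ds.GetLayerByName("demo")
    defn = lyr.GetLayerDefn()
    idx = defn.GetFieldIndex("tags")
    print(defn.GetFieldDefn(idx).GetTypeName())   # expected: "StringList"
    print(lyr.GetNextFeature().GetField("tags"))  # expected: ['a', 'b']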
SQLite databases often do not work well over NFS, or some other From 5f179fd4ca15c4953b5d5dd9d0fa1fbcc8976371 Mon Sep 17 00:00:00 2001 From: AbelPau <92721356+AbelPau@users.noreply.github.com> Date: Thu, 25 Apr 2024 18:38:08 +0200 Subject: [PATCH 188/230] MiraMonVectorDriver: deleting unused functions/parameters (#9759) --- ogr/ogrsf_frmts/miramon/mm_constants.h | 4 - ogr/ogrsf_frmts/miramon/mm_gdal_functions.c | 307 +++----------------- ogr/ogrsf_frmts/miramon/mm_gdal_functions.h | 4 +- ogr/ogrsf_frmts/miramon/mm_wrlayr.c | 70 +---- ogr/ogrsf_frmts/miramon/mm_wrlayr.h | 1 - 5 files changed, 37 insertions(+), 349 deletions(-) diff --git a/ogr/ogrsf_frmts/miramon/mm_constants.h b/ogr/ogrsf_frmts/miramon/mm_constants.h index dbef6343ccd4..eccbf9211012 100644 --- a/ogr/ogrsf_frmts/miramon/mm_constants.h +++ b/ogr/ogrsf_frmts/miramon/mm_constants.h @@ -161,10 +161,6 @@ typedef unsigned char MM_BYTE; #define MM_PRIVATE_ARC_DB_FIELDS 5 #define MM_PRIVATE_POLYGON_DB_FIELDS 6 -#define MM_NOU_N_DECIMALS_NO_APLICA 0 -#define MM_APLICAR_NOU_N_DECIMALS 1 -#define MM_NOMES_DOCUMENTAR_NOU_N_DECIMALS 2 -#define MM_PREGUNTA_SI_APLICAR_NOU_N_DECIM 3 #define MM_CHARACTERS_DOUBLE 40 #ifdef GDAL_COMPILATION diff --git a/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c b/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c index e1d6239950b4..0edb01c2ed1a 100644 --- a/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c +++ b/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c @@ -201,7 +201,6 @@ const char *MMLog(const char *pszMsg, int nLineNumber) static const char MM_EmptyString[] = {""}; #define MM_SetEndOfString (*MM_EmptyString) -static const char MM_BlankString[] = {" "}; void fclose_and_nullify(FILE_TYPE **pFunc) { @@ -313,20 +312,6 @@ struct MM_DATA_BASE_XP *MM_CreateDBFHeader(MM_EXT_DBF_N_FIELDS n_camps, return bd_xp; } -MM_BYTE MM_DBFFieldTypeToVariableProcessing(MM_BYTE tipus_camp_DBF) -{ - switch (tipus_camp_DBF) - { - case 'N': - return MM_QUANTITATIVE_CONTINUOUS_FIELD; - case 'D': - case 'C': - case 'L': - return MM_CATEGORICAL_FIELD; - } - return MM_CATEGORICAL_FIELD; -} - static MM_BYTE MM_GetDefaultDesiredDBFFieldWidth(const struct MM_FIELD *camp) { size_t a, b, c, d, e; @@ -2148,142 +2133,6 @@ size_t MM_DefineFirstPointFieldsDB_XP(struct MM_DATA_BASE_XP *bd_xp) return i_camp; } -static int MM_SprintfDoubleWidth(char *cadena, size_t cadena_size, int amplada, - int n_decimals, double valor_double, - MM_BOOLEAN *Error_sprintf_n_decimals) -{ -#define VALOR_LIMIT_IMPRIMIR_EN_FORMAT_E 1E+17 -#define VALOR_MASSA_PETIT_PER_IMPRIMIR_f 1E-17 - char cadena_treball[MM_CHARACTERS_DOUBLE + 1]; - int retorn_printf; - - if (MM_IsNANDouble(valor_double)) - { - if (amplada < 3) - { - *cadena = *MM_EmptyString; - return EOF; - } - return snprintf(cadena, cadena_size, "NAN"); - } - if (MM_IsDoubleInfinite(valor_double)) - { - if (amplada < 3) - { - *cadena = *MM_EmptyString; - return EOF; - } - return snprintf(cadena, cadena_size, "INF"); - } - - *Error_sprintf_n_decimals = FALSE; - if (valor_double == 0) - { - retorn_printf = snprintf(cadena_treball, sizeof(cadena_treball), - "%*.*f", amplada, n_decimals, valor_double); - if (retorn_printf >= (int)sizeof(cadena_treball)) - { - *cadena = *MM_EmptyString; - return retorn_printf; - } - - if (retorn_printf > amplada) - { - int escurcament = retorn_printf - amplada; - if (escurcament > n_decimals) - { - *cadena = *MM_EmptyString; - return EOF; - } - *Error_sprintf_n_decimals = TRUE; - n_decimals = n_decimals - escurcament; - retorn_printf = snprintf(cadena, cadena_size, "%*.*f", amplada, - 
n_decimals, valor_double); - } - else - CPLStrlcpy(cadena, cadena_treball, cadena_size); - - return retorn_printf; - } - - if (valor_double > VALOR_LIMIT_IMPRIMIR_EN_FORMAT_E || - valor_double < -VALOR_LIMIT_IMPRIMIR_EN_FORMAT_E || - (valor_double < VALOR_MASSA_PETIT_PER_IMPRIMIR_f && - valor_double > -VALOR_MASSA_PETIT_PER_IMPRIMIR_f)) - { - retorn_printf = snprintf(cadena_treball, sizeof(cadena_treball), - "%*.*E", amplada, n_decimals, valor_double); - - if (retorn_printf >= (int)sizeof(cadena_treball)) - { - *cadena = *MM_EmptyString; - return retorn_printf; - } - if (retorn_printf > amplada) - { - int escurcament = retorn_printf - amplada; - if (escurcament > n_decimals) - { - *cadena = *MM_EmptyString; - return EOF; - } - *Error_sprintf_n_decimals = TRUE; - n_decimals = n_decimals - escurcament; - retorn_printf = snprintf(cadena, cadena_size, "%*.*E", amplada, - n_decimals, valor_double); - } - else - CPLStrlcpy(cadena, cadena_treball, cadena_size); - - return retorn_printf; - } - - retorn_printf = snprintf(cadena_treball, sizeof(cadena_treball), "%*.*f", - amplada, n_decimals, valor_double); - - if (retorn_printf >= (int)sizeof(cadena_treball)) - { - *cadena = *MM_EmptyString; - return retorn_printf; - } - - if (retorn_printf > amplada) - { - int escurcament = retorn_printf - amplada; - if (escurcament > n_decimals) - { - *cadena = *MM_EmptyString; - return EOF; - } - *Error_sprintf_n_decimals = TRUE; - n_decimals = n_decimals - escurcament; - retorn_printf = snprintf(cadena, cadena_size, "%*.*f", amplada, - n_decimals, valor_double); - } - else - CPLStrlcpy(cadena, cadena_treball, cadena_size); - - return retorn_printf; - -#undef VALOR_LIMIT_IMPRIMIR_EN_FORMAT_E -#undef VALOR_MASSA_PETIT_PER_IMPRIMIR_f -} // End of MM_SprintfDoubleWidth() - -static MM_BOOLEAN MM_EmptyString_function(const char *cadena) -{ - const char *ptr = cadena; - - for (; *ptr; ptr++) - { - if (*ptr != ' ' && *ptr != '\t') - { - return FALSE; - } - } - - return TRUE; -} - int MM_SecureCopyStringFieldValue(char **pszStringDst, const char *pszStringSrc, MM_EXT_DBF_N_FIELDS *nStringCurrentLength) { @@ -2319,8 +2168,7 @@ int MM_SecureCopyStringFieldValue(char **pszStringDst, const char *pszStringSrc, int MM_ChangeDBFWidthField(struct MM_DATA_BASE_XP *data_base_XP, MM_EXT_DBF_N_FIELDS nIField, MM_BYTES_PER_FIELD_TYPE_DBF nNewWidth, - MM_BYTE nNewPrecision, - MM_BYTE que_fer_amb_reformatat_decimals) + MM_BYTE nNewPrecision) { char *record, *whites = nullptr; MM_BYTES_PER_FIELD_TYPE_DBF l_glop1, l_glop2, i_glop2; @@ -2331,8 +2179,6 @@ int MM_ChangeDBFWidthField(struct MM_DATA_BASE_XP *data_base_XP, size_t retorn_fwrite; int retorn_TruncaFitxer; - MM_BOOLEAN error_sprintf_n_decimals = FALSE; - canvi_amplada = nNewWidth - data_base_XP->pField[nIField].BytesPerField; if (data_base_XP->nRecords != 0) @@ -2421,136 +2267,55 @@ int MM_ChangeDBFWidthField(struct MM_DATA_BASE_XP *data_base_XP, } break; case 'N': - if (nNewPrecision == - data_base_XP->pField[nIField].DecimalsIfFloat || - que_fer_amb_reformatat_decimals == - MM_NOU_N_DECIMALS_NO_APLICA) - que_fer_amb_reformatat_decimals = - MM_NOMES_DOCUMENTAR_NOU_N_DECIMALS; - else if (que_fer_amb_reformatat_decimals == - MM_PREGUNTA_SI_APLICAR_NOU_N_DECIM) - que_fer_amb_reformatat_decimals = - MM_NOMES_DOCUMENTAR_NOU_N_DECIMALS; - - if (que_fer_amb_reformatat_decimals == - MM_NOMES_DOCUMENTAR_NOU_N_DECIMALS) - { - if (canvi_amplada >= 0) - { - if (1 != - fwrite_function(whites, canvi_amplada, 1, - data_base_XP->pfDataBase) || - 1 != fwrite_function( - record + l_glop1, - 
data_base_XP->pField[nIField] - .BytesPerField, - 1, data_base_XP->pfDataBase)) - { - free_function(whites); - free_function(record); - return 1; - } - } - else if (canvi_amplada < 0) - { - j = (GInt32)(l_glop1 + - (data_base_XP->pField[nIField] - .BytesPerField - - 1)); - while (TRUE) - { - j--; - if (j < (GInt32)l_glop1 || record[j] == ' ') - { - j++; - break; - } - } - - if ((data_base_XP->pField[nIField].BytesPerField + - l_glop1 - j) < nNewWidth) - j -= (GInt32)(nNewWidth - - (data_base_XP->pField[nIField] - .BytesPerField + - l_glop1 - j)); - - retorn_fwrite = - fwrite_function(record + j, nNewWidth, 1, - data_base_XP->pfDataBase); - if (1 != retorn_fwrite) - { - free_function(whites); - free_function(record); - return 1; - } - } - } - else // MM_APLICAR_NOU_N_DECIMALS + if (canvi_amplada >= 0) { - double valor; - char *sz_valor; - size_t sz_valor_size = - max_function( - nNewWidth, - data_base_XP->pField[nIField].BytesPerField) + - 1; - - if ((sz_valor = calloc_function(sz_valor_size)) == - nullptr) // Sumo 1 per poder posar-hi el \0 + if (1 != fwrite_function(whites, canvi_amplada, 1, + data_base_XP->pfDataBase) || + 1 != + fwrite_function( + record + l_glop1, + data_base_XP->pField[nIField].BytesPerField, + 1, data_base_XP->pfDataBase)) { free_function(whites); free_function(record); return 1; } - memcpy(sz_valor, record + l_glop1, - data_base_XP->pField[nIField].BytesPerField); - sz_valor[data_base_XP->pField[nIField].BytesPerField] = - 0; - - if (!MM_EmptyString_function(sz_valor)) + } + else if (canvi_amplada < 0) + { + j = (GInt32)(l_glop1 + (data_base_XP->pField[nIField] + .BytesPerField - + 1)); + while (TRUE) { - if (sscanf(sz_valor, "%lf", &valor) != 1) - memset( - sz_valor, *MM_BlankString, - max_function(nNewWidth, - data_base_XP->pField[nIField] - .BytesPerField)); - else - { - MM_SprintfDoubleWidth( - sz_valor, sz_valor_size, nNewWidth, - nNewPrecision, valor, - &error_sprintf_n_decimals); - } + j--; - retorn_fwrite = - fwrite_function(sz_valor, nNewWidth, 1, - data_base_XP->pfDataBase); - if (1 != retorn_fwrite) + if (j < (GInt32)l_glop1 || record[j] == ' ') { - free_function(whites); - free_function(record); - free_function(sz_valor); - return 1; + j++; + break; } } - else + + if ((data_base_XP->pField[nIField].BytesPerField + + l_glop1 - j) < nNewWidth) + j -= (GInt32)(nNewWidth - + (data_base_XP->pField[nIField] + .BytesPerField + + l_glop1 - j)); + + retorn_fwrite = fwrite_function( + record + j, nNewWidth, 1, data_base_XP->pfDataBase); + if (1 != retorn_fwrite) { - memset(sz_valor, *MM_BlankString, nNewWidth); - retorn_fwrite = - fwrite_function(sz_valor, nNewWidth, 1, - data_base_XP->pfDataBase); - if (1 != retorn_fwrite) - { - free_function(whites); - free_function(record); - free_function(sz_valor); - return 1; - } + free_function(whites); + free_function(record); + return 1; } - free_function(sz_valor); } + break; default: free_function(whites); @@ -2606,8 +2371,6 @@ int MM_ChangeDBFWidthField(struct MM_DATA_BASE_XP *data_base_XP, } data_base_XP->pField[nIField].DecimalsIfFloat = nNewPrecision; - //DonaData(&(data_base_XP->day), &(data_base_XP->month), &(data_base_XP->year)); - if ((MM_UpdateEntireHeader(data_base_XP)) == FALSE) return 1; diff --git a/ogr/ogrsf_frmts/miramon/mm_gdal_functions.h b/ogr/ogrsf_frmts/miramon/mm_gdal_functions.h index a0feda45dbda..a26294010c5c 100644 --- a/ogr/ogrsf_frmts/miramon/mm_gdal_functions.h +++ b/ogr/ogrsf_frmts/miramon/mm_gdal_functions.h @@ -109,7 +109,6 @@ MM_FIRST_RECORD_OFFSET_TYPE 
MM_GiveOffsetExtendedFieldName(const struct MM_FIELD *camp); struct MM_DATA_BASE_XP *MM_CreateDBFHeader(MM_EXT_DBF_N_FIELDS n_camps, MM_BYTE nCharSet); -MM_BYTE MM_DBFFieldTypeToVariableProcessing(MM_BYTE tipus_camp_DBF); void MM_ReleaseMainFields(struct MM_DATA_BASE_XP *data_base_XP); void MM_ReleaseDBFHeader(struct MM_DATA_BASE_XP *data_base_XP); MM_BOOLEAN MM_CreateDBFFile(struct MM_DATA_BASE_XP *bd_xp, @@ -134,8 +133,7 @@ int MM_SecureCopyStringFieldValue(char **pszStringDst, const char *pszStringSrc, int MM_ChangeDBFWidthField(struct MM_DATA_BASE_XP *data_base_XP, MM_EXT_DBF_N_FIELDS quincamp, MM_BYTES_PER_FIELD_TYPE_DBF novaamplada, - MM_BYTE nou_decimals, - MM_BYTE que_fer_amb_reformatat_decimals); + MM_BYTE nou_decimals); int MM_GetArcHeights(double *coord_z, FILE_TYPE *pF, MM_N_VERTICES_TYPE n_vrt, struct MM_ZD *pZDescription, uint32_t flag); diff --git a/ogr/ogrsf_frmts/miramon/mm_wrlayr.c b/ogr/ogrsf_frmts/miramon/mm_wrlayr.c index 816df6bcb580..2b22d60d6437 100644 --- a/ogr/ogrsf_frmts/miramon/mm_wrlayr.c +++ b/ogr/ogrsf_frmts/miramon/mm_wrlayr.c @@ -369,73 +369,6 @@ static int MMWriteHeader(FILE_TYPE *pF, struct MM_TH *pMMHeader) return 0; } -int MMWriteEmptyHeader(FILE_TYPE *pF, int layerType, int nVersion) -{ - struct MM_TH pMMHeader; - - memset(&pMMHeader, 0, sizeof(pMMHeader)); - switch (nVersion) - { - case MM_32BITS_VERSION: - pMMHeader.aLayerVersion[0] = '0'; - pMMHeader.aLayerVersion[1] = '1'; - pMMHeader.aLayerSubVersion = '1'; - break; - case MM_64BITS_VERSION: - case MM_LAST_VERSION: - default: - pMMHeader.aLayerVersion[0] = '0'; - pMMHeader.aLayerVersion[1] = '2'; - pMMHeader.aLayerSubVersion = '0'; - break; - } - switch (layerType) - { - case MM_LayerType_Point: - pMMHeader.aFileType[0] = 'P'; - pMMHeader.aFileType[1] = 'N'; - pMMHeader.aFileType[2] = 'T'; - break; - case MM_LayerType_Point3d: - pMMHeader.aFileType[0] = 'P'; - pMMHeader.aFileType[1] = 'N'; - pMMHeader.aFileType[2] = 'T'; - pMMHeader.bIs3d = 1; - break; - case MM_LayerType_Arc: - pMMHeader.aFileType[0] = 'A'; - pMMHeader.aFileType[1] = 'R'; - pMMHeader.aFileType[2] = 'C'; - break; - case MM_LayerType_Arc3d: - pMMHeader.aFileType[0] = 'A'; - pMMHeader.aFileType[1] = 'R'; - pMMHeader.aFileType[2] = 'C'; - pMMHeader.bIs3d = 1; - break; - case MM_LayerType_Pol: - pMMHeader.aFileType[0] = 'P'; - pMMHeader.aFileType[1] = 'O'; - pMMHeader.aFileType[2] = 'L'; - break; - case MM_LayerType_Pol3d: - pMMHeader.aFileType[0] = 'P'; - pMMHeader.aFileType[1] = 'O'; - pMMHeader.aFileType[2] = 'L'; - pMMHeader.bIs3d = 1; - break; - default: - break; - } - pMMHeader.nElemCount = 0; - pMMHeader.hBB.dfMinX = MM_UNDEFINED_STATISTICAL_VALUE; - pMMHeader.hBB.dfMaxX = -MM_UNDEFINED_STATISTICAL_VALUE; - pMMHeader.hBB.dfMinY = MM_UNDEFINED_STATISTICAL_VALUE; - pMMHeader.hBB.dfMaxY = -MM_UNDEFINED_STATISTICAL_VALUE; - - return MMWriteHeader(pF, &pMMHeader); -} - /* -------------------------------------------------------------------- */ /* Layer Functions: Z section */ /* -------------------------------------------------------------------- */ @@ -6459,8 +6392,7 @@ MMTestAndFixValueToRecordDBXP(struct MiraMonVectLayerInfo *hMiraMonLayer, if (MM_ChangeDBFWidthField( pMMAdmDB->pMMBDXP, nIField, nNewWidth, - pMMAdmDB->pMMBDXP->pField[nIField].DecimalsIfFloat, - (MM_BYTE)MM_NOU_N_DECIMALS_NO_APLICA)) + pMMAdmDB->pMMBDXP->pField[nIField].DecimalsIfFloat)) return 1; // The record on course also has to change its size. 
diff --git a/ogr/ogrsf_frmts/miramon/mm_wrlayr.h b/ogr/ogrsf_frmts/miramon/mm_wrlayr.h index c9c87e8ad765..506cad8f2a90 100644 --- a/ogr/ogrsf_frmts/miramon/mm_wrlayr.h +++ b/ogr/ogrsf_frmts/miramon/mm_wrlayr.h @@ -125,7 +125,6 @@ int MMInitLayerByType(struct MiraMonVectLayerInfo *hMiraMonLayer); int MMDestroyLayer(struct MiraMonVectLayerInfo *hMiraMonLayer); int MMCloseLayer(struct MiraMonVectLayerInfo *hMiraMonLayer); int MMReadHeader(FILE_TYPE *pF, struct MM_TH *pMMHeader); -int MMWriteEmptyHeader(FILE_TYPE *pF, int layerType, int nVersion); int MMReadAHArcSection(struct MiraMonVectLayerInfo *hMiraMonLayer); int MMReadPHPolygonSection(struct MiraMonVectLayerInfo *hMiraMonLayer); int MMReadZDescriptionHeaders(struct MiraMonVectLayerInfo *hMiraMonLayer, From 0c6df13f39ed48d1c93f57edbef52ce589c0865f Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 25 Apr 2024 18:52:38 +0200 Subject: [PATCH 189/230] Update scripts/typos_allowlist.txt [ci skip] --- scripts/typos_allowlist.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/scripts/typos_allowlist.txt b/scripts/typos_allowlist.txt index d6e0ee34aa44..be12d93cefc9 100644 --- a/scripts/typos_allowlist.txt +++ b/scripts/typos_allowlist.txt @@ -328,3 +328,5 @@ either 2 or 4 comma separated values. The same rules apply for the source and de assert f.GetField("PERIMETRE") == pytest.approx(1289.866489495, abs=1e-5) assert f.GetField("PERIMETRE") == pytest.approx(1123.514024, abs=1e-5) assert f.GetField("PERIMETRE") == pytest.approx(680.544697, abs=1e-5) + assert f["PERIMETRE"] == [3.414213562, 3.414213562] + assert f["PERIMETRE"] == [32, 32] From 8e7b158e27a025825befa143f69bfc552083dca0 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 25 Apr 2024 19:06:33 +0200 Subject: [PATCH 190/230] Shapelib: resync with upstream --- ogr/ogrsf_frmts/shape/sbnsearch.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ogr/ogrsf_frmts/shape/sbnsearch.c b/ogr/ogrsf_frmts/shape/sbnsearch.c index ea443cf784d6..769c2298049b 100644 --- a/ogr/ogrsf_frmts/shape/sbnsearch.c +++ b/ogr/ogrsf_frmts/shape/sbnsearch.c @@ -670,7 +670,7 @@ static bool SBNSearchDiskInternal(SearchStruct *psSearch, int nDepth, { free(psNode->pabyShapeDesc); psNode->pabyShapeDesc = SHPLIB_NULLPTR; - char szMessage[128]; + char szMessage[192]; snprintf( szMessage, sizeof(szMessage), "Inconsistent shape count for bin idx=%d of node %d. 
" From 8d0bd4e198eecdbcdc6990c305e454e7efe8ad8f Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 25 Apr 2024 19:14:58 +0200 Subject: [PATCH 191/230] OGRPGDecodeVersionString(): cleanup --- ogr/ogrsf_frmts/pg/ogrpgdatasource.cpp | 68 ++++++-------------------- 1 file changed, 15 insertions(+), 53 deletions(-) diff --git a/ogr/ogrsf_frmts/pg/ogrpgdatasource.cpp b/ogr/ogrsf_frmts/pg/ogrpgdatasource.cpp index e86973fbfb2a..1baed2df3d4b 100644 --- a/ogr/ogrsf_frmts/pg/ogrpgdatasource.cpp +++ b/ogr/ogrsf_frmts/pg/ogrpgdatasource.cpp @@ -140,61 +140,23 @@ CPLString OGRPGDataSource::GetCurrentSchema() void OGRPGDataSource::OGRPGDecodeVersionString(PGver *psVersion, const char *pszVer) { + // Skip leading spaces while (*pszVer == ' ') pszVer++; - - const char *ptr = pszVer; - // get Version string - while (*ptr && *ptr != ' ') - ptr++; - GUInt32 iLen = static_cast<int>(ptr - pszVer); - char szVer[10] = {}; - if (iLen > sizeof(szVer) - 1) - iLen = sizeof(szVer) - 1; - strncpy(szVer, pszVer, iLen); - szVer[iLen] = '\0'; - - ptr = pszVer = szVer; - - // get Major number - while (*ptr && *ptr != '.') - ptr++; - iLen = static_cast<int>(ptr - pszVer); - char szNum[25] = {}; - if (iLen > sizeof(szNum) - 1) - iLen = sizeof(szNum) - 1; - strncpy(szNum, pszVer, iLen); - szNum[iLen] = '\0'; - psVersion->nMajor = atoi(szNum); - - if (*ptr == 0) - return; - pszVer = ++ptr; - - // get Minor number - while (*ptr && *ptr != '.') - ptr++; - iLen = static_cast<int>(ptr - pszVer); - if (iLen > sizeof(szNum) - 1) - iLen = sizeof(szNum) - 1; - strncpy(szNum, pszVer, iLen); - szNum[iLen] = '\0'; - psVersion->nMinor = atoi(szNum); - - if (*ptr) - { - pszVer = ++ptr; - - // get Release number - while (*ptr && *ptr != '.') - ptr++; - iLen = static_cast<int>(ptr - pszVer); - if (iLen > sizeof(szNum) - 1) - iLen = sizeof(szNum) - 1; - strncpy(szNum, pszVer, iLen); - szNum[iLen] = '\0'; - psVersion->nRelease = atoi(szNum); - } + std::string osVer(pszVer); + // And truncate at the first space + const auto nPosSpace = osVer.find(' '); + if (nPosSpace != std::string::npos) + osVer.resize(nPosSpace); + + memset(psVersion, 0, sizeof(*psVersion)); + const CPLStringList aosTokens(CSLTokenizeString2(osVer.c_str(), ".", 0)); + if (aosTokens.size() >= 1) + psVersion->nMajor = atoi(aosTokens[0]); + if (aosTokens.size() >= 2) + psVersion->nMinor = atoi(aosTokens[1]); + if (aosTokens.size() >= 3) + psVersion->nRelease = atoi(aosTokens[2]); } /************************************************************************/ From 6abe8985e29c29ea3610008858c08ca246b817bf Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 25 Apr 2024 20:13:18 +0200 Subject: [PATCH 192/230] swig/python/README.rst: document workaround if numpy 2 cannot be installed --- swig/python/README.rst | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/swig/python/README.rst b/swig/python/README.rst index c7286e95c433..91790c7d093d 100644 --- a/swig/python/README.rst +++ b/swig/python/README.rst @@ -83,6 +83,21 @@ Verify that the necessary dependencies have been installed and then run the foll pip install --no-cache --force-reinstall gdal[numpy]=="$(gdal-config --version).*" +Potential issues with GDAL >= 3.9, Python >= 3.9 and NumPy 2.0 +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The pyproject.toml file of GDAL 3.9 requires numpy >= 2.0.0rc1 (for Python >= 3.9) +at build time to be able to build bindings that are compatible of both NumPy 1 +and NumPy 2. 
+If for some reason the numpy >= 2.0.0rc1 build dependency can not be installed, +it is possible to manually install the build requirements, and invoke ``pip install`` +with the ``--no-build-isolation`` flag. + +:: + + pip install numpy==<required_version> wheel setuptools>=67 + pip install gdal[numpy]=="$(gdal-config --version).*" --no-build-isolation + Building as part of the GDAL library source tree ------------------------------------------------ From cac773c2351a432e828fdaba813c475fc03533fa Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 25 Apr 2024 20:13:34 +0200 Subject: [PATCH 193/230] source/api/python_bindings.rst: resync with swig/python/README.rst --- doc/source/api/python_bindings.rst | 53 ++++++++++++++++++++++++++---- 1 file changed, 47 insertions(+), 6 deletions(-) diff --git a/doc/source/api/python_bindings.rst b/doc/source/api/python_bindings.rst index 901507d8f36a..0604335dddf9 100644 --- a/doc/source/api/python_bindings.rst +++ b/doc/source/api/python_bindings.rst @@ -49,17 +49,58 @@ GDAL can be installed from the `Python Package Index <https://pypi.org/project/G :: - pip install GDAL + pip install gdal -It will be necessary to have libgdal and its development headers installed -if pip is expected to do a source build because no wheel is available -for your specified platform and Python version. -To install the version of the Python bindings matching your native GDAL library: +In order to enable numpy-based raster support, libgdal and its development headers must be installed as well as the Python packages numpy, setuptools, and wheel. +To install the Python dependencies and build numpy-based raster support: + + +:: + + pip install numpy>1.0.0 wheel setuptools>=67 + pip install gdal[numpy]=="$(gdal-config --version).*" + + +Users can verify that numpy-based raster support has been installed with: + +:: + + python3 -c 'from osgeo import gdal_array' + + +If this command raises an ImportError, numpy-based raster support has not been properly installed: + +:: + + Traceback (most recent call last): + File "<string>", line 1, in <module> + File "/usr/local/lib/python3.12/dist-packages/osgeo/gdal_array.py", line 10, in <module> + from . import _gdal_array + ImportError: cannot import name '_gdal_array' from 'osgeo' (/usr/local/lib/python3.12/dist-packages/osgeo/__init__.py) + + +This is most often due to pip reusing a cached GDAL installation. +Verify that the necessary dependencies have been installed and then run the following to force a clean build: + +:: + pip install --no-cache --force-reinstall gdal[numpy]=="$(gdal-config --version).*" + + +Potential issues with GDAL >= 3.9, Python >= 3.9 and NumPy 2.0 +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The pyproject.toml file of GDAL 3.9 requires numpy >= 2.0.0rc1 (for Python >= 3.9) +at build time to be able to build bindings that are compatible of both NumPy 1 +and NumPy 2. +If for some reason the numpy >= 2.0.0rc1 build dependency can not be installed, +it is possible to manually install the build requirements, and invoke ``pip install`` +with the ``--no-build-isolation`` flag. 
:: - pip install GDAL=="$(gdal-config --version).*" + pip install numpy==<required_version> wheel setuptools>=67 + pip install gdal[numpy]=="$(gdal-config --version).*" --no-build-isolation Building as part of the GDAL library source tree From d9c291b1af65a031d14e28725ea880b41d169b9c Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Thu, 25 Apr 2024 20:21:34 +0200 Subject: [PATCH 194/230] docker/ubuntu-full/Dockerfile: re-enable QB3 [ci skip] --- docker/ubuntu-full/Dockerfile | 23 +++++++++++------------ docker/ubuntu-full/bh-set-envvars.sh | 2 ++ 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/docker/ubuntu-full/Dockerfile b/docker/ubuntu-full/Dockerfile index 306663f48611..8ecd42b0aa85 100644 --- a/docker/ubuntu-full/Dockerfile +++ b/docker/ubuntu-full/Dockerfile @@ -188,19 +188,18 @@ RUN . /buildscripts/bh-set-envvars.sh \ && rm -rf FileGDB_API-RHEL7-64gcc83.tar.gz \ ) ; fi -# DISABLED: cf https://github.com/lucianpls/QB3/issues/16 # Build libqb3 -#RUN . /buildscripts/bh-set-envvars.sh \ -# && git clone https://github.com/lucianpls/QB3.git \ -# && cd QB3/QB3lib \ -# && mkdir build \ -# && cd build \ -# && cmake -DCMAKE_INSTALL_PREFIX=/usr -DCMAKE_BUILD_TYPE=Release .. \ -# && make -j$(nproc) \ -# && make -j$(nproc) install \ -# && make install DESTDIR="/build_thirdparty" \ -# && cd ../../.. \ -# && rm -rf QB3 +RUN . /buildscripts/bh-set-envvars.sh \ + && git clone https://github.com/lucianpls/QB3.git \ + && cd QB3/QB3lib \ + && mkdir build \ + && cd build \ + && cmake .. ${CMAKE_EXTRA_ARGS} -DCMAKE_INSTALL_PREFIX=/usr -DCMAKE_BUILD_TYPE=Release \ + && make -j$(nproc) \ + && make -j$(nproc) install \ + && make install DESTDIR="/build_thirdparty" \ + && cd ../../.. \ + && rm -rf QB3 ARG WITH_PDFIUM=yes RUN if echo "$WITH_PDFIUM" | grep -Eiq "^(y(es)?|1|true)$" ; then ( \ diff --git a/docker/ubuntu-full/bh-set-envvars.sh b/docker/ubuntu-full/bh-set-envvars.sh index 36d8f5dd1906..e64393454f26 100644 --- a/docker/ubuntu-full/bh-set-envvars.sh +++ b/docker/ubuntu-full/bh-set-envvars.sh @@ -12,9 +12,11 @@ if test "${TARGET_ARCH:-}" != ""; then export CC=${GCC_ARCH}-linux-gnu-gcc-13 export CXX=${GCC_ARCH}-linux-gnu-g++-13 export WITH_HOST="--host=${GCC_ARCH}-linux-gnu" + export CMAKE_EXTRA_ARGS=" -DCMAKE_SYSTEM_PROCESSOR=${TARGET_ARCH} " else export APT_ARCH_SUFFIX="" export WITH_HOST="" GCC_ARCH="$(uname -m)" export GCC_ARCH + export CMAKE_EXTRA_ARGS="" fi From 7cbe1e2042113ce3aaab1a950b972487d3d7a5bb Mon Sep 17 00:00:00 2001 From: Oleksii Leonov <mail@oleksiileonov.com> Date: Thu, 25 Apr 2024 16:48:09 -0300 Subject: [PATCH 195/230] docs: added a link to Ruby language GDAL bindings https://github.com/telus-agcg/ffi-gdal - active project, supports Ruby 2.6-3.3 and GDAL 2.4, GDAL 3+; - maintained and supported by a couple of big agriculture companies; - originated in 2015; - provides both low-level FFI and high-level idiomatic Ruby wrapper. --- doc/source/api/index.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/source/api/index.rst b/doc/source/api/index.rst index d16790de2fa0..127aea2f66a1 100644 --- a/doc/source/api/index.rst +++ b/doc/source/api/index.rst @@ -123,6 +123,7 @@ API Perl <https://metacpan.org/release/Geo-GDAL-FFI> PHP <http://dl.maptools.org/dl/php_ogr/php_ogr_documentation.html> R <https://cran.r-project.org/web/packages/gdalraster/index.html> + Ruby <https://github.com/telus-agcg/ffi-gdal> Rust <https://github.com/georust/gdal> .. 
warning:: From d4a83318ac41484463559c2ef02e8f588d01daa1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=A9=8D=E4=B8=B9=E5=B0=BC=20Dan=20Jacobson?= <jidanni@jidanni.org> Date: Fri, 26 Apr 2024 17:37:48 +0800 Subject: [PATCH 196/230] Update ogr_sql_dialect.rst to give orginfo clue (Maybe there is also a gdalinfo clue that should be added too?) --- doc/source/user/ogr_sql_dialect.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/source/user/ogr_sql_dialect.rst b/doc/source/user/ogr_sql_dialect.rst index 46e681897737..bcf02b195945 100644 --- a/doc/source/user/ogr_sql_dialect.rst +++ b/doc/source/user/ogr_sql_dialect.rst @@ -565,7 +565,8 @@ The OGR SQL dialect adds the geometry field of the datasource to the result set by default. Users do not need to select the geometry explicitly but it is still possible to do so. Common use case is when geometry is the only field that is needed. In this case the name of the geometry field to be used in the SQL statement is the -name returned by :cpp:func:`OGRLayer::GetGeometryColumn`. If the method returns +name returned by :cpp:func:`OGRLayer::GetGeometryColumn`, and also +"Geometry Column = ..." in :program:`ogrinfo` output. If the method returns an empty string then a special name "_ogr_geometry_" must be used. The name begins with an underscore and SQL syntax requires that it must appear between double quotes. In addition the command line interpreter may require that double quotes are escaped From 5a3947fb4499543755e692ed1e4fb40846732b45 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=A9=8D=E4=B8=B9=E5=B0=BC=20Dan=20Jacobson?= <jidanni@jidanni.org> Date: Fri, 26 Apr 2024 17:49:45 +0800 Subject: [PATCH 197/230] Update l1b.rst adding verb --- doc/source/drivers/raster/l1b.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/drivers/raster/l1b.rst b/doc/source/drivers/raster/l1b.rst index 4d964b8fef33..424962f44b5d 100644 --- a/doc/source/drivers/raster/l1b.rst +++ b/doc/source/drivers/raster/l1b.rst @@ -24,7 +24,7 @@ Driver capabilities Georeference ------------ -Note, that GDAL simple affine georeference model completely unsuitable +Note that GDAL simple affine georeference model is completely unsuitable for the NOAA data. So you should not rely on it. It is recommended to use the thin plate spline warper (tps). Automatic image rectification can be done with ground control points (GCPs) from the input file. From 33c2d8e04027388a5c0927fac69a3daf4f80e02c Mon Sep 17 00:00:00 2001 From: AbelPau <a.pau@creaf.uab.cat> Date: Fri, 26 Apr 2024 11:50:55 +0200 Subject: [PATCH 198/230] MiraMonVector driver. Adding tests * Relation between polygons and its linestrings * Test that a field cannot be added after feature creation. 
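For context, the three new datasets added below essentially differ only in how the polygon metadata file (SimplePolFileP.rel) references its arc file: Polygons/SimplePolygonsCycleNoExt sets ArcSource=SimplePolFile without the .arc extension and must still open normally, CorruptedFiles/InexistentCycle1 leaves ArcSource empty, and CorruptedFiles/InexistentCycle2 omits the [OVERVIEW:ASPECTES_TECNICS] section altogether. A minimal sketch of the behaviour the new tests assert (the real assertions live in autotest/ogr/ogr_miramon_vector.py further down); this assumes the GDAL Python bindings with exceptions enabled:

```
# Sketch only: see test_ogr_miramon_read_simple_polygon_no_ext() and
# test_ogr_miramon_corrupted_files() in the diff below for the actual checks.
from osgeo import gdal

gdal.UseExceptions()

# ArcSource given without the .arc extension: opens like the dataset with extension
ds = gdal.OpenEx(
    "data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFile.pol", gdal.OF_VECTOR
)
assert ds is not None

# Empty or missing ArcSource in SimplePolFileP.rel: opening is expected to fail,
# with the error messages asserted in the parametrized test below
for path in (
    "data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFile.pol",
    "data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFile.pol",
):
    try:
        gdal.OpenEx(path, gdal.OF_VECTOR)
    except RuntimeError as exc:
        print(path, "->", exc)
```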
--- .../InexistentCycle1/SimplePolFile.arc | Bin 0 -> 536 bytes .../InexistentCycle1/SimplePolFile.nod | Bin 0 -> 92 bytes .../InexistentCycle1/SimplePolFile.pol | Bin 0 -> 349 bytes .../InexistentCycle1/SimplePolFileA.dbf | Bin 0 -> 511 bytes .../InexistentCycle1/SimplePolFileA.rel | 89 +++++++++++++++++ .../InexistentCycle1/SimplePolFileN.dbf | Bin 0 -> 261 bytes .../InexistentCycle1/SimplePolFileN.rel | 64 ++++++++++++ .../InexistentCycle1/SimplePolFileP.dbf | Bin 0 -> 729 bytes .../InexistentCycle1/SimplePolFileP.rel | 93 ++++++++++++++++++ .../InexistentCycle2/SimplePolFile.arc | Bin 0 -> 536 bytes .../InexistentCycle2/SimplePolFile.nod | Bin 0 -> 92 bytes .../InexistentCycle2/SimplePolFile.pol | Bin 0 -> 349 bytes .../InexistentCycle2/SimplePolFileA.dbf | Bin 0 -> 511 bytes .../InexistentCycle2/SimplePolFileA.rel | 89 +++++++++++++++++ .../InexistentCycle2/SimplePolFileN.dbf | Bin 0 -> 261 bytes .../InexistentCycle2/SimplePolFileN.rel | 64 ++++++++++++ .../InexistentCycle2/SimplePolFileP.dbf | Bin 0 -> 729 bytes .../InexistentCycle2/SimplePolFileP.rel | 90 +++++++++++++++++ .../SimplePolFile.arc | Bin 0 -> 536 bytes .../SimplePolFile.nod | Bin 0 -> 92 bytes .../SimplePolFile.pol | Bin 0 -> 349 bytes .../SimplePolFileA.dbf | Bin 0 -> 511 bytes .../SimplePolFileA.rel | 89 +++++++++++++++++ .../SimplePolFileN.dbf | Bin 0 -> 261 bytes .../SimplePolFileN.rel | 64 ++++++++++++ .../SimplePolFileP.dbf | Bin 0 -> 729 bytes .../SimplePolFileP.rel | 93 ++++++++++++++++++ autotest/ogr/ogr_miramon_vector.py | 44 ++++++++- 28 files changed, 778 insertions(+), 1 deletion(-) create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFile.arc create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFile.nod create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFile.pol create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileA.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileA.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileN.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileN.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileP.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileP.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFile.arc create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFile.nod create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFile.pol create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileA.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileA.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileN.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileN.rel create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileP.dbf create mode 100644 autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileP.rel create mode 100644 autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFile.arc create mode 100644 autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFile.nod create mode 100644 
autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFile.pol create mode 100644 autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileA.dbf create mode 100644 autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileA.rel create mode 100644 autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileN.dbf create mode 100644 autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileN.rel create mode 100644 autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileP.dbf create mode 100644 autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileP.rel diff --git a/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFile.arc b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFile.arc new file mode 100644 index 0000000000000000000000000000000000000000..431d702a3a3540257dfdffce360f123848d1eeb8 GIT binary patch literal 536 zcmZ<^a#k?ZGi2E98#`I@Yl*|Y+{;szD@=CS(bD|peQJe+oy>ifmSbHG%nT3!Qx`GM zUGJG~heHqB*RaOxb#QfTKxH?eYC%-nGFH!W^(hYSdv8Sre(Qvr|LB0;fx_u^4xaNr zCY1GcIdA~gS}-y&01+6weNBkr>znA{yGPA9OQp%dR8&BU|5*pzT}L-DR<}-Rasb)4 z1E`4!NQ1DHuuoUCOREFSoy(sY)!yRmaHu{}u-Mq85$;Zy`;T~ST9YW-=1{=1XUjX6 z7Px;txb6*opjzRu;BL<4DeCnY=C!#Uo5sIk0^I$V@=Mw{p7%H?Xf64*P_-TIU%uVd zKAU`+9d=kPniM47=K%Bfnxh;csREN6-p-uN_CLD;9xj3#+rAovwmOK1@3PEPnB>6k zX|B$DufqY|zE6uM1zmpvG#46Auy6`G`>G`2PoqP}O}kqSbK4wX;RFjmfwL6`*4t|x X(gN6zt#oa7FnhbkkNsGM1G;$t@$uhR literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFile.nod b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFile.nod new file mode 100644 index 0000000000000000000000000000000000000000..e5d310c3c0cee2ef29d314c8b4e269060b4f902e GIT binary patch literal 92 zcmebCcTq6ZGi2E98#`I@Yl%af+p%f<8zwmHXlZ`)KDENZbN<JKvc4_{W(EjgWME?O V05U;z0F;h^%7G|GD4PjL0|5OV62Jfe literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFile.pol b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFile.pol new file mode 100644 index 0000000000000000000000000000000000000000..0a2e4736fe6478f5c622ee571b3c8a07b4443c76 GIT binary patch literal 349 zcmWIW_fas^GgR2^8#`I@Yl*|Y+{;szD@=CS(bD|peQJe+oy>ifmSbHGEDQ+12xKxL zxD3odHX;2;${9RB2A~0${)l<*de3Y-9D3NkhBaQVgWC_%0>dEvZOd3a%hjhiG>W+N z7MK2ZaNm0?D)3t;-2IOZ=p87WUgzLB|6@W~UzY<+FNh8R5^i4;V)*(dIxIVRca6>b zuMWO@)Qq!KnjB0;1*G_&b-?|9bQ5EB>y#!G{SiR@Qo=r6(JrkHr@l^&Y`%Ke0Tc!x Nzz(ti2$(?u1OVn<W$pj~ literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileA.dbf b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileA.dbf new file mode 100644 index 0000000000000000000000000000000000000000..22d64985be2349156fb209f77c88885217bf9fc4 GIT binary patch literal 511 zcmZ{fK?=e!5Jd+H-MQ19i<}@dF^%c6*t7&Bg<8a2_Flo$c{5uPzb)e|!soxd3CSM$ zJ|m(h-KTRkTcXgUZm8REB8oHJt**p$^~30Imp}a6$1c^wq<{Q;xghq9Pa%dcUt)Um zcE<es8^KM%l@_Jau9D7L<0_O$tj?A~!9sF~O6h|vl_|7AiNxw`DKspkpFauf3RxOs Kt0OoPtLqo7Pdilr literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileA.rel b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileA.rel new file mode 100644 index 000000000000..355451718486 --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileA.rel @@ -0,0 +1,89 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 
16204654+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=0e09e6cf-0d31-499e-bcf0-bee4d3e9d87a_SimplePolFileA + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=0e09e6cf-0d31-499e-bcf0-bee4d3e9d87a_SimplePolFileA +codeSpace= +DatasetTitle=Simple Pol File [píxels] + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemDefinition=Local +HorizontalSystemIdentifier=plane +unitats=STB#T_pixels +unitatsY=? + +[EXTENT] +toler_env=0 +MinX=335.3187440533326 +MaxX=1224.163653663228 +MinY=390.371075166458 +MaxY=856.814462416696 + +[OVERVIEW] +CreationDate=20230628 16204653+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:N_VERTEXS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de vèrtexs + +[TAULA_PRINCIPAL:LONG_ARC] +descriptor=Longitud de l'arc + +[TAULA_PRINCIPAL:NODE_INI] +visible=0 +MostrarUnitats=0 +descriptor=Node inicial + +[TAULA_PRINCIPAL:NODE_FI] +visible=0 +MostrarUnitats=0 +descriptor=Node final + +[OVERVIEW:ASPECTES_TECNICS] +Ciclat1=SimplePolFile.pol + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204654+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampLongitudArc=LONG_ARC +NomCampNodeIni=NODE_INI +NomCampNodeFi=NODE_FI diff --git a/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileN.dbf b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileN.dbf new file mode 100644 index 0000000000000000000000000000000000000000..19847ee7684ceccc4001218201034fe3e646fd67 GIT binary patch literal 261 zcmZRsW|LuNU|?uu&;gQYK!kz8(<R<L$kENynSsF%BrA%h#4*S@INmYd&))^Re28a2 kXfR0H6(Y}wrk_^<8!*5oXlR5@RKXCNC=Rt)O)xY903eGTUH||9 literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileN.rel b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileN.rel new file mode 100644 index 000000000000..c0f476e1f0de --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileN.rel @@ -0,0 +1,64 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16204653+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=701e2102-a0ba-4cb2-aeb5-bdb329c79868_SimplePolFileN + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=701e2102-a0ba-4cb2-aeb5-bdb329c79868_SimplePolFileN +codeSpace= +DatasetTitle=Simple Pol File [píxels] + +[EXTENT] +toler_env=0 + +[OVERVIEW] +CreationDate=20230628 16204652+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:ARCS_A_NOD] +MostrarUnitats=0 +descriptor=Nombre d'arcs al node + +[TAULA_PRINCIPAL:TIPUS_NODE] +MostrarUnitats=0 +descriptor=Tipus de node + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204653+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau 
+PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampArcsANode=ARCS_A_NOD +NomCampTipusNode=TIPUS_NODE diff --git a/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileP.dbf b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileP.dbf new file mode 100644 index 0000000000000000000000000000000000000000..5d00a81ae4a44cefb1edfafaf085f49073b42336 GIT binary patch literal 729 zcma)&OHRWu5QYa8b%8)^U~ev9+2cp-rAbW{sf5Zzh+S^L2{=4%<V+rFBxaHM=l|v* zXZO3?-vYqP%}@TW;t8NR^v4w5ni`<J(7RPj+keH>#qZAzevYYmk6ns|Kkt_dLW-d{ z!~QjI`;h9qJjcfU^T(+<Zt~D|a+n6lPXM?t#h5b3q6hvsf|4H?g}9<Fmf&It!dK0$ z>BZEnkfbdsCeeBBtkuqsrj)2F$q}WWOg2r+Fm~qrI@1e@R5?;CuU6bhEl`u`oUw{m p9#Kr@>{?Boj2&irAr)1#w=wGkH(Fa?5mv&O{Cf>5J<a^@+8-ctQn>&C literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileP.rel b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileP.rel new file mode 100644 index 000000000000..440dc6ff801b --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFileP.rel @@ -0,0 +1,93 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16204988+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=00691677-6d15-40f8-9d62-e8df34876e80_SimplePolFileP + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code= +codeSpace= +DatasetTitle=Simple Pol File + +[OVERVIEW:ASPECTES_TECNICS] +ArcSource= + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204988+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[EXTENT] +toler_env=0 +MinX=335.318744053333 +MaxX=1224.16365366323 +MinY=390.371075166458 +MaxY=856.814462416696 + +[OVERVIEW] +CreationDate=20230628 16204986+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +descriptor=Identificador Gràfic intern +visible=0 +TractamentVariable=Ordinal + +[TAULA_PRINCIPAL:N_VERTEXS] +descriptor=Nombre de vèrtexs +visible=0 +MostrarUnitats=0 + +[TAULA_PRINCIPAL:PERIMETRE] +descriptor=Perímetre del polígon + +[TAULA_PRINCIPAL:AREA] +descriptor=Àrea del polígon + +[TAULA_PRINCIPAL:N_ARCS] +descriptor=Nombre d'arcs +visible=0 +MostrarUnitats=0 + +[TAULA_PRINCIPAL:N_POLIG] +descriptor=Nombre de polígons elementals +visible=0 +MostrarUnitats=0 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampPerimetre=PERIMETRE +NomCampArea=AREA +NomCampNArcs=N_ARCS +NomCampNPoligons=N_POLIG + +[TAULA_PRINCIPAL:ATT1] +descriptor=atribute1 + +[TAULA_PRINCIPAL:ATT2] +descriptor=atribute2 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFile.arc b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFile.arc new file mode 100644 index 0000000000000000000000000000000000000000..431d702a3a3540257dfdffce360f123848d1eeb8 GIT binary patch literal 536 zcmZ<^a#k?ZGi2E98#`I@Yl*|Y+{;szD@=CS(bD|peQJe+oy>ifmSbHG%nT3!Qx`GM zUGJG~heHqB*RaOxb#QfTKxH?eYC%-nGFH!W^(hYSdv8Sre(Qvr|LB0;fx_u^4xaNr zCY1GcIdA~gS}-y&01+6weNBkr>znA{yGPA9OQp%dR8&BU|5*pzT}L-DR<}-Rasb)4 z1E`4!NQ1DHuuoUCOREFSoy(sY)!yRmaHu{}u-Mq85$;Zy`;T~ST9YW-=1{=1XUjX6 
z7Px;txb6*opjzRu;BL<4DeCnY=C!#Uo5sIk0^I$V@=Mw{p7%H?Xf64*P_-TIU%uVd zKAU`+9d=kPniM47=K%Bfnxh;csREN6-p-uN_CLD;9xj3#+rAovwmOK1@3PEPnB>6k zX|B$DufqY|zE6uM1zmpvG#46Auy6`G`>G`2PoqP}O}kqSbK4wX;RFjmfwL6`*4t|x X(gN6zt#oa7FnhbkkNsGM1G;$t@$uhR literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFile.nod b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFile.nod new file mode 100644 index 0000000000000000000000000000000000000000..e5d310c3c0cee2ef29d314c8b4e269060b4f902e GIT binary patch literal 92 zcmebCcTq6ZGi2E98#`I@Yl%af+p%f<8zwmHXlZ`)KDENZbN<JKvc4_{W(EjgWME?O V05U;z0F;h^%7G|GD4PjL0|5OV62Jfe literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFile.pol b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFile.pol new file mode 100644 index 0000000000000000000000000000000000000000..0a2e4736fe6478f5c622ee571b3c8a07b4443c76 GIT binary patch literal 349 zcmWIW_fas^GgR2^8#`I@Yl*|Y+{;szD@=CS(bD|peQJe+oy>ifmSbHGEDQ+12xKxL zxD3odHX;2;${9RB2A~0${)l<*de3Y-9D3NkhBaQVgWC_%0>dEvZOd3a%hjhiG>W+N z7MK2ZaNm0?D)3t;-2IOZ=p87WUgzLB|6@W~UzY<+FNh8R5^i4;V)*(dIxIVRca6>b zuMWO@)Qq!KnjB0;1*G_&b-?|9bQ5EB>y#!G{SiR@Qo=r6(JrkHr@l^&Y`%Ke0Tc!x Nzz(ti2$(?u1OVn<W$pj~ literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileA.dbf b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileA.dbf new file mode 100644 index 0000000000000000000000000000000000000000..22d64985be2349156fb209f77c88885217bf9fc4 GIT binary patch literal 511 zcmZ{fK?=e!5Jd+H-MQ19i<}@dF^%c6*t7&Bg<8a2_Flo$c{5uPzb)e|!soxd3CSM$ zJ|m(h-KTRkTcXgUZm8REB8oHJt**p$^~30Imp}a6$1c^wq<{Q;xghq9Pa%dcUt)Um zcE<es8^KM%l@_Jau9D7L<0_O$tj?A~!9sF~O6h|vl_|7AiNxw`DKspkpFauf3RxOs Kt0OoPtLqo7Pdilr literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileA.rel b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileA.rel new file mode 100644 index 000000000000..355451718486 --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileA.rel @@ -0,0 +1,89 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16204654+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=0e09e6cf-0d31-499e-bcf0-bee4d3e9d87a_SimplePolFileA + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=0e09e6cf-0d31-499e-bcf0-bee4d3e9d87a_SimplePolFileA +codeSpace= +DatasetTitle=Simple Pol File [píxels] + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemDefinition=Local +HorizontalSystemIdentifier=plane +unitats=STB#T_pixels +unitatsY=? 
+ +[EXTENT] +toler_env=0 +MinX=335.3187440533326 +MaxX=1224.163653663228 +MinY=390.371075166458 +MaxY=856.814462416696 + +[OVERVIEW] +CreationDate=20230628 16204653+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:N_VERTEXS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de vèrtexs + +[TAULA_PRINCIPAL:LONG_ARC] +descriptor=Longitud de l'arc + +[TAULA_PRINCIPAL:NODE_INI] +visible=0 +MostrarUnitats=0 +descriptor=Node inicial + +[TAULA_PRINCIPAL:NODE_FI] +visible=0 +MostrarUnitats=0 +descriptor=Node final + +[OVERVIEW:ASPECTES_TECNICS] +Ciclat1=SimplePolFile.pol + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204654+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampLongitudArc=LONG_ARC +NomCampNodeIni=NODE_INI +NomCampNodeFi=NODE_FI diff --git a/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileN.dbf b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileN.dbf new file mode 100644 index 0000000000000000000000000000000000000000..19847ee7684ceccc4001218201034fe3e646fd67 GIT binary patch literal 261 zcmZRsW|LuNU|?uu&;gQYK!kz8(<R<L$kENynSsF%BrA%h#4*S@INmYd&))^Re28a2 kXfR0H6(Y}wrk_^<8!*5oXlR5@RKXCNC=Rt)O)xY903eGTUH||9 literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileN.rel b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileN.rel new file mode 100644 index 000000000000..c0f476e1f0de --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileN.rel @@ -0,0 +1,64 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16204653+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=701e2102-a0ba-4cb2-aeb5-bdb329c79868_SimplePolFileN + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=701e2102-a0ba-4cb2-aeb5-bdb329c79868_SimplePolFileN +codeSpace= +DatasetTitle=Simple Pol File [píxels] + +[EXTENT] +toler_env=0 + +[OVERVIEW] +CreationDate=20230628 16204652+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:ARCS_A_NOD] +MostrarUnitats=0 +descriptor=Nombre d'arcs al node + +[TAULA_PRINCIPAL:TIPUS_NODE] +MostrarUnitats=0 +descriptor=Tipus de node + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204653+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampArcsANode=ARCS_A_NOD +NomCampTipusNode=TIPUS_NODE diff --git a/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileP.dbf b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileP.dbf new file mode 100644 index 0000000000000000000000000000000000000000..5d00a81ae4a44cefb1edfafaf085f49073b42336 GIT binary patch literal 729 
zcma)&OHRWu5QYa8b%8)^U~ev9+2cp-rAbW{sf5Zzh+S^L2{=4%<V+rFBxaHM=l|v* zXZO3?-vYqP%}@TW;t8NR^v4w5ni`<J(7RPj+keH>#qZAzevYYmk6ns|Kkt_dLW-d{ z!~QjI`;h9qJjcfU^T(+<Zt~D|a+n6lPXM?t#h5b3q6hvsf|4H?g}9<Fmf&It!dK0$ z>BZEnkfbdsCeeBBtkuqsrj)2F$q}WWOg2r+Fm~qrI@1e@R5?;CuU6bhEl`u`oUw{m p9#Kr@>{?Boj2&irAr)1#w=wGkH(Fa?5mv&O{Cf>5J<a^@+8-ctQn>&C literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileP.rel b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileP.rel new file mode 100644 index 000000000000..5377f858fe96 --- /dev/null +++ b/autotest/ogr/data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFileP.rel @@ -0,0 +1,90 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16204988+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=00691677-6d15-40f8-9d62-e8df34876e80_SimplePolFileP + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code= +codeSpace= +DatasetTitle=Simple Pol File + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204988+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[EXTENT] +toler_env=0 +MinX=335.318744053333 +MaxX=1224.16365366323 +MinY=390.371075166458 +MaxY=856.814462416696 + +[OVERVIEW] +CreationDate=20230628 16204986+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +descriptor=Identificador Gràfic intern +visible=0 +TractamentVariable=Ordinal + +[TAULA_PRINCIPAL:N_VERTEXS] +descriptor=Nombre de vèrtexs +visible=0 +MostrarUnitats=0 + +[TAULA_PRINCIPAL:PERIMETRE] +descriptor=Perímetre del polígon + +[TAULA_PRINCIPAL:AREA] +descriptor=Àrea del polígon + +[TAULA_PRINCIPAL:N_ARCS] +descriptor=Nombre d'arcs +visible=0 +MostrarUnitats=0 + +[TAULA_PRINCIPAL:N_POLIG] +descriptor=Nombre de polígons elementals +visible=0 +MostrarUnitats=0 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampPerimetre=PERIMETRE +NomCampArea=AREA +NomCampNArcs=N_ARCS +NomCampNPoligons=N_POLIG + +[TAULA_PRINCIPAL:ATT1] +descriptor=atribute1 + +[TAULA_PRINCIPAL:ATT2] +descriptor=atribute2 diff --git a/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFile.arc b/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFile.arc new file mode 100644 index 0000000000000000000000000000000000000000..431d702a3a3540257dfdffce360f123848d1eeb8 GIT binary patch literal 536 zcmZ<^a#k?ZGi2E98#`I@Yl*|Y+{;szD@=CS(bD|peQJe+oy>ifmSbHG%nT3!Qx`GM zUGJG~heHqB*RaOxb#QfTKxH?eYC%-nGFH!W^(hYSdv8Sre(Qvr|LB0;fx_u^4xaNr zCY1GcIdA~gS}-y&01+6weNBkr>znA{yGPA9OQp%dR8&BU|5*pzT}L-DR<}-Rasb)4 z1E`4!NQ1DHuuoUCOREFSoy(sY)!yRmaHu{}u-Mq85$;Zy`;T~ST9YW-=1{=1XUjX6 z7Px;txb6*opjzRu;BL<4DeCnY=C!#Uo5sIk0^I$V@=Mw{p7%H?Xf64*P_-TIU%uVd zKAU`+9d=kPniM47=K%Bfnxh;csREN6-p-uN_CLD;9xj3#+rAovwmOK1@3PEPnB>6k zX|B$DufqY|zE6uM1zmpvG#46Auy6`G`>G`2PoqP}O}kqSbK4wX;RFjmfwL6`*4t|x X(gN6zt#oa7FnhbkkNsGM1G;$t@$uhR literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFile.nod b/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFile.nod new file mode 100644 index 0000000000000000000000000000000000000000..e5d310c3c0cee2ef29d314c8b4e269060b4f902e GIT binary patch literal 92 
zcmebCcTq6ZGi2E98#`I@Yl%af+p%f<8zwmHXlZ`)KDENZbN<JKvc4_{W(EjgWME?O V05U;z0F;h^%7G|GD4PjL0|5OV62Jfe literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFile.pol b/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFile.pol new file mode 100644 index 0000000000000000000000000000000000000000..0a2e4736fe6478f5c622ee571b3c8a07b4443c76 GIT binary patch literal 349 zcmWIW_fas^GgR2^8#`I@Yl*|Y+{;szD@=CS(bD|peQJe+oy>ifmSbHGEDQ+12xKxL zxD3odHX;2;${9RB2A~0${)l<*de3Y-9D3NkhBaQVgWC_%0>dEvZOd3a%hjhiG>W+N z7MK2ZaNm0?D)3t;-2IOZ=p87WUgzLB|6@W~UzY<+FNh8R5^i4;V)*(dIxIVRca6>b zuMWO@)Qq!KnjB0;1*G_&b-?|9bQ5EB>y#!G{SiR@Qo=r6(JrkHr@l^&Y`%Ke0Tc!x Nzz(ti2$(?u1OVn<W$pj~ literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileA.dbf b/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileA.dbf new file mode 100644 index 0000000000000000000000000000000000000000..22d64985be2349156fb209f77c88885217bf9fc4 GIT binary patch literal 511 zcmZ{fK?=e!5Jd+H-MQ19i<}@dF^%c6*t7&Bg<8a2_Flo$c{5uPzb)e|!soxd3CSM$ zJ|m(h-KTRkTcXgUZm8REB8oHJt**p$^~30Imp}a6$1c^wq<{Q;xghq9Pa%dcUt)Um zcE<es8^KM%l@_Jau9D7L<0_O$tj?A~!9sF~O6h|vl_|7AiNxw`DKspkpFauf3RxOs Kt0OoPtLqo7Pdilr literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileA.rel b/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileA.rel new file mode 100644 index 000000000000..355451718486 --- /dev/null +++ b/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileA.rel @@ -0,0 +1,89 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16204654+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=0e09e6cf-0d31-499e-bcf0-bee4d3e9d87a_SimplePolFileA + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=0e09e6cf-0d31-499e-bcf0-bee4d3e9d87a_SimplePolFileA +codeSpace= +DatasetTitle=Simple Pol File [píxels] + +[SPATIAL_REFERENCE_SYSTEM:HORIZONTAL] +HorizontalSystemDefinition=Local +HorizontalSystemIdentifier=plane +unitats=STB#T_pixels +unitatsY=? 
+ +[EXTENT] +toler_env=0 +MinX=335.3187440533326 +MaxX=1224.163653663228 +MinY=390.371075166458 +MaxY=856.814462416696 + +[OVERVIEW] +CreationDate=20230628 16204653+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:N_VERTEXS] +visible=0 +MostrarUnitats=0 +descriptor=Nombre de vèrtexs + +[TAULA_PRINCIPAL:LONG_ARC] +descriptor=Longitud de l'arc + +[TAULA_PRINCIPAL:NODE_INI] +visible=0 +MostrarUnitats=0 +descriptor=Node inicial + +[TAULA_PRINCIPAL:NODE_FI] +visible=0 +MostrarUnitats=0 +descriptor=Node final + +[OVERVIEW:ASPECTES_TECNICS] +Ciclat1=SimplePolFile.pol + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204654+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampLongitudArc=LONG_ARC +NomCampNodeIni=NODE_INI +NomCampNodeFi=NODE_FI diff --git a/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileN.dbf b/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileN.dbf new file mode 100644 index 0000000000000000000000000000000000000000..19847ee7684ceccc4001218201034fe3e646fd67 GIT binary patch literal 261 zcmZRsW|LuNU|?uu&;gQYK!kz8(<R<L$kENynSsF%BrA%h#4*S@INmYd&))^Re28a2 kXfR0H6(Y}wrk_^<8!*5oXlR5@RKXCNC=Rt)O)xY903eGTUH||9 literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileN.rel b/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileN.rel new file mode 100644 index 000000000000..c0f476e1f0de --- /dev/null +++ b/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileN.rel @@ -0,0 +1,64 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16204653+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=701e2102-a0ba-4cb2-aeb5-bdb329c79868_SimplePolFileN + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code=701e2102-a0ba-4cb2-aeb5-bdb329c79868_SimplePolFileN +codeSpace= +DatasetTitle=Simple Pol File [píxels] + +[EXTENT] +toler_env=0 + +[OVERVIEW] +CreationDate=20230628 16204652+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +visible=0 +descriptor=Identificador Gràfic intern +MostrarUnitats=0 + +[TAULA_PRINCIPAL:ARCS_A_NOD] +MostrarUnitats=0 +descriptor=Nombre d'arcs al node + +[TAULA_PRINCIPAL:TIPUS_NODE] +MostrarUnitats=0 +descriptor=Tipus de node + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204653+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampArcsANode=ARCS_A_NOD +NomCampTipusNode=TIPUS_NODE diff --git a/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileP.dbf b/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileP.dbf new file mode 100644 index 0000000000000000000000000000000000000000..5d00a81ae4a44cefb1edfafaf085f49073b42336 GIT binary patch literal 729 
zcma)&OHRWu5QYa8b%8)^U~ev9+2cp-rAbW{sf5Zzh+S^L2{=4%<V+rFBxaHM=l|v* zXZO3?-vYqP%}@TW;t8NR^v4w5ni`<J(7RPj+keH>#qZAzevYYmk6ns|Kkt_dLW-d{ z!~QjI`;h9qJjcfU^T(+<Zt~D|a+n6lPXM?t#h5b3q6hvsf|4H?g}9<Fmf&It!dK0$ z>BZEnkfbdsCeeBBtkuqsrj)2F$q}WWOg2r+Fm~qrI@1e@R5?;CuU6bhEl`u`oUw{m p9#Kr@>{?Boj2&irAr)1#w=wGkH(Fa?5mv&O{Cf>5J<a^@+8-ctQn>&C literal 0 HcmV?d00001 diff --git a/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileP.rel b/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileP.rel new file mode 100644 index 000000000000..d0740ac9db3a --- /dev/null +++ b/autotest/ogr/data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFileP.rel @@ -0,0 +1,93 @@ +[VERSIO] +VersMetaDades=5 +SubVersMetaDades=0 +Vers=4 +SubVers=3 + +[METADADES] +language=cat +MDIdiom=cat +dateStamp=20230628 16204988+0200 +characterSet=006 +nOrganismes=1 +FileIdentifier=00691677-6d15-40f8-9d62-e8df34876e80_SimplePolFileP + +[METADADES:ORGANISME_1] +role=009 +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[IDENTIFICATION] +code= +codeSpace= +DatasetTitle=Simple Pol File + +[OVERVIEW:ASPECTES_TECNICS] +ArcSource=SimplePolFile + +[QUALITY:LINEAGE:PROCESS1] +nOrganismes=1 +history=C:\MiraMon\MM64.exe +date=20230628 16204988+0200 + +[QUALITY:LINEAGE:PROCESS1:ORGANISME_1] +IndividualName=Abel Pau +PositionName=Tècnic SIG +OrganisationName=Students and educational institutions + +[QUALITY:LINEAGE] +processes=1 + +[EXTENT] +toler_env=0 +MinX=335.318744053333 +MaxX=1224.16365366323 +MinY=390.371075166458 +MaxY=856.814462416696 + +[OVERVIEW] +CreationDate=20230628 16204986+0200 + +[TAULA_PRINCIPAL] +IdGrafic=ID_GRAFIC +TipusRelacio=RELACIO_1_N_DICC + +[TAULA_PRINCIPAL:ID_GRAFIC] +descriptor=Identificador Gràfic intern +visible=0 +TractamentVariable=Ordinal + +[TAULA_PRINCIPAL:N_VERTEXS] +descriptor=Nombre de vèrtexs +visible=0 +MostrarUnitats=0 + +[TAULA_PRINCIPAL:PERIMETRE] +descriptor=Perímetre del polígon + +[TAULA_PRINCIPAL:AREA] +descriptor=Àrea del polígon + +[TAULA_PRINCIPAL:N_ARCS] +descriptor=Nombre d'arcs +visible=0 +MostrarUnitats=0 + +[TAULA_PRINCIPAL:N_POLIG] +descriptor=Nombre de polígons elementals +visible=0 +MostrarUnitats=0 + +[GEOMETRIA_I_TOPOLOGIA] +NomCampNVertexs=N_VERTEXS +NomCampPerimetre=PERIMETRE +NomCampArea=AREA +NomCampNArcs=N_ARCS +NomCampNPoligons=N_POLIG + +[TAULA_PRINCIPAL:ATT1] +descriptor=atribute1 + +[TAULA_PRINCIPAL:ATT2] +descriptor=atribute2 diff --git a/autotest/ogr/ogr_miramon_vector.py b/autotest/ogr/ogr_miramon_vector.py index 19562018b21c..e7a2bd497b4a 100644 --- a/autotest/ogr/ogr_miramon_vector.py +++ b/autotest/ogr/ogr_miramon_vector.py @@ -331,6 +331,18 @@ def test_ogr_miramon_read_simple_polygon(): check_simple_polygon(ds) +# testing a polygon where the reference to arc has no extension +# the result has to be the same than if it has extension +def test_ogr_miramon_read_simple_polygon_no_ext(): + + ds = gdal.OpenEx( + "data/miramon/Polygons/SimplePolygonsCycleNoExt/SimplePolFile.pol", + gdal.OF_VECTOR, + ) + assert ds is not None, "Failed to get dataset" + check_simple_polygon(ds) + + def test_ogr_miramon_write_simple_polygon_EmptyVersion(tmp_vsimem): out_filename = str(tmp_vsimem / "out.pol") @@ -850,6 +862,14 @@ def test_ogr_miramon_OpenLanguageArc(Language, expected_description): ("data/miramon/CorruptedFiles/NoArcRel/SimpleArcFile.arc", "rel must exist"), ("data/miramon/CorruptedFiles/NoPolRel/SimplePolFile.pol", "rel must exist"), ("data/miramon/CorruptedFiles/BadCycle/SimplePolFile.pol", 
"Cannot open file"), + ( + "data/miramon/CorruptedFiles/InexistentCycle1/SimplePolFile.pol", + "Cannot open file", + ), + ( + "data/miramon/CorruptedFiles/InexistentCycle2/SimplePolFile.pol", + "Error reading the ARC file in the metadata file", + ), ], ) def test_ogr_miramon_corrupted_files(name, message): @@ -885,7 +905,7 @@ def test_ogr_miramon_corrupted_files(name, message): ), ], ) -def test_ogr_miramon_corrupted_features_point(name, message): +def test_ogr_miramon_corrupted_features(name, message): ds = gdal.OpenEx( name, @@ -1367,3 +1387,25 @@ def test_ogr_miramon_write_basic_multigeometry(tmp_path): ) ds = None + + +def test_ogr_miramon_create_field_after_feature(tmp_path): + + filename = str(tmp_path / "DataSetMULTIPOINT") + ds = ogr.GetDriverByName("MiramonVector").CreateDataSource(filename) + srs = osr.SpatialReference() + srs.ImportFromEPSG(32631) + lyr = ds.CreateLayer("test", srs=srs, geom_type=ogr.wkbUnknown) + create_common_attributes(lyr) + f = ogr.Feature(lyr.GetLayerDefn()) + assign_common_attributes(f) + + f.SetGeometry(ogr.CreateGeometryFromWkt("MULTIPOINT (0 0, 1 0)")) + lyr.CreateFeature(f) + + # MiraMon doesn't allow that + with pytest.raises( + Exception, + match="Cannot create fields to a layer with already existing features in it", + ): + create_common_attributes(lyr) From 5d257329962efe86d9a734ba8f653ad454144d49 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=A9=8D=E4=B8=B9=E5=B0=BC=20Dan=20Jacobson?= <jidanni@jidanni.org> Date: Fri, 26 Apr 2024 17:59:06 +0800 Subject: [PATCH 199/230] Update csv.rst geography field -> geometry field --- doc/source/drivers/vector/csv.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/drivers/vector/csv.rst b/doc/source/drivers/vector/csv.rst index ec4ae49abd36..919069147dfa 100644 --- a/doc/source/drivers/vector/csv.rst +++ b/doc/source/drivers/vector/csv.rst @@ -517,7 +517,7 @@ Examples ogr2ogr -f CSV output.csv input.shp -lco GEOMETRY=AS_XYZ - This example shows using ogr2ogr to transform a shapefile into a .csv - file with geography field formatted using GeoJSON format. + file with geometry field formatted using GeoJSON format. :: From e15ffa00bfca3874e91bfd35b7d902f0c5cf5a52 Mon Sep 17 00:00:00 2001 From: Alessandro Pasotti <elpaso@itopen.it> Date: Fri, 26 Apr 2024 14:24:22 +0200 Subject: [PATCH 200/230] typo in gdaltindex.rst --- doc/source/programs/gdaltindex.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/programs/gdaltindex.rst b/doc/source/programs/gdaltindex.rst index 15cd905f4188..6dd1668c30d2 100644 --- a/doc/source/programs/gdaltindex.rst +++ b/doc/source/programs/gdaltindex.rst @@ -145,7 +145,7 @@ tileindex, or as input for the :ref:`GTI <raster.gti>` driver. .. option:: <file_or_dir> The input GDAL raster files, can be multiple files separated by spaces. - Wildcards my also be used. Stores the file locations in the same style as + Wildcards may also be used. Stores the file locations in the same style as specified here, unless :option:`-write_absolute_path` option is also used. Starting with GDAL 3.9, this can also be a directory name. :option:`-recursive` From 3b69bce2524ae22e9bc7c515d919ee0590fb5b63 Mon Sep 17 00:00:00 2001 From: AbelPau <a.pau@creaf.uab.cat> Date: Fri, 26 Apr 2024 14:51:27 +0200 Subject: [PATCH 201/230] MIraMonVector Fix. Sometimes REL files contain more than 1024 charactes lines. I set the ajustment to 10000 to make more dificult to find this limit. 
--- ogr/ogrsf_frmts/miramon/mm_wrlayr.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ogr/ogrsf_frmts/miramon/mm_wrlayr.c b/ogr/ogrsf_frmts/miramon/mm_wrlayr.c index 2b22d60d6437..ae1101dd3aec 100644 --- a/ogr/ogrsf_frmts/miramon/mm_wrlayr.c +++ b/ogr/ogrsf_frmts/miramon/mm_wrlayr.c @@ -5133,7 +5133,7 @@ char *MMReturnValueFromSectionINIFile(const char *filename, const char *section, { char *value = nullptr; #ifndef GDAL_COMPILATION - char line[1024]; + char line[10000]; #endif const char *pszLine; char *section_header = nullptr; @@ -5155,7 +5155,7 @@ char *MMReturnValueFromSectionINIFile(const char *filename, const char *section, { pszLine = line; #else - while ((pszLine = CPLReadLine2L(file, 1024, nullptr)) != nullptr) + while ((pszLine = CPLReadLine2L(file, 10000, nullptr)) != nullptr) { #endif char *pszString = From 78066156a1659c4fdb7abb47ff02f338547aca7b Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Fri, 26 Apr 2024 14:54:45 +0200 Subject: [PATCH 202/230] Miramon: avoid Unsigned-integer-overflow in MMCreateExtendedDBFIndex() Validate that FirstRecordOffset as computed in MM_ReadExtendedDBFHeaderFromFile() is not negative. Otherwise it gets later passed to MMCreateExtendedDBFIndex() which casts it to a uint64_t, and thus lead to unsigned integer overflow when doing: ``` fseek_function(f, (MM_FILE_OFFSET)offset_1era + (MM_FILE_OFFSET)bytes_acumulats_id_grafic, SEEK_SET); ``` Fixes https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=68303 --- ogr/ogrsf_frmts/miramon/mm_gdal_functions.c | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c b/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c index 0edb01c2ed1a..a7f52941ccf6 100644 --- a/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c +++ b/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c @@ -1123,7 +1123,7 @@ int MM_ReadExtendedDBFHeaderFromFile(const char *szFileName, FILE_TYPE *pf; unsigned short int two_bytes; MM_EXT_DBF_N_FIELDS nIField; - MM_FIRST_RECORD_OFFSET_TYPE offset_primera_fitxa; + uint16_t offset_primera_fitxa; MM_FIRST_RECORD_OFFSET_TYPE offset_fals = 0; MM_BOOLEAN incoherent_record_size = FALSE; MM_BYTE un_byte; @@ -1268,9 +1268,17 @@ int MM_ReadExtendedDBFHeaderFromFile(const char *szFileName, memcpy(&FirstRecordOffsetLow16Bits, &offset_primera_fitxa, 2); memcpy(&FirstRecordOffsetHigh16Bits, &pMMBDXP->reserved_2, 2); - pMMBDXP->FirstRecordOffset = - ((GUInt32)FirstRecordOffsetHigh16Bits << 16) | - FirstRecordOffsetLow16Bits; + GUInt32 nTmp = ((GUInt32)FirstRecordOffsetHigh16Bits << 16) | + FirstRecordOffsetLow16Bits; + if (nTmp > INT32_MAX) + { + free_function(pMMBDXP->pField); + pMMBDXP->pField = nullptr; + pMMBDXP->nFields = 0; + fclose_and_nullify(&pMMBDXP->pfDataBase); + return 1; + } + pMMBDXP->FirstRecordOffset = (MM_FIRST_RECORD_OFFSET_TYPE)nTmp; if (some_problems_when_reading > 0) offset_fals = pMMBDXP->FirstRecordOffset; From 96d59ecd7f501d1d38d1f43aa0f7c73dcfa67a19 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=A9=8D=E4=B8=B9=E5=B0=BC=20Dan=20Jacobson?= <jidanni@jidanni.org> Date: Fri, 26 Apr 2024 17:21:32 +0800 Subject: [PATCH 203/230] Update ogr2ogr.rst to show how to extract _only_ the geometry --- doc/source/programs/ogr2ogr.rst | 102 +++++++++++++++++++++++--------- 1 file changed, 73 insertions(+), 29 deletions(-) diff --git a/doc/source/programs/ogr2ogr.rst b/doc/source/programs/ogr2ogr.rst index c1925fa2e756..47cefa27ae88 100644 --- a/doc/source/programs/ogr2ogr.rst +++ 
b/doc/source/programs/ogr2ogr.rst @@ -658,57 +658,101 @@ This utility is also callable from C with :cpp:func:`GDALVectorTranslate`. Examples -------- -Basic conversion from Shapefile to GeoPackage: +* Basic conversion from Shapefile to GeoPackage: -.. code-block:: bash + .. code-block:: bash - ogr2ogr output.gpkg input.shp + ogr2ogr output.gpkg input.shp -Change the coordinate reference system from ``EPSG:4326`` to ``EPSG:3857``: +* Change the coordinate reference system from ``EPSG:4326`` to ``EPSG:3857``: -.. code-block:: bash + .. code-block:: bash - ogr2ogr -s_srs EPSG:4326 -t_srs EPSG:3857 output.gpkg input.gpkg + ogr2ogr -s_srs EPSG:4326 -t_srs EPSG:3857 output.gpkg input.gpkg -Example appending to an existing layer: +* Example appending to an existing layer: -.. code-block:: bash + .. code-block:: bash - ogr2ogr -append -f PostgreSQL PG:dbname=warmerda abc.tab + ogr2ogr -append -f PostgreSQL PG:dbname=warmerda abc.tab -Clip input layer with a bounding box (<xmin> <ymin> <xmax> <ymax>): +* Clip input layer with a bounding box (<xmin> <ymin> <xmax> <ymax>): -.. code-block:: bash + .. code-block:: bash - ogr2ogr -spat -13.931 34.886 46.23 74.12 output.gpkg natural_earth_vector.gpkg + ogr2ogr -spat -13.931 34.886 46.23 74.12 output.gpkg natural_earth_vector.gpkg -Filter Features by a ``-where`` clause: +* Filter Features by a ``-where`` clause: -.. code-block:: bash + .. code-block:: bash - ogr2ogr -where "\"POP_EST\" < 1000000" \ - output.gpkg natural_earth_vector.gpkg ne_10m_admin_0_countries + ogr2ogr -where "\"POP_EST\" < 1000000" \ + output.gpkg natural_earth_vector.gpkg ne_10m_admin_0_countries -Example reprojecting from ETRS_1989_LAEA_52N_10E to EPSG:4326 and clipping to a bounding box: +More examples are given in the individual format pages. -.. code-block:: bash +Advanced examples +----------------- - ogr2ogr -wrapdateline -t_srs EPSG:4326 -clipdst -5 40 15 55 france_4326.shp europe_laea.shp +* Reprojecting from ETRS_1989_LAEA_52N_10E to EPSG:4326 and clipping to a bounding box: -Example for using the ``-fieldmap`` setting. The first field of the source layer is -used to fill the third field (index 2 = third field) of the target layer, the -second field of the source layer is ignored, the third field of the source -layer used to fill the fifth field of the target layer. + .. code-block:: bash -.. code-block:: bash + ogr2ogr -wrapdateline -t_srs EPSG:4326 -clipdst -5 40 15 55 france_4326.shp europe_laea.shp - ogr2ogr -append -fieldmap 2,-1,4 dst.shp src.shp +* Using the ``-fieldmap`` setting. The first field of the source layer is + used to fill the third field (index 2 = third field) of the target layer, the + second field of the source layer is ignored, the third field of the source + layer used to fill the fifth field of the target layer. -Note that not all formats preserve geometries on layer creation by default. E.g., here we need ``-lco``: + .. code-block:: bash -.. code-block:: bash + ogr2ogr -append -fieldmap 2,-1,4 dst.shp src.shp - ogr2ogr -lco GEOMETRY=AS_XYZ TrackWaypoint.csv TrackWaypoint.kml +* Outputing geometries with the CSV driver. -More examples are given in the individual format pages. + By default, this driver does not preserve geometries on layer creation by + default. An explicit layer creation option is needed: + + .. code-block:: bash + + ogr2ogr -lco GEOMETRY=AS_XYZ TrackWaypoint.csv TrackWaypoint.kml + +* Extracting only geometries. + + There are different situations, depending if the input layer has a named geometry + column, or not. 
First check, with ogrinfo if there is a reported geometry column. + + .. code-block:: bash + + ogrinfo -so CadNSDI.gdb.zip PLSSPoint | grep 'Geometry Column' + Geometry Column = SHAPE + + In that situation where the input format is a FileGeodatabase, it is called SHAPE + and can thus be referenced directly in a SELECT statement. + + .. code-block:: bash + + ogr2ogr -sql "SELECT SHAPE FROM PLSSPoint LIMIT 2" \ + -lco GEOMETRY=AS_XY -f CSV /vsistdout/ CadNSDI.gdb.zip + + For a shapefile with a unamed geometry column, ``_ogr_geometry_`` can be used as + a special name to designate the implicit geometry column, when using the default + :ref:`OGR SQL <ogr_sql_dialect>` dialect. The name begins with + an underscore and SQL syntax requires that it must appear between double quotes. + In addition the command line interpreter may require that double quotes are + escaped and the final SELECT statement could look like: + + .. code-block:: bash + + ogr2ogr -sql "SELECT \"_ogr_geometry_\" FROM PLSSPoint LIMIT 2" \ + -lco GEOMETRY=AS_XY -f CSV /vsistdout/ CadNSDI.shp + + If using the :ref:`SQL SQLite <sql_sqlite_dialect>` dialect, the special geometry + name is ``geometry`` when the source geometry column has no name. + + .. code-block:: bash + + ogr2ogr -sql "SELECT geometry FROM PLSSPoint LIMIT 2" -dialect SQLite \ + -lco GEOMETRY=AS_XY -f CSV /vsistdout/ CadNSDI.shp From 036f813e380ddcc9eb8d35a8069253b540a001f2 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Fri, 26 Apr 2024 19:55:21 +0200 Subject: [PATCH 204/230] docker/ubuntu-full/bh-proj.sh: shellcheck fix --- docker/ubuntu-full/bh-proj.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/ubuntu-full/bh-proj.sh b/docker/ubuntu-full/bh-proj.sh index a0216409d2e3..ea82edf6cddc 100755 --- a/docker/ubuntu-full/bh-proj.sh +++ b/docker/ubuntu-full/bh-proj.sh @@ -100,6 +100,6 @@ fi apt-get update -y DEBIAN_FRONTEND=noninteractive apt-get install -y patchelf patchelf --set-soname libinternalproj.so.${PROJ_SO_FIRST} ${DESTDIR}${PROJ_INSTALL_PREFIX}/lib/libinternalproj.so.${PROJ_SO} -for i in ${DESTDIR}${PROJ_INSTALL_PREFIX}/bin/*; do +for i in "${DESTDIR}${PROJ_INSTALL_PREFIX}/bin"/*; do patchelf --replace-needed libproj.so.${PROJ_SO_FIRST} libinternalproj.so.${PROJ_SO_FIRST} $i; done From 9932ea45a58e72ffade842bf5415d0c6f774414e Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Fri, 26 Apr 2024 19:49:36 +0200 Subject: [PATCH 205/230] Doc: updates to support building with upcoming update proj-docs Docker image updated to Ubuntu 24.04 I've tested locally those changes with a local build of the updated proj-docs Docker image per https://github.com/OSGeo/PROJ/pull/4128 --- .github/workflows/doc_build.yml | 10 +++++++++- doc/source/conf.py | 18 +++++++++++++++++- 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/.github/workflows/doc_build.yml b/.github/workflows/doc_build.yml index c46988153e6b..250730b65364 100644 --- a/.github/workflows/doc_build.yml +++ b/.github/workflows/doc_build.yml @@ -30,7 +30,7 @@ jobs: apt update apt install -y libproj-dev swig python3 -m pip install -r doc/requirements.txt - python3 -m pip install numpy + python3 -m pip install numpy setuptools pushd . mkdir build cd build @@ -43,6 +43,14 @@ jobs: -DOGR_BUILD_OPTIONAL_DRIVERS=OFF cmake --build . -j$(nproc) cmake --install . + # With the new ghcr.io/osgeo/proj-docs image based on Ubuntu 24.04 + # a venv is activated. 
The above does not install the + # Python bindings into it (and the ones in the system are not found + # without overriding PYTHONPATH), so do it through pip install + cd swig/python + python3 setup.py sdist + cp dist/* /tmp/gdal.tar.gz + python3 -m pip install /tmp/gdal.tar.gz ldconfig popd diff --git a/doc/source/conf.py b/doc/source/conf.py index 7b01a6d3954c..98b497994f04 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -429,6 +429,20 @@ \fi """ +# Package substitutefont no longer exists since TeXLive 2023 later than August 2023 +# and has been replaced with sphinxpackagesubstitutefont +# https://github.com/jfbu/sphinx/commit/04cbd819b0e285d058549b2173af7efadf1cd020 +import sphinx + +if os.path.exists( + os.path.join( + os.path.dirname(sphinx.__file__), "texinputs", "sphinxpackagesubstitutefont.sty" + ) +): + substitutefont_package = "sphinxpackagesubstitutefont" +else: + substitutefont_package = "substitutefont" + latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', @@ -436,7 +450,9 @@ #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. "preamble": preamble, - "inputenc": "\\usepackage[utf8]{inputenc}\n\\usepackage{CJKutf8}\n\\usepackage{substitutefont}", + "inputenc": "\\usepackage[utf8]{inputenc}\n\\usepackage{CJKutf8}\n\\usepackage{" + + substitutefont_package + + "}", "babel": "\\usepackage[russian,main=english]{babel}\n\\selectlanguage{english}", "fontenc": "\\usepackage[LGR,X2,T1]{fontenc}" # Latex figure (float) alignment From 9f806aaac4571638da6302bbe8fab9d179db716f Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 27 Apr 2024 12:59:08 +0200 Subject: [PATCH 206/230] Internal libtiff: EvaluateIFDdatasizeReading(): avoid unsigned integer overflow (master only) Fixes https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=68327 --- frmts/gtiff/libtiff/tif_dirread.c | 28 +++++++--------------------- 1 file changed, 7 insertions(+), 21 deletions(-) diff --git a/frmts/gtiff/libtiff/tif_dirread.c b/frmts/gtiff/libtiff/tif_dirread.c index 390a9104d299..4305a512d1d0 100644 --- a/frmts/gtiff/libtiff/tif_dirread.c +++ b/frmts/gtiff/libtiff/tif_dirread.c @@ -4083,7 +4083,7 @@ static int ByteCountLooksBad(TIFF *tif) */ static bool EvaluateIFDdatasizeReading(TIFF *tif, TIFFDirEntry *dp) { - const int data_width = TIFFDataWidth(dp->tdir_type); + const uint64_t data_width = TIFFDataWidth(dp->tdir_type); if (data_width != 0 && dp->tdir_count > UINT64_MAX / data_width) { TIFFErrorExtR(tif, "EvaluateIFDdatasizeReading", @@ -4093,6 +4093,12 @@ static bool EvaluateIFDdatasizeReading(TIFF *tif, TIFFDirEntry *dp) const uint64_t datalength = dp->tdir_count * data_width; if (datalength > ((tif->tif_flags & TIFF_BIGTIFF) ? 
0x8U : 0x4U)) { + if (tif->tif_dir.td_dirdatasize_read > UINT64_MAX - datalength) + { + TIFFErrorExtR(tif, "EvaluateIFDdatasizeReading", + "Too large IFD data size"); + return false; + } tif->tif_dir.td_dirdatasize_read += datalength; if (!(tif->tif_flags & TIFF_BIGTIFF)) { @@ -4518,9 +4524,7 @@ int TIFFReadDirectory(TIFF *tif) enum TIFFReadDirEntryErr err; err = TIFFReadDirEntryShort(tif, dp, &value); if (!EvaluateIFDdatasizeReading(tif, dp)) - { goto bad; - } if (err == TIFFReadDirEntryErrCount) err = TIFFReadDirEntryPersampleShort(tif, dp, &value); @@ -4552,9 +4556,7 @@ int TIFFReadDirectory(TIFF *tif) else err = TIFFReadDirEntryDoubleArray(tif, dp, &data); if (!EvaluateIFDdatasizeReading(tif, dp)) - { goto bad; - } if (err != TIFFReadDirEntryErrOk) { fip = TIFFFieldWithTag(tif, dp->tdir_tag); @@ -4598,9 +4600,7 @@ int TIFFReadDirectory(TIFF *tif) _TIFFmemcpy(&(tif->tif_dir.td_stripoffset_entry), dp, sizeof(TIFFDirEntry)); if (!EvaluateIFDdatasizeReading(tif, dp)) - { goto bad; - } } break; case TIFFTAG_STRIPBYTECOUNTS: @@ -4629,9 +4629,7 @@ int TIFFReadDirectory(TIFF *tif) _TIFFmemcpy(&(tif->tif_dir.td_stripbytecount_entry), dp, sizeof(TIFFDirEntry)); if (!EvaluateIFDdatasizeReading(tif, dp)) - { goto bad; - } } break; case TIFFTAG_COLORMAP: @@ -4688,9 +4686,7 @@ int TIFFReadDirectory(TIFF *tif) else err = TIFFReadDirEntryShortArray(tif, dp, &value); if (!EvaluateIFDdatasizeReading(tif, dp)) - { goto bad; - } if (err != TIFFReadDirEntryErrOk) { fip = TIFFFieldWithTag(tif, dp->tdir_tag); @@ -6423,9 +6419,7 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) if (err == TIFFReadDirEntryErrOk) { if (!EvaluateIFDdatasizeReading(tif, dp)) - { return 0; - } if (!TIFFSetField(tif, dp->tdir_tag, data)) return (0); } @@ -6440,9 +6434,7 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) if (err == TIFFReadDirEntryErrOk) { if (!EvaluateIFDdatasizeReading(tif, dp)) - { return 0; - } if (!TIFFSetField(tif, dp->tdir_tag, data)) return (0); } @@ -6457,9 +6449,7 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) if (err == TIFFReadDirEntryErrOk) { if (!EvaluateIFDdatasizeReading(tif, dp)) - { return 0; - } if (!TIFFSetField(tif, dp->tdir_tag, data)) return (0); } @@ -6474,9 +6464,7 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) if (err == TIFFReadDirEntryErrOk) { if (!EvaluateIFDdatasizeReading(tif, dp)) - { return 0; - } if (!TIFFSetField(tif, dp->tdir_tag, data)) return (0); } @@ -6491,9 +6479,7 @@ static int TIFFFetchNormalTag(TIFF *tif, TIFFDirEntry *dp, int recover) if (err == TIFFReadDirEntryErrOk) { if (!EvaluateIFDdatasizeReading(tif, dp)) - { return 0; - } if (!TIFFSetField(tif, dp->tdir_tag, data)) return (0); } From 069d82c632b5bd092184701557208840bd7f3629 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 27 Apr 2024 15:33:36 +0200 Subject: [PATCH 207/230] Arrow/Parquet: fix writing empty point Z with GEOMETRY_ENCODING=GEOARROW_INTERLEAVED, and test spatial filtering of that encoding Tests scenario of https://github.com/qgis/QGIS/issues/57228 --- autotest/ogr/ogr_parquet.py | 131 ++++++++++++++++++ .../arrow_common/ograrrowlayer.hpp | 2 +- .../arrow_common/ograrrowwriterlayer.hpp | 22 +-- 3 files changed, 146 insertions(+), 9 deletions(-) diff --git a/autotest/ogr/ogr_parquet.py b/autotest/ogr/ogr_parquet.py index ebfb3ca3b889..129cc2beed51 100755 --- a/autotest/ogr/ogr_parquet.py +++ b/autotest/ogr/ogr_parquet.py @@ -3680,6 +3680,137 @@ def check(lyr): assert 
lyr.GetFeatureCount() != 0 +############################################################################### +# Check GeoArrow fixed size list / interleaved encoding + + +@pytest.mark.parametrize( + "wkt", + [ + "POINT (1 2)", + "POINT Z (1 2 3)", + "LINESTRING (1 2,3 4)", + "LINESTRING Z (1 2 3,4 5 6)", + "POLYGON ((0 1,2 3,10 20,0 1))", + "POLYGON ((0 0,0 10,10 10,10 0,0 0),(1 1,1 9,9 9,9 1,1 1))", + "POLYGON Z ((0 1 10,2 3 20,10 20 30,0 1 10))", + "MULTIPOINT ((1 2),(3 4))", + "MULTIPOINT Z ((1 2 3),(4 5 6))", + "MULTILINESTRING ((1 2,3 4),(5 6,7 8,9 10))", + "MULTILINESTRING Z ((1 2 3,4 5 6),(7 8 9,10 11 12,13 14 15))", + "MULTIPOLYGON (((0 1,2 3,10 20,0 1)),((100 110,100 120,120 120,100 110)))", + "MULTIPOLYGON (((0 0,0 10,10 10,10 0,0 0),(1 1,1 9,9 9,9 1,1 1)),((100 110,100 120,120 120,100 110)))", + "MULTIPOLYGON Z (((0 1 10,2 3 20,10 20 30,0 1 10)))", + ], +) +@pytest.mark.parametrize("covering_bbox", [True, False]) +@gdaltest.enable_exceptions() +def test_ogr_parquet_geoarrow_fixed_size_list(tmp_vsimem, wkt, covering_bbox): + + geom = ogr.CreateGeometryFromWkt(wkt) + + filename = str(tmp_vsimem / "test_ogr_parquet_geoarrow_fixed_size_list.parquet") + + ds = ogr.GetDriverByName("Parquet").CreateDataSource(filename) + + lyr = ds.CreateLayer( + "test", + geom_type=geom.GetGeometryType(), + options=[ + "GEOMETRY_ENCODING=GEOARROW_INTERLEAVED", + "WRITE_COVERING_BBOX=" + ("YES" if covering_bbox else "NO"), + ], + ) + lyr.CreateField(ogr.FieldDefn("foo")) + + # Nominal geometry + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetGeometry(geom) + lyr.CreateFeature(f) + + # Null geometry + f = ogr.Feature(lyr.GetLayerDefn()) + lyr.CreateFeature(f) + + # Empty geometry + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetGeometry(ogr.Geometry(geom.GetGeometryType())) + lyr.CreateFeature(f) + + # Nominal geometry + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetGeometry(geom) + lyr.CreateFeature(f) + + geom2 = None + if geom.GetGeometryCount() > 1: + geom2 = geom.Clone() + geom2.RemoveGeometry(1) + f = ogr.Feature(lyr.GetLayerDefn()) + f.SetGeometry(geom2) + lyr.CreateFeature(f) + + ds = None + + def check(lyr): + assert lyr.GetGeomType() == geom.GetGeometryType() + + f = lyr.GetNextFeature() + ogrtest.check_feature_geometry(f, geom) + + f = lyr.GetNextFeature() + if geom.GetGeometryType() in (ogr.wkbPoint, ogr.wkbPoint25D): + assert f.GetGeometryRef().IsEmpty() + else: + assert f.GetGeometryRef() is None + + f = lyr.GetNextFeature() + ogrtest.check_feature_geometry(f, ogr.Geometry(geom.GetGeometryType())) + + f = lyr.GetNextFeature() + ogrtest.check_feature_geometry(f, geom) + + if geom2: + f = lyr.GetNextFeature() + ogrtest.check_feature_geometry(f, geom2) + + ds = ogr.Open(filename) + lyr = ds.GetLayer(0) + check(lyr) + + # Check that ignoring attribute fields doesn't impact geometry reading + ds = ogr.Open(filename) + lyr = ds.GetLayer(0) + lyr.SetIgnoredFields(["foo"]) + check(lyr) + + ds = ogr.Open(filename) + lyr = ds.GetLayer(0) + minx, maxx, miny, maxy = geom.GetEnvelope() + + lyr.SetSpatialFilter(geom) + assert lyr.GetFeatureCount() == (3 if geom.GetGeometryCount() > 1 else 2) + + lyr.SetSpatialFilterRect(maxx + 1, miny, maxx + 2, maxy) + assert lyr.GetFeatureCount() == 0 + + lyr.SetSpatialFilterRect(minx, maxy + 1, maxx, maxy + 2) + assert lyr.GetFeatureCount() == 0 + + lyr.SetSpatialFilterRect(minx - 2, miny, minx - 1, maxy) + assert lyr.GetFeatureCount() == 0 + + lyr.SetSpatialFilterRect(minx, miny - 2, maxx, miny - 1) + assert lyr.GetFeatureCount() == 0 + if ( + minx != miny + and maxx != maxy + 
and ogr.GT_Flatten(geom.GetGeometryType()) != ogr.wkbMultiPoint + ): + lyr.SetSpatialFilterRect(minx + 0.1, miny + 0.1, maxx - 0.1, maxy - 0.1) + assert lyr.GetFeatureCount() != 0 + + ############################################################################### # Test reading a file with an extension on a regular field not registered with # PyArrow diff --git a/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp b/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp index b7a4fc847f09..d1ee6ec52263 100644 --- a/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp +++ b/ogr/ogrsf_frmts/arrow_common/ograrrowlayer.hpp @@ -833,7 +833,7 @@ static bool IsPointType(const std::shared_ptr<arrow::DataType> &type, bHasZOut = false; bHasMOut = true; } - else if (osValueFieldName == "xyz") + else /* if (osValueFieldName == "xyz" || osValueFieldName == "element") */ { bHasMOut = false; bHasZOut = true; diff --git a/ogr/ogrsf_frmts/arrow_common/ograrrowwriterlayer.hpp b/ogr/ogrsf_frmts/arrow_common/ograrrowwriterlayer.hpp index 39c429fe523c..17a8c0b3f6f2 100644 --- a/ogr/ogrsf_frmts/arrow_common/ograrrowwriterlayer.hpp +++ b/ogr/ogrsf_frmts/arrow_common/ograrrowwriterlayer.hpp @@ -1261,10 +1261,10 @@ inline OGRErr OGRArrowWriterLayer::BuildGeometry(OGRGeometry *poGeom, std::numeric_limits<double>::quiet_NaN())); OGR_ARROW_RETURN_OGRERR_NOT_OK(poValueBuilder->Append( std::numeric_limits<double>::quiet_NaN())); - if (OGR_GT_HasZ(eGType)) + if (bHasZ) OGR_ARROW_RETURN_OGRERR_NOT_OK(poValueBuilder->Append( std::numeric_limits<double>::quiet_NaN())); - if (OGR_GT_HasM(eGType)) + if (bHasM) OGR_ARROW_RETURN_OGRERR_NOT_OK(poValueBuilder->Append( std::numeric_limits<double>::quiet_NaN())); } @@ -1364,6 +1364,12 @@ inline OGRErr OGRArrowWriterLayer::BuildGeometry(OGRGeometry *poGeom, std::numeric_limits<double>::quiet_NaN())); OGR_ARROW_RETURN_OGRERR_NOT_OK(poValueBuilder->Append( std::numeric_limits<double>::quiet_NaN())); + if (bHasZ) + OGR_ARROW_RETURN_OGRERR_NOT_OK(poValueBuilder->Append( + std::numeric_limits<double>::quiet_NaN())); + if (bHasM) + OGR_ARROW_RETURN_OGRERR_NOT_OK(poValueBuilder->Append( + std::numeric_limits<double>::quiet_NaN())); } else { @@ -1371,13 +1377,13 @@ inline OGRErr OGRArrowWriterLayer::BuildGeometry(OGRGeometry *poGeom, poValueBuilder->Append(poPoint->getX())); OGR_ARROW_RETURN_OGRERR_NOT_OK( poValueBuilder->Append(poPoint->getY())); + if (bHasZ) + OGR_ARROW_RETURN_OGRERR_NOT_OK( + poValueBuilder->Append(poPoint->getZ())); + if (bHasM) + OGR_ARROW_RETURN_OGRERR_NOT_OK( + poValueBuilder->Append(poPoint->getM())); } - if (bHasZ) - OGR_ARROW_RETURN_OGRERR_NOT_OK( - poValueBuilder->Append(poPoint->getZ())); - if (bHasM) - OGR_ARROW_RETURN_OGRERR_NOT_OK( - poValueBuilder->Append(poPoint->getM())); break; } From 4863911555a7621e8e5ceb85fa106ff1838e914e Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 27 Apr 2024 16:42:58 +0200 Subject: [PATCH 208/230] CI: fix Conda platform names for latest GHA MacOS name changes Cf https://github.com/actions/runner-images/blob/main/README.md macOS 14 macos-latest-large or macos-14-large macOS 14 Arm64 macos-latest, macos-14, macos-latest-xlarge or macos-14-xlarge --- .github/workflows/conda.yml | 4 +++- ci/travis/conda/compile.sh | 10 ++++++---- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/.github/workflows/conda.yml b/.github/workflows/conda.yml index 78b3bfc9c9a6..26b2f3beb176 100644 --- a/.github/workflows/conda.yml +++ b/.github/workflows/conda.yml @@ -28,7 +28,9 @@ jobs: strategy: fail-fast: true matrix: - 
platform: ['ubuntu-latest','windows-latest','macos-latest','macos-14'] + # macos-latest: arm64 + # macos-latest-large: x86_64 + platform: ['ubuntu-latest','windows-latest','macos-latest','macos-latest-large'] env: GHA_CI_PLATFORM: ${{ matrix.platform }} diff --git a/ci/travis/conda/compile.sh b/ci/travis/conda/compile.sh index dcb73e80a665..6baa58ceeb21 100755 --- a/ci/travis/conda/compile.sh +++ b/ci/travis/conda/compile.sh @@ -13,12 +13,14 @@ if grep -q "ubuntu" <<< "$GHA_CI_PLATFORM"; then ARCH="64" fi -if grep -q "macos-14" <<< "$GHA_CI_PLATFORM"; then - CONDA_PLAT="osx" - ARCH="arm64" -elif grep -q "macos" <<< "$GHA_CI_PLATFORM"; then +# macos-latest: arm64 +# macos-latest-large: x86_64 +if grep -q "macos-latest-large" <<< "$GHA_CI_PLATFORM"; then CONDA_PLAT="osx" ARCH="64" +elif grep -q "macos-latest" <<< "$GHA_CI_PLATFORM"; then + CONDA_PLAT="osx" + ARCH="arm64" fi conda build recipe --clobber-file recipe/recipe_clobber.yaml --output-folder packages -m ".ci_support/${CONDA_PLAT}_${ARCH}_.yaml" From 0875a5a18c76f661432cd721ca288c0cb4f17b85 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 27 Apr 2024 17:06:26 +0200 Subject: [PATCH 209/230] Revert "CI: fix Conda platform names for latest GHA MacOS name changes" This reverts commit 4863911555a7621e8e5ceb85fa106ff1838e914e. --- .github/workflows/conda.yml | 4 +--- ci/travis/conda/compile.sh | 10 ++++------ 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/.github/workflows/conda.yml b/.github/workflows/conda.yml index 26b2f3beb176..78b3bfc9c9a6 100644 --- a/.github/workflows/conda.yml +++ b/.github/workflows/conda.yml @@ -28,9 +28,7 @@ jobs: strategy: fail-fast: true matrix: - # macos-latest: arm64 - # macos-latest-large: x86_64 - platform: ['ubuntu-latest','windows-latest','macos-latest','macos-latest-large'] + platform: ['ubuntu-latest','windows-latest','macos-latest','macos-14'] env: GHA_CI_PLATFORM: ${{ matrix.platform }} diff --git a/ci/travis/conda/compile.sh b/ci/travis/conda/compile.sh index 6baa58ceeb21..dcb73e80a665 100755 --- a/ci/travis/conda/compile.sh +++ b/ci/travis/conda/compile.sh @@ -13,14 +13,12 @@ if grep -q "ubuntu" <<< "$GHA_CI_PLATFORM"; then ARCH="64" fi -# macos-latest: arm64 -# macos-latest-large: x86_64 -if grep -q "macos-latest-large" <<< "$GHA_CI_PLATFORM"; then - CONDA_PLAT="osx" - ARCH="64" -elif grep -q "macos-latest" <<< "$GHA_CI_PLATFORM"; then +if grep -q "macos-14" <<< "$GHA_CI_PLATFORM"; then CONDA_PLAT="osx" ARCH="arm64" +elif grep -q "macos" <<< "$GHA_CI_PLATFORM"; then + CONDA_PLAT="osx" + ARCH="64" fi conda build recipe --clobber-file recipe/recipe_clobber.yaml --output-folder packages -m ".ci_support/${CONDA_PLAT}_${ARCH}_.yaml" From c9982227e3244d7357ecd834aa105b7429645f42 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 27 Apr 2024 17:37:49 +0200 Subject: [PATCH 210/230] CI: conda.yml: change macos-latest to macos-13 --- .github/workflows/conda.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/conda.yml b/.github/workflows/conda.yml index 78b3bfc9c9a6..d64da3f0c831 100644 --- a/.github/workflows/conda.yml +++ b/.github/workflows/conda.yml @@ -28,7 +28,9 @@ jobs: strategy: fail-fast: true matrix: - platform: ['ubuntu-latest','windows-latest','macos-latest','macos-14'] + # macos-13: Intel + # macos-14: arm64 + platform: ['ubuntu-latest','windows-latest','macos-13','macos-14'] env: GHA_CI_PLATFORM: ${{ matrix.platform }} From 1e7d09e58b4d40a88ea9d932438a1c63d35d5718 Mon Sep 17 
00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sat, 27 Apr 2024 18:01:00 +0200 Subject: [PATCH 211/230] doc_build.sh: fix 'deploy ssh key' step --- .github/workflows/doc_build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/doc_build.yml b/.github/workflows/doc_build.yml index 250730b65364..fe5a40cfa5c2 100644 --- a/.github/workflows/doc_build.yml +++ b/.github/workflows/doc_build.yml @@ -105,7 +105,7 @@ jobs: if: ${{ github.ref_name == 'master' && github.repository == 'OSGeo/gdal' }} shell: bash -l {0} run: | - mkdir /root/.ssh && echo "${{ secrets.SSH_KEY_DOCS }}" > /root/.ssh/id_rsa + mkdir -p /root/.ssh && echo "${{ secrets.SSH_KEY_DOCS }}" > /root/.ssh/id_rsa chmod 700 /root/.ssh && chmod 600 /root/.ssh/id_rsa ssh-keyscan -t rsa github.com >> /root/.ssh/known_hosts eval `ssh-agent -s` From 6c3f748f468a72acf50173997259e18e881da93d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=A9=8D=E4=B8=B9=E5=B0=BC=20Dan=20Jacobson?= <jidanni@jidanni.org> Date: Sun, 28 Apr 2024 17:13:47 +0800 Subject: [PATCH 212/230] Update gdaltransform.rst -gcp description (#9789) --- doc/source/programs/gdaltransform.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/programs/gdaltransform.rst b/doc/source/programs/gdaltransform.rst index 3c76070a138b..48ab76be5365 100644 --- a/doc/source/programs/gdaltransform.rst +++ b/doc/source/programs/gdaltransform.rst @@ -109,7 +109,7 @@ projection,including GCP-based transformations. .. option:: -gcp <pixel> <line> <easting> <northing> [<elevation>] - Provide a GCP to be used for transformation (generally three or more are required) + Provide a GCP to be used for transformation (generally three or more are required). Pixel and line need not be integers. .. option:: -output_xy From fbdcb690663db08ba8159d4033e02aeab0713a78 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 28 Apr 2024 11:16:57 +0200 Subject: [PATCH 213/230] ogr2ogr.rst: remove out of topic LIMIT 2 --- doc/source/programs/ogr2ogr.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/source/programs/ogr2ogr.rst b/doc/source/programs/ogr2ogr.rst index 47cefa27ae88..52f116478b9b 100644 --- a/doc/source/programs/ogr2ogr.rst +++ b/doc/source/programs/ogr2ogr.rst @@ -734,7 +734,7 @@ Advanced examples .. code-block:: bash - ogr2ogr -sql "SELECT SHAPE FROM PLSSPoint LIMIT 2" \ + ogr2ogr -sql "SELECT SHAPE FROM PLSSPoint" \ -lco GEOMETRY=AS_XY -f CSV /vsistdout/ CadNSDI.gdb.zip For a shapefile with a unamed geometry column, ``_ogr_geometry_`` can be used as @@ -746,7 +746,7 @@ Advanced examples .. code-block:: bash - ogr2ogr -sql "SELECT \"_ogr_geometry_\" FROM PLSSPoint LIMIT 2" \ + ogr2ogr -sql "SELECT \"_ogr_geometry_\" FROM PLSSPoint" \ -lco GEOMETRY=AS_XY -f CSV /vsistdout/ CadNSDI.shp If using the :ref:`SQL SQLite <sql_sqlite_dialect>` dialect, the special geometry @@ -754,5 +754,5 @@ Advanced examples .. 
code-block:: bash - ogr2ogr -sql "SELECT geometry FROM PLSSPoint LIMIT 2" -dialect SQLite \ + ogr2ogr -sql "SELECT geometry FROM PLSSPoint" -dialect SQLite \ -lco GEOMETRY=AS_XY -f CSV /vsistdout/ CadNSDI.shp From 84048bd5232275c119ef61c803ff08dd6d3190db Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 28 Apr 2024 16:48:42 +0200 Subject: [PATCH 214/230] =?UTF-8?q?typo=20fixes=C2=A0[ci=20skip]?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- doc/source/programs/ogr2ogr.rst | 4 ++-- scripts/typos_allowlist.txt | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/doc/source/programs/ogr2ogr.rst b/doc/source/programs/ogr2ogr.rst index 52f116478b9b..0da1bfdc566a 100644 --- a/doc/source/programs/ogr2ogr.rst +++ b/doc/source/programs/ogr2ogr.rst @@ -710,7 +710,7 @@ Advanced examples ogr2ogr -append -fieldmap 2,-1,4 dst.shp src.shp -* Outputing geometries with the CSV driver. +* Outputting geometries with the CSV driver. By default, this driver does not preserve geometries on layer creation by default. An explicit layer creation option is needed: @@ -737,7 +737,7 @@ Advanced examples ogr2ogr -sql "SELECT SHAPE FROM PLSSPoint" \ -lco GEOMETRY=AS_XY -f CSV /vsistdout/ CadNSDI.gdb.zip - For a shapefile with a unamed geometry column, ``_ogr_geometry_`` can be used as + For a shapefile with a unnamed geometry column, ``_ogr_geometry_`` can be used as a special name to designate the implicit geometry column, when using the default :ref:`OGR SQL <ogr_sql_dialect>` dialect. The name begins with an underscore and SQL syntax requires that it must appear between double quotes. diff --git a/scripts/typos_allowlist.txt b/scripts/typos_allowlist.txt index be12d93cefc9..df2da55ec290 100644 --- a/scripts/typos_allowlist.txt +++ b/scripts/typos_allowlist.txt @@ -330,3 +330,4 @@ either 2 or 4 comma separated values. 
The same rules apply for the source and de assert f.GetField("PERIMETRE") == pytest.approx(680.544697, abs=1e-5) assert f["PERIMETRE"] == [3.414213562, 3.414213562] assert f["PERIMETRE"] == [32, 32] +# Package substitutefont no longer exists since TeXLive 2023 later than August 2023 From ca9ebd8b7a45429eaffff6b54f1edb93f1693aed Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 28 Apr 2024 17:08:54 +0200 Subject: [PATCH 215/230] gdalinfo and ogrinfo -json: add newline character at end of JSON output Refs https://github.com/qgis/QGIS/issues/57266 --- apps/gdalinfo_lib.cpp | 1 + apps/ogrinfo_lib.cpp | 1 + 2 files changed, 2 insertions(+) diff --git a/apps/gdalinfo_lib.cpp b/apps/gdalinfo_lib.cpp index b67eb9e1d29c..8aa00d02b6a7 100644 --- a/apps/gdalinfo_lib.cpp +++ b/apps/gdalinfo_lib.cpp @@ -1898,6 +1898,7 @@ char *GDALInfo(GDALDatasetH hDataset, const GDALInfoOptions *psOptions) #endif )); json_object_put(poJsonObject); + Concat(osStr, psOptions->bStdoutOutput, "\n"); } if (psOptionsToFree != nullptr) diff --git a/apps/ogrinfo_lib.cpp b/apps/ogrinfo_lib.cpp index 9aa19640505a..e287a7cb7ae7 100644 --- a/apps/ogrinfo_lib.cpp +++ b/apps/ogrinfo_lib.cpp @@ -2175,6 +2175,7 @@ char *GDALVectorInfo(GDALDatasetH hDataset, | JSON_C_TO_STRING_NOSLASHESCAPE #endif )); + ConcatStr(osRet, psOptions->bStdoutOutput, "\n"); } return VSI_STRDUP_VERBOSE(osRet); From bc0bf4483712c56853d2adff0a50fc301b338bab Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 28 Apr 2024 16:44:12 +0200 Subject: [PATCH 216/230] OGRFeature::SetField(): add warnings when detecting some lossy conversions (refs #9792) --- ogr/ogrfeature.cpp | 30 +++++++++++++++++++++++++++++- 1 file changed, 29 insertions(+), 1 deletion(-) diff --git a/ogr/ogrfeature.cpp b/ogr/ogrfeature.cpp index 9bf78fe7ed54..7f8fd5d588cf 100644 --- a/ogr/ogrfeature.cpp +++ b/ogr/ogrfeature.cpp @@ -3832,6 +3832,12 @@ void OGRFeature::SetField(int iField, GIntBig nValue) else if (eType == OFTReal) { pauFields[iField].Real = static_cast<double>(nValue); + if (static_cast<GIntBig>(pauFields[iField].Real) != nValue) + { + CPLError(CE_Warning, CPLE_AppDefined, + "Lossy conversion occurred when trying to set " + "a real field from a 64 bit integer value."); + } } else if (eType == OFTIntegerList) { @@ -3988,12 +3994,34 @@ void OGRFeature::SetField(int iField, double dfValue) : dfValue > nMax ? nMax : static_cast<int>(dfValue); pauFields[iField].Integer = OGRFeatureGetIntegerValue(poFDefn, nVal); + if (!(nVal == dfValue)) + { + if (std::isnan(dfValue)) + pauFields[iField].Integer = nMin; + CPLError(CE_Warning, CPLE_AppDefined, + "Lossy conversion occurred when trying to set " + "32 bit integer field from a real value."); + } pauFields[iField].Set.nMarker2 = 0; pauFields[iField].Set.nMarker3 = 0; } else if (eType == OFTInteger64) { - pauFields[iField].Integer64 = static_cast<GIntBig>(dfValue); + const auto nMin = std::numeric_limits<GIntBig>::min(); + const auto nMax = std::numeric_limits<GIntBig>::max(); + const auto nVal = dfValue < static_cast<double>(nMin) ? nMin + : dfValue > static_cast<double>(nMax) + ? 
nMax + : static_cast<GIntBig>(dfValue); + pauFields[iField].Integer64 = nVal; + if (!(static_cast<double>(nVal) == dfValue)) + { + if (std::isnan(dfValue)) + pauFields[iField].Integer64 = nMin; + CPLError(CE_Warning, CPLE_AppDefined, + "Lossy conversion occurred when trying to set " + "64 bit integer field from a real value."); + } pauFields[iField].Set.nMarker3 = 0; } else if (eType == OFTRealList) From 9cc3ba227f6dbff72c376abce9c4f619cca3ce7f Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 28 Apr 2024 16:45:06 +0200 Subject: [PATCH 217/230] OGRLayer::WriteArrowBatch(): add tolerance for field type mismatches if int32/int64/real Also add an option IF_FIELD_NOT_PRESERVED=ERROR to error out when lossy conversion occurs. Default behavior is to just emit a CE_Warning. Fixes #9792 --- autotest/ogr/ogr_mem.py | 100 ++++++++++++++ ogr/ogrsf_frmts/generic/ogrlayerarrow.cpp | 160 +++++++++++++++++++++- 2 files changed, 254 insertions(+), 6 deletions(-) diff --git a/autotest/ogr/ogr_mem.py b/autotest/ogr/ogr_mem.py index 770ac0254e4a..a308eac2a1c9 100755 --- a/autotest/ogr/ogr_mem.py +++ b/autotest/ogr/ogr_mem.py @@ -1599,6 +1599,106 @@ def test_ogr_mem_write_arrow_error_negative_fid(): dst_lyr.WriteArrowBatch(schema, array, ["FID=id"]) +############################################################################### +# Test writing a ArrowArray into a OGR field whose types don't fully match + + +@pytest.mark.parametrize("IF_FIELD_NOT_PRESERVED", [None, "ERROR"]) +@pytest.mark.parametrize( + "input_type,output_type,input_vals,output_vals", + [ + [ogr.OFTInteger64, ogr.OFTInteger, [123456, None], [123456, None]], + [ogr.OFTInteger64, ogr.OFTInteger, [1234567890123], [(1 << 31) - 1]], + [ogr.OFTReal, ogr.OFTInteger, [1], [1]], + [ogr.OFTReal, ogr.OFTInteger, [1.23], [1]], + [ogr.OFTReal, ogr.OFTInteger, [float("nan")], [-(1 << 31)]], + [ogr.OFTReal, ogr.OFTInteger64, [1], [1]], + [ogr.OFTReal, ogr.OFTInteger64, [1.23], [1]], + [ogr.OFTReal, ogr.OFTInteger64, [float("nan")], [-(1 << 63)]], + [ogr.OFTInteger64, ogr.OFTReal, [1234567890123, None], [1234567890123, None]], + [ + ogr.OFTInteger64, + ogr.OFTReal, + [((1 << 63) - 1), None], + [float((1 << 63) - 1), None], + ], + # below is never lossy + [ogr.OFTInteger, ogr.OFTInteger64, [123456, None], [123456, None]], + [ogr.OFTInteger, ogr.OFTReal, [123456, None], [123456, None]], + ], +) +@gdaltest.enable_exceptions() +def test_ogr_mem_write_arrow_accepted_field_type_mismatch( + input_type, output_type, input_vals, output_vals, IF_FIELD_NOT_PRESERVED +): + + if input_vals[0] == ((1 << 63) - 1) and output_type == ogr.OFTReal: + # This conversion from INT64_MAX to double doesn't seem to be lossy + # on arm64 or s390x (weird...) 
+ import platform + + if platform.machine() not in ("x86_64", "AMD64"): + pytest.skip("Skipping test on platform.machine() = " + platform.machine()) + + src_ds = ogr.GetDriverByName("Memory").CreateDataSource("") + src_lyr = src_ds.CreateLayer("src_lyr") + + src_lyr.CreateField(ogr.FieldDefn("my_field", input_type)) + + for v in input_vals: + src_feature = ogr.Feature(src_lyr.GetLayerDefn()) + if v: + src_feature["my_field"] = v + src_lyr.CreateFeature(src_feature) + + ds = ogr.GetDriverByName("Memory").CreateDataSource("") + dst_lyr = ds.CreateLayer("dst_lyr") + dst_lyr.CreateField(ogr.FieldDefn("my_field", output_type)) + + stream = src_lyr.GetArrowStream(["INCLUDE_FID=NO"]) + schema = stream.GetSchema() + + lossy_conversion = (input_vals != output_vals) or ( + input_vals[0] == ((1 << 63) - 1) and output_type == ogr.OFTReal + ) + + while True: + array = stream.GetNextRecordBatch() + if array is None: + break + if IF_FIELD_NOT_PRESERVED: + if lossy_conversion: + with gdal.quiet_errors(), pytest.raises( + Exception, match="value of field my_field cannot not preserved" + ): + dst_lyr.WriteArrowBatch( + schema, + array, + {"IF_FIELD_NOT_PRESERVED": IF_FIELD_NOT_PRESERVED}, + ) + return + else: + dst_lyr.WriteArrowBatch( + schema, array, {"IF_FIELD_NOT_PRESERVED": IF_FIELD_NOT_PRESERVED} + ) + else: + if lossy_conversion: + with gdal.quiet_errors(): + gdal.ErrorReset() + dst_lyr.WriteArrowBatch(schema, array) + assert gdal.GetLastErrorType() == gdal.CE_Warning + else: + gdal.ErrorReset() + dst_lyr.WriteArrowBatch(schema, array) + assert gdal.GetLastErrorType() == gdal.CE_None + + dst_lyr.ResetReading() + + for v in output_vals: + f = dst_lyr.GetNextFeature() + assert f["my_field"] == v + + ############################################################################### diff --git a/ogr/ogrsf_frmts/generic/ogrlayerarrow.cpp b/ogr/ogrsf_frmts/generic/ogrlayerarrow.cpp index 4ab02935f5ba..60da6d9e6a52 100644 --- a/ogr/ogrsf_frmts/generic/ogrlayerarrow.cpp +++ b/ogr/ogrsf_frmts/generic/ogrlayerarrow.cpp @@ -6311,6 +6311,64 @@ static bool BuildOGRFieldInfo( bTypeOK = true; break; } + else if (eOGRType == OFTInteger && + sType.eType == OFTInteger64) + { + // Potentially lossy. + CPLDebug("OGR", + "For field %s, writing from Arrow array of " + "type Int64 into OGR Int32 field. " + "Potentially loss conversion can happen", + sInfo.osName.c_str()); + bFallbackTypesUsed = true; + bTypeOK = true; + break; + } + else if (eOGRType == OFTInteger && sType.eType == OFTReal) + { + // Potentially lossy. + CPLDebug("OGR", + "For field %s, writing from Arrow array of " + "type Real into OGR Int32 field. " + "Potentially loss conversion can happen", + sInfo.osName.c_str()); + bFallbackTypesUsed = true; + bTypeOK = true; + break; + } + else if (eOGRType == OFTInteger64 && sType.eType == OFTReal) + { + // Potentially lossy. + CPLDebug("OGR", + "For field %s, writing from Arrow array of " + "type Real into OGR Int64 field. " + "Potentially loss conversion can happen", + sInfo.osName.c_str()); + bFallbackTypesUsed = true; + bTypeOK = true; + break; + } + else if (eOGRType == OFTReal && sType.eType == OFTInteger64) + { + // Potentially lossy. + CPLDebug("OGR", + "For field %s, writing from Arrow array of " + "type Int64 into OGR Real field. 
" + "Potentially loss conversion can happen", + sInfo.osName.c_str()); + bFallbackTypesUsed = true; + bTypeOK = true; + break; + } + else if ((eOGRType == OFTInteger64 || + eOGRType == OFTReal) && + sType.eType == OFTInteger) + { + // Non-lossy + bFallbackTypesUsed = true; + bTypeOK = true; + break; + } else { CPLError(CE_Failure, CPLE_AppDefined, @@ -7153,8 +7211,18 @@ static bool FillFeature(OGRLayer *poLayer, const struct ArrowSchema *schema, * will be supported by WriteArrowBatch(). * * OGR fields for the corresponding children arrays must exist and be of a - * compatible type. For attribute fields, they should be created with - * CreateFieldFromArrowSchema(). + * compatible type. For attribute fields, they should generally be created with + * CreateFieldFromArrowSchema(). This is strictly required for output drivers + * Arrow or Parquet, and strongly recommended otherwise. For geometry fields, + * they should be created either implicitly at CreateLayer() type + * (if geom_type != wkbNone), or explicitly with CreateGeomField(). + * + * Starting with GDAL 3.9, some tolerance has been introduced in the base + * implementation of WriteArrowBatch() for scenarios that involve appending to + * an already existing output layer when the input Arrow field type and the + * OGR layer field type are 32/64-bi integers or real number, but do not match + * exactly, which may cause lossy conversions. The IF_FIELD_NOT_PRESERVED option + * can be used to control the behavior in case of lossy conversion. * * Arrays for geometry columns should be of binary or large binary type and * contain WKB geometry. @@ -7180,6 +7248,14 @@ static bool FillFeature(OGRLayer *poLayer, const struct ArrowSchema *schema, * to WARNING will cause the function to emit a warning but continue its * processing. * </li> + * <li>IF_FIELD_NOT_PRESERVED=ERROR/WARNING. (since GDAL 3.9) + * Action to perform when the input field value is not preserved in the + * output layer. + * The default is WARNING, which will cause the function to emit a warning + * but continue its processing. + * Setting it to ERROR will cause the function to error out if a lossy + * conversion is detected. + * </li> * <li>GEOMETRY_NAME=name. Name of the geometry column. If not provided, * GetGeometryColumn() is used. 
The special name * OGRLayer::DEFAULT_ARROW_GEOMETRY_NAME is also recognized if neither @@ -7315,6 +7391,9 @@ bool OGRLayer::WriteArrowBatch(const struct ArrowSchema *schema, const bool bWarningIfFIDNotPreserved = EQUAL(CSLFetchNameValueDef(papszOptions, "IF_FID_NOT_PRESERVED", ""), "WARNING"); + const bool bErrorIfFieldNotPreserved = + EQUAL(CSLFetchNameValueDef(papszOptions, "IF_FIELD_NOT_PRESERVED", ""), + "ERROR"); const char *pszGeomFieldName = CSLFetchNameValueDef( papszOptions, "GEOMETRY_NAME", GetGeometryColumn()); if (!pszGeomFieldName || pszGeomFieldName[0] == 0) @@ -7485,6 +7564,59 @@ bool OGRLayer::WriteArrowBatch(const struct ArrowSchema *schema, /*bForgiving=*/true, /*bUseISO8601ForDateTimeAsString=*/true); oFeatureTarget.SetFID(oFeature.GetFID()); + + if (bErrorIfFieldNotPreserved) + { + for (int i = 0; i < poLayerDefn->GetFieldCount(); ++i) + { + if (!oFeature.IsFieldSetAndNotNullUnsafe(i)) + { + continue; + } + bool bLossyConversion = false; + const auto eSrcType = + oLayerDefnTmp.GetFieldDefnUnsafe(i)->GetType(); + const auto eDstType = + poLayerDefn->GetFieldDefnUnsafe(i)->GetType(); + if (eSrcType == OFTInteger64 && eDstType == OFTInteger && + oFeatureTarget.GetFieldAsIntegerUnsafe(i) != + oFeature.GetFieldAsInteger64Unsafe(i)) + { + bLossyConversion = true; + } + else if (eSrcType == OFTReal && eDstType == OFTInteger && + oFeatureTarget.GetFieldAsIntegerUnsafe(i) != + oFeature.GetFieldAsDoubleUnsafe(i)) + { + bLossyConversion = true; + } + else if (eSrcType == OFTReal && eDstType == OFTInteger64 && + static_cast<double>( + oFeatureTarget.GetFieldAsInteger64Unsafe(i)) != + oFeature.GetFieldAsDoubleUnsafe(i)) + { + bLossyConversion = true; + } + else if (eSrcType == OFTInteger64 && eDstType == OFTReal && + static_cast<GIntBig>( + oFeatureTarget.GetFieldAsDoubleUnsafe(i)) != + oFeature.GetFieldAsInteger64Unsafe(i)) + { + bLossyConversion = true; + } + if (bLossyConversion) + { + CPLError(CE_Failure, CPLE_AppDefined, + "For feature " CPL_FRMT_GIB + ", value of field %s cannot not preserved", + oFeatureTarget.GetFID(), + oLayerDefnTmp.GetFieldDefn(i)->GetNameRef()); + if (bTransactionOK) + RollbackTransaction(); + return false; + } + } + } } const auto nInputFID = poFeatureTarget->GetFID(); @@ -7598,10 +7730,18 @@ bool OGRLayer::WriteArrowBatch(const struct ArrowSchema *schema, * will be supported by WriteArrowBatch(). * * OGR fields for the corresponding children arrays must exist and be of a - * compatible type. For attribute fields, they should be created with - * CreateFieldFromArrowSchema(). For geometry fields, they should be created - * either implicitly at CreateLayer() type (if geom_type != wkbNone), or - * explicitly with CreateGeomField(). + * compatible type. For attribute fields, they should generally be created with + * CreateFieldFromArrowSchema(). This is strictly required for output drivers + * Arrow or Parquet, and strongly recommended otherwise. For geometry fields, + * they should be created either implicitly at CreateLayer() type + * (if geom_type != wkbNone), or explicitly with CreateGeomField(). + * + * Starting with GDAL 3.9, some tolerance has been introduced in the base + * implementation of WriteArrowBatch() for scenarios that involve appending to + * an already existing output layer when the input Arrow field type and the + * OGR layer field type are 32/64-bi integers or real number, but do not match + * exactly, which may cause lossy conversions. 
The IF_FIELD_NOT_PRESERVED option + * can be used to control the behavior in case of lossy conversion. * * Arrays for geometry columns should be of binary or large binary type and * contain WKB geometry. @@ -7627,6 +7767,14 @@ bool OGRLayer::WriteArrowBatch(const struct ArrowSchema *schema, * to WARNING will cause the function to emit a warning but continue its * processing. * </li> + * <li>IF_FIELD_NOT_PRESERVED=ERROR/WARNING. (since GDAL 3.9) + * Action to perform when the input field value is not preserved in the + * output layer. + * The default is WARNING, which will cause the function to emit a warning + * but continue its processing. + * Setting it to ERROR will cause the function to error out if a lossy + * conversion is detected. + * </li> * <li>GEOMETRY_NAME=name. Name of the geometry column. If not provided, * GetGeometryColumn() is used. The special name * OGRLayer::DEFAULT_ARROW_GEOMETRY_NAME is also recognized if neither From 94d08f6b3c0c27ff05ae41a0ce0b1e864a117c43 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 28 Apr 2024 21:15:54 +0200 Subject: [PATCH 218/230] Parquet: fix ResetReading() implementation, when using the ParquetDataset API and when there's a single batch --- autotest/ogr/ogr_parquet.py | 18 ++++++++++++++++++ ogr/ogrsf_frmts/parquet/ogr_parquet.h | 3 ++- .../parquet/ogrparquetdatasetlayer.cpp | 10 ---------- ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp | 17 +++++++++++++---- 4 files changed, 33 insertions(+), 15 deletions(-) diff --git a/autotest/ogr/ogr_parquet.py b/autotest/ogr/ogr_parquet.py index 129cc2beed51..e08e4f00d27a 100755 --- a/autotest/ogr/ogr_parquet.py +++ b/autotest/ogr/ogr_parquet.py @@ -1550,6 +1550,24 @@ def test_ogr_parquet_read_partitioned_flat(use_vsi, use_metadata_file, prefix): del s +############################################################################### +# Run test_ogrsf + + +@pytest.mark.skipif(not _has_arrow_dataset(), reason="GDAL not built with ArrowDataset") +def test_ogr_parquet_test_ogrsf_dataset(): + + if test_cli_utilities.get_test_ogrsf_path() is None: + pytest.skip() + + ret = gdaltest.runexternal( + test_cli_utilities.get_test_ogrsf_path() + " -ro data/parquet/partitioned_flat" + ) + + assert "INFO" in ret + assert "ERROR" not in ret + + ############################################################################### # Test reading a HIVE partitioned dataset diff --git a/ogr/ogrsf_frmts/parquet/ogr_parquet.h b/ogr/ogrsf_frmts/parquet/ogr_parquet.h index d52ce264f3f1..8b6c84c08c65 100644 --- a/ogr/ogrsf_frmts/parquet/ogr_parquet.h +++ b/ogr/ogrsf_frmts/parquet/ogr_parquet.h @@ -66,6 +66,8 @@ class OGRParquetLayerBase CPL_NON_FINAL : public OGRArrowLayer public: int TestCapability(const char *) override; + void ResetReading() override; + GDALDataset *GetDataset() override; }; @@ -245,7 +247,6 @@ class OGRParquetDatasetLayer final : public OGRParquetLayerBase const std::shared_ptr<arrow::Schema> &schema, CSLConstList papszOpenOptions); - void ResetReading() override; GIntBig GetFeatureCount(int bForce) override; OGRErr GetExtent(OGREnvelope *psExtent, int bForce = TRUE) override; OGRErr GetExtent(int iGeomField, OGREnvelope *psExtent, diff --git a/ogr/ogrsf_frmts/parquet/ogrparquetdatasetlayer.cpp b/ogr/ogrsf_frmts/parquet/ogrparquetdatasetlayer.cpp index 571dca353072..da81f47b0f44 100644 --- a/ogr/ogrsf_frmts/parquet/ogrparquetdatasetlayer.cpp +++ b/ogr/ogrsf_frmts/parquet/ogrparquetdatasetlayer.cpp @@ -95,16 +95,6 @@ void OGRParquetDatasetLayer::EstablishFeatureDefn() 
m_poFeatureDefn->GetGeomFieldCount()); } -/************************************************************************/ -/* ResetReading() */ -/************************************************************************/ - -void OGRParquetDatasetLayer::ResetReading() -{ - m_poRecordBatchReader.reset(); - OGRParquetLayerBase::ResetReading(); -} - /************************************************************************/ /* ReadNextBatch() */ /************************************************************************/ diff --git a/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp b/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp index ce09378b2c2a..67938e383e1c 100644 --- a/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp +++ b/ogr/ogrsf_frmts/parquet/ogrparquetlayer.cpp @@ -70,6 +70,19 @@ GDALDataset *OGRParquetLayerBase::GetDataset() return m_poDS; } +/************************************************************************/ +/* ResetReading() */ +/************************************************************************/ + +void OGRParquetLayerBase::ResetReading() +{ + if (m_iRecordBatch != 0) + { + m_poRecordBatchReader.reset(); + } + OGRArrowLayer::ResetReading(); +} + /************************************************************************/ /* LoadGeoMetadata() */ /************************************************************************/ @@ -1250,10 +1263,6 @@ OGRFeature *OGRParquetLayer::GetFeature(GIntBig nFID) void OGRParquetLayer::ResetReading() { - if (m_iRecordBatch != 0) - { - m_poRecordBatchReader.reset(); - } OGRParquetLayerBase::ResetReading(); m_oFeatureIdxRemappingIter = m_asFeatureIdxRemapping.begin(); m_nFeatureIdxSelected = 0; From cd641e2423d19706073eebe20fd96e1f41e4cc22 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 28 Apr 2024 21:17:38 +0200 Subject: [PATCH 219/230] Parquet: fix opening single file Parquet datasets with the ParquetDataset API when using PARQUET:filename.parquet --- autotest/ogr/ogr_parquet.py | 19 ++++++++ ogr/ogrsf_frmts/parquet/ogrparquetdriver.cpp | 49 ++++++++++++-------- 2 files changed, 49 insertions(+), 19 deletions(-) diff --git a/autotest/ogr/ogr_parquet.py b/autotest/ogr/ogr_parquet.py index e08e4f00d27a..a7679ad7d3a5 100755 --- a/autotest/ogr/ogr_parquet.py +++ b/autotest/ogr/ogr_parquet.py @@ -1568,6 +1568,25 @@ def test_ogr_parquet_test_ogrsf_dataset(): assert "ERROR" not in ret +############################################################################### +# Run test_ogrsf + + +@pytest.mark.skipif(not _has_arrow_dataset(), reason="GDAL not built with ArrowDataset") +def test_ogr_parquet_test_ogrsf_dataset_on_file(): + + if test_cli_utilities.get_test_ogrsf_path() is None: + pytest.skip() + + ret = gdaltest.runexternal( + test_cli_utilities.get_test_ogrsf_path() + + " -ro PARQUET:data/parquet/test.parquet" + ) + + assert "INFO" in ret + assert "ERROR" not in ret + + ############################################################################### # Test reading a HIVE partitioned dataset diff --git a/ogr/ogrsf_frmts/parquet/ogrparquetdriver.cpp b/ogr/ogrsf_frmts/parquet/ogrparquetdriver.cpp index 5756ced588fd..96719ea79db0 100644 --- a/ogr/ogrsf_frmts/parquet/ogrparquetdriver.cpp +++ b/ogr/ogrsf_frmts/parquet/ogrparquetdriver.cpp @@ -391,20 +391,33 @@ OpenParquetDatasetWithoutMetadata(const std::string &osBasePathIn, auto fs = GetFileSystem(osBasePath, osQueryParameters); arrow::dataset::FileSystemFactoryOptions options; - auto partitioningFactory = arrow::dataset::HivePartitioning::MakeFactory(); - options.partitioning = - 
arrow::dataset::PartitioningOrFactory(std::move(partitioningFactory)); + std::shared_ptr<arrow::dataset::DatasetFactory> factory; + VSIStatBufL sStat; + if (VSIStatL(osBasePath.c_str(), &sStat) == 0 && VSI_ISREG(sStat.st_mode)) + { + PARQUET_ASSIGN_OR_THROW( + factory, arrow::dataset::FileSystemDatasetFactory::Make( + std::move(fs), {osBasePath}, + std::make_shared<arrow::dataset::ParquetFileFormat>(), + std::move(options))); + } + else + { + auto partitioningFactory = + arrow::dataset::HivePartitioning::MakeFactory(); + options.partitioning = arrow::dataset::PartitioningOrFactory( + std::move(partitioningFactory)); - arrow::fs::FileSelector selector; - selector.base_dir = osBasePath; - selector.recursive = true; + arrow::fs::FileSelector selector; + selector.base_dir = osBasePath; + selector.recursive = true; - std::shared_ptr<arrow::dataset::DatasetFactory> factory; - PARQUET_ASSIGN_OR_THROW( - factory, arrow::dataset::FileSystemDatasetFactory::Make( - std::move(fs), std::move(selector), - std::make_shared<arrow::dataset::ParquetFileFormat>(), - std::move(options))); + PARQUET_ASSIGN_OR_THROW( + factory, arrow::dataset::FileSystemDatasetFactory::Make( + std::move(fs), std::move(selector), + std::make_shared<arrow::dataset::ParquetFileFormat>(), + std::move(options))); + } return OpenFromDatasetFactory(osBasePath, factory, papszOpenOptions); } @@ -597,20 +610,19 @@ static GDALDataset *OGRParquetDriverOpen(GDALOpenInfo *poOpenInfo) // Detect if the directory contains .parquet files, or // subdirectories with a name of the form "key=value", typical // of HIVE partitioning. - char **papszFiles = VSIReadDir(osBasePath.c_str()); - for (char **papszIter = papszFiles; papszIter && *papszIter; - ++papszIter) + const CPLStringList aosFiles(VSIReadDir(osBasePath.c_str())); + for (const char *pszFilename : cpl::Iterate(aosFiles)) { - if (EQUAL(CPLGetExtension(*papszIter), "parquet")) + if (EQUAL(CPLGetExtension(pszFilename), "parquet")) { bLikelyParquetDataset = true; break; } - else if (strchr(*papszIter, '=')) + else if (strchr(pszFilename, '=')) { // HIVE partitioning if (VSIStatL(CPLFormFilename(osBasePath.c_str(), - *papszIter, nullptr), + pszFilename, nullptr), &sStat) == 0 && VSI_ISDIR(sStat.st_mode)) { @@ -619,7 +631,6 @@ static GDALDataset *OGRParquetDriverOpen(GDALOpenInfo *poOpenInfo) } } } - CSLDestroy(papszFiles); } if (bStartedWithParquetPrefix || bLikelyParquetDataset) From 9c95548f01ab8fadacc241ceadd3554191395bfe Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 28 Apr 2024 21:28:02 +0200 Subject: [PATCH 220/230] Really fix Coverity Scan warning --- alg/gdalgrid.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/alg/gdalgrid.cpp b/alg/gdalgrid.cpp index 51378a9dd420..9e46953da70e 100644 --- a/alg/gdalgrid.cpp +++ b/alg/gdalgrid.cpp @@ -2601,8 +2601,8 @@ static int GDALGridProgressMultiThread(GDALGridJob *psJob) // Return TRUE if the computation must be interrupted. 
static int GDALGridProgressMonoThread(GDALGridJob *psJob) { - const int nCounter = ++(*psJob->pnCounter); // coverity[missing_lock] + const int nCounter = ++(*psJob->pnCounter); if (!psJob->pfnRealProgress(nCounter / static_cast<double>(psJob->nYSize), "", psJob->pRealProgressArg)) { From 854600477d425a3a7168ac75102af278683f79e9 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Sun, 28 Apr 2024 23:02:45 +0200 Subject: [PATCH 221/230] OGRCloneArrowArray(): add missing support for 'tss:' Arrow type --- autotest/ogr/ogr_arrow.py | 18 ++++ ogr/ogrsf_frmts/generic/ogrlayerarrow.cpp | 104 +++++++++++----------- 2 files changed, 70 insertions(+), 52 deletions(-) diff --git a/autotest/ogr/ogr_arrow.py b/autotest/ogr/ogr_arrow.py index 8e33f05b87a7..5c2088f71165 100755 --- a/autotest/ogr/ogr_arrow.py +++ b/autotest/ogr/ogr_arrow.py @@ -184,6 +184,24 @@ def test_ogr_arrow_test_ogrsf_test_feather(): assert "ERROR" not in ret +############################################################################### +# Run test_ogrsf on a Feather file + + +def test_ogr_arrow_test_ogrsf_test_feather_all_types(): + import test_cli_utilities + + if test_cli_utilities.get_test_ogrsf_path() is None: + pytest.skip() + + ret = gdaltest.runexternal( + test_cli_utilities.get_test_ogrsf_path() + " -ro data/arrow/test.feather" + ) + + assert "INFO" in ret + assert "ERROR" not in ret + + ############################################################################### # Run test_ogrsf on a IPC stream file diff --git a/ogr/ogrsf_frmts/generic/ogrlayerarrow.cpp b/ogr/ogrsf_frmts/generic/ogrlayerarrow.cpp index 4ab02935f5ba..766af381e420 100644 --- a/ogr/ogrsf_frmts/generic/ogrlayerarrow.cpp +++ b/ogr/ogrsf_frmts/generic/ogrlayerarrow.cpp @@ -202,6 +202,43 @@ static inline bool IsLargeBinary(const char *format) return format[0] == ARROW_LETTER_LARGE_BINARY && format[1] == 0; } +static inline bool IsTimestampInternal(const char *format, char chType) +{ + return format[0] == 't' && format[1] == 's' && format[2] == chType && + format[3] == ':'; +} + +static inline bool IsTimestampSeconds(const char *format) +{ + return IsTimestampInternal(format, 's'); +} + +static inline bool IsTimestampMilliseconds(const char *format) +{ + return IsTimestampInternal(format, 'm'); +} + +static inline bool IsTimestampMicroseconds(const char *format) +{ + return IsTimestampInternal(format, 'u'); +} + +static inline bool IsTimestampNanoseconds(const char *format) +{ + return IsTimestampInternal(format, 'n'); +} + +static inline bool IsTimestamp(const char *format) +{ + return IsTimestampSeconds(format) || IsTimestampMilliseconds(format) || + IsTimestampMicroseconds(format) || IsTimestampNanoseconds(format); +} + +static inline const char *GetTimestampTimezone(const char *format) +{ + return IsTimestamp(format) ? 
format + strlen("tm?:") : ""; +} + /************************************************************************/ /* TestBit() */ /************************************************************************/ @@ -2820,20 +2857,9 @@ static bool IsHandledSchema(bool bTopLevel, const struct ArrowSchema *schema, return true; } - const char *const apszHandledFormatsPrefix[] = { - "w:", // fixed width binary - "tss:", // timestamp [seconds] with timezone - "tsm:", // timestamp [milliseconds] with timezone - "tsu:", // timestamp [microseconds] with timezone - "tsn:", // timestamp [nanoseconds] with timezone - }; - - for (const char *pszHandledFormat : apszHandledFormatsPrefix) + if (IsFixedWidthBinary(format) || IsTimestamp(format)) { - if (strncmp(format, pszHandledFormat, strlen(pszHandledFormat)) == 0) - { - return true; - } + return true; } CPLDebug("OGR", "Field %s has unhandled format '%s'", @@ -4303,37 +4329,30 @@ static bool SetFieldForOtherFormats(OGRFeature &oFeature, static_cast<GIntBig>(static_cast<const int64_t *>( array->buffers[1])[nOffsettedIndex])); } - else if (format[0] == 't' && format[1] == 's' && format[2] == 's' && - format[3] == ':') // STARTS_WITH(format, "tss:") + else if (IsTimestampSeconds(format)) { - // timestamp [seconds] with timezone ArrowTimestampToOGRDateTime( static_cast<const int64_t *>(array->buffers[1])[nOffsettedIndex], 1, - format + strlen("tss:"), oFeature, iOGRFieldIndex); + GetTimestampTimezone(format), oFeature, iOGRFieldIndex); } - else if (format[0] == 't' && format[1] == 's' && format[2] == 'm' && - format[3] == ':') // STARTS_WITH(format, "tsm:")) + else if (IsTimestampMilliseconds(format)) { - // timestamp [milliseconds] with timezone ArrowTimestampToOGRDateTime( static_cast<const int64_t *>(array->buffers[1])[nOffsettedIndex], - 1000, format + strlen("tsm:"), oFeature, iOGRFieldIndex); + 1000, GetTimestampTimezone(format), oFeature, iOGRFieldIndex); } - else if (format[0] == 't' && format[1] == 's' && format[2] == 'u' && - format[3] == ':') // STARTS_WITH(format, "tsu:")) + else if (IsTimestampMicroseconds(format)) { - // timestamp [microseconds] with timezone ArrowTimestampToOGRDateTime( static_cast<const int64_t *>(array->buffers[1])[nOffsettedIndex], - 1000 * 1000, format + strlen("tsu:"), oFeature, iOGRFieldIndex); + 1000 * 1000, GetTimestampTimezone(format), oFeature, + iOGRFieldIndex); } - else if (format[0] == 't' && format[1] == 's' && format[2] == 'n' && - format[3] == ':') // STARTS_WITH(format, "tsn:")) + else if (IsTimestampNanoseconds(format)) { - // timestamp [nanoseconds] with timezone ArrowTimestampToOGRDateTime( static_cast<const int64_t *>(array->buffers[1])[nOffsettedIndex], - 1000 * 1000 * 1000, format + strlen("tsn:"), oFeature, + 1000 * 1000 * 1000, GetTimestampTimezone(format), oFeature, iOGRFieldIndex); } else if (IsFixedSizeList(format)) @@ -5227,9 +5246,7 @@ static bool OGRCloneArrowArray(const struct ArrowSchema *schema, } else if (IsUInt64(format) || IsInt64(format) || IsFloat64(format) || strcmp(format, "tdm") == 0 || strcmp(format, "ttu") == 0 || - strcmp(format, "ttn") == 0 || strcmp(format, "tss") == 0 || - STARTS_WITH(format, "tsm:") || - STARTS_WITH(format, "tsu:") || STARTS_WITH(format, "tsn:")) + strcmp(format, "ttn") == 0 || IsTimestamp(format)) { nEltSize = sizeof(uint64_t); } @@ -5692,21 +5709,9 @@ static bool IsArrowSchemaSupportedInternal(const struct ArrowSchema *schema, } } - if (IsFixedWidthBinary(format)) + if (IsFixedWidthBinary(format) || IsTimestamp(format)) return true; - const char *const apszTimestamps[] 
= { - "tss:", // timestamp[s] - "tsm:", // timestamp[ms] - "tsu:", // timestamp[us] - "tsn:" // timestamp[ns] - }; - for (const char *pszSupported : apszTimestamps) - { - if (STARTS_WITH(format, pszSupported)) - return true; - } - AppendError("Type '" + std::string(format) + "' for field " + osFieldPrefix + fieldName + " is not supported."); return false; @@ -5991,10 +5996,7 @@ bool OGRLayer::CreateFieldFromArrowSchemaInternal( return AddField(OFTString, OFSTJSON, 0, 0); } - if (STARTS_WITH(format, "tss:") || // timestamp[s] - STARTS_WITH(format, "tsm:") || // timestamp[ms] - STARTS_WITH(format, "tsu:") || // timestamp[us] - STARTS_WITH(format, "tsn:")) // timestamp[ns] + if (IsTimestamp(format)) { return AddField(OFTDateTime, OFSTNone, 0, 0); } @@ -6343,9 +6345,7 @@ static bool BuildOGRFieldInfo( } } - if (!bTypeOK && - (STARTS_WITH(format, "tss:") || STARTS_WITH(format, "tsm:") || - STARTS_WITH(format, "tsu:") || STARTS_WITH(format, "tsn:"))) + if (!bTypeOK && IsTimestamp(format)) { sInfo.eNominalFieldType = OFTDateTime; if (eOGRType == sInfo.eNominalFieldType) From b96774086f500c8b23ab5fb1d8805962d4308565 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Apr 2024 02:16:22 +0000 Subject: [PATCH 222/230] Bump actions/upload-artifact from 4.3.2 to 4.3.3 Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 4.3.2 to 4.3.3. - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/1746f4ab65b179e0ea60a494b83293b640dd5bba...65462800fd760344b1a7b4382951275a0abb4808) --- updated-dependencies: - dependency-name: actions/upload-artifact dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] <support@github.com> --- .github/workflows/cifuzz.yml | 2 +- .github/workflows/conda.yml | 2 +- .github/workflows/doc_build.yml | 6 +++--- .github/workflows/linux_build.yml | 4 ++-- .github/workflows/scorecard.yml | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/cifuzz.yml b/.github/workflows/cifuzz.yml index 54ca94a51caf..c482fb8973b0 100644 --- a/.github/workflows/cifuzz.yml +++ b/.github/workflows/cifuzz.yml @@ -31,7 +31,7 @@ jobs: fuzz-seconds: 600 dry-run: false - name: Upload Crash - uses: actions/upload-artifact@1746f4ab65b179e0ea60a494b83293b640dd5bba # v4.3.2 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 if: failure() && steps.build.outcome == 'success' with: name: artifacts diff --git a/.github/workflows/conda.yml b/.github/workflows/conda.yml index d64da3f0c831..b24f7a990b63 100644 --- a/.github/workflows/conda.yml +++ b/.github/workflows/conda.yml @@ -74,7 +74,7 @@ jobs: source ../ci/travis/conda/compile.sh working-directory: ./gdal-feedstock - - uses: actions/upload-artifact@1746f4ab65b179e0ea60a494b83293b640dd5bba # v4.3.2 + - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 with: name: ${{ matrix.platform }}-conda-package path: ./gdal-feedstock/packages/ diff --git a/.github/workflows/doc_build.yml b/.github/workflows/doc_build.yml index fe5a40cfa5c2..7b889e93ca06 100644 --- a/.github/workflows/doc_build.yml +++ b/.github/workflows/doc_build.yml @@ -89,15 +89,15 @@ jobs: # run: | # make spelling # working-directory: ./doc - - uses: actions/upload-artifact@1746f4ab65b179e0ea60a494b83293b640dd5bba # v4.3.2 + - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 with: name: PDF path: doc/build/latex/gdal.pdf - - uses: actions/upload-artifact@1746f4ab65b179e0ea60a494b83293b640dd5bba # v4.3.2 + - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 with: name: HTML path: doc/build/html/* - #- uses: actions/upload-artifact@1746f4ab65b179e0ea60a494b83293b640dd5bba # v4.3.2 + #- uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 # with: # name: Misspelled # path: doc/build/spelling/output.txt diff --git a/.github/workflows/linux_build.yml b/.github/workflows/linux_build.yml index f9e150cd954a..87e427d77f30 100644 --- a/.github/workflows/linux_build.yml +++ b/.github/workflows/linux_build.yml @@ -338,14 +338,14 @@ jobs: docker push ${CONTAINER_NAME_FULL} - name: Upload coverage artifacts - uses: actions/upload-artifact@1746f4ab65b179e0ea60a494b83293b640dd5bba # v4.3.2 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 if: ${{ matrix.id == 'coverage' }} with: name: coverage_index.html path: build-${{ matrix.id }}/coverage_html/index.html - name: Upload coverage artifacts - uses: actions/upload-artifact@1746f4ab65b179e0ea60a494b83293b640dd5bba # v4.3.2 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 if: ${{ matrix.id == 'coverage' }} with: name: HTML diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index dd7972de2296..5f6dd89adc86 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -63,7 +63,7 @@ jobs: # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF # format to the repository Actions tab. 
- name: "Upload artifact" - uses: actions/upload-artifact@1746f4ab65b179e0ea60a494b83293b640dd5bba # v4.3.2 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 with: name: SARIF file path: results.sarif From ecc244bd695775f276c548894c66ffb80c7710b9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Apr 2024 02:16:27 +0000 Subject: [PATCH 223/230] Bump conda-incubator/setup-miniconda from 3.0.3 to 3.0.4 Bumps [conda-incubator/setup-miniconda](https://github.com/conda-incubator/setup-miniconda) from 3.0.3 to 3.0.4. - [Release notes](https://github.com/conda-incubator/setup-miniconda/releases) - [Changelog](https://github.com/conda-incubator/setup-miniconda/blob/main/CHANGELOG.md) - [Commits](https://github.com/conda-incubator/setup-miniconda/compare/030178870c779d9e5e1b4e563269f3aa69b04081...a4260408e20b96e80095f42ff7f1a15b27dd94ca) --- updated-dependencies: - dependency-name: conda-incubator/setup-miniconda dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] <support@github.com> --- .github/workflows/cmake_builds.yml | 6 +++--- .github/workflows/conda.yml | 2 +- .github/workflows/macos.yml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/cmake_builds.yml b/.github/workflows/cmake_builds.yml index 3ae5e67a55e6..c80bb1483298 100644 --- a/.github/workflows/cmake_builds.yml +++ b/.github/workflows/cmake_builds.yml @@ -410,7 +410,7 @@ jobs: shell: pwsh run: | echo "JAVA_HOME=$env:JAVA_HOME_11_X64" >> %GITHUB_ENV% - - uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 # v3.0.3 + - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 with: activate-environment: gdalenv miniforge-variant: Mambaforge @@ -507,7 +507,7 @@ jobs: git config --global core.autocrlf false - name: Checkout GDAL uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - - uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 # v3.0.3 + - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 with: activate-environment: gdalenv miniforge-variant: Mambaforge @@ -654,7 +654,7 @@ jobs: git config --global core.autocrlf false - name: Checkout GDAL uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - - uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 # v3.0.3 + - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 with: activate-environment: gdalenv python-version: 3.9 diff --git a/.github/workflows/conda.yml b/.github/workflows/conda.yml index d64da3f0c831..41d25c73bf30 100644 --- a/.github/workflows/conda.yml +++ b/.github/workflows/conda.yml @@ -49,7 +49,7 @@ jobs: path: ~/conda_pkgs_dir key: ${{ runner.os }}-${{ steps.get-date.outputs.today }}-conda-${{ env.CACHE_NUMBER }} - - uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 # v3.0.3 + - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 with: #miniforge-variant: Mambaforge miniforge-version: latest diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml index d4b5e2087ec9..2d7f4baa80cc 100644 --- a/.github/workflows/macos.yml +++ b/.github/workflows/macos.yml @@ -27,7 +27,7 @@ jobs: - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 - - uses: 
conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 # v3.0.3 + - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 with: channels: conda-forge auto-update-conda: true From 55205987fd6c3eeb0daff8adf3cec384f76e64bd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Apr 2024 02:16:45 +0000 Subject: [PATCH 224/230] Bump github/codeql-action from 3.25.1 to 3.25.3 Bumps [github/codeql-action](https://github.com/github/codeql-action) from 3.25.1 to 3.25.3. - [Release notes](https://github.com/github/codeql-action/releases) - [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/github/codeql-action/compare/c7f9125735019aa87cfc361530512d50ea439c71...d39d31e687223d841ef683f52467bd88e9b21c14) --- updated-dependencies: - dependency-name: github/codeql-action dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] <support@github.com> --- .github/workflows/codeql.yml | 4 ++-- .github/workflows/scorecard.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 3e38e1168049..b354dbf08ca0 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -98,7 +98,7 @@ jobs: # We do that after running CMake to avoid CodeQL to trigger during CMake time, # in particular during HDF5 detection which is terribly slow (https://github.com/OSGeo/gdal/issues/9549) - name: Initialize CodeQL - uses: github/codeql-action/init@c7f9125735019aa87cfc361530512d50ea439c71 # v3.25.1 + uses: github/codeql-action/init@d39d31e687223d841ef683f52467bd88e9b21c14 # v3.25.3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -118,6 +118,6 @@ jobs: (cd build && make -j$(nproc)) - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@c7f9125735019aa87cfc361530512d50ea439c71 # v3.25.1 + uses: github/codeql-action/analyze@d39d31e687223d841ef683f52467bd88e9b21c14 # v3.25.3 with: category: "/language:${{matrix.language}}" diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index dd7972de2296..3553cf4efe1d 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -71,6 +71,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. - name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@c7f9125735019aa87cfc361530512d50ea439c71 # v3.25.1 + uses: github/codeql-action/upload-sarif@d39d31e687223d841ef683f52467bd88e9b21c14 # v3.25.3 with: sarif_file: results.sarif From df7d28826ef85f44a56cfb33570e98f5b5392742 Mon Sep 17 00:00:00 2001 From: AbelPau <92721356+AbelPau@users.noreply.github.com> Date: Mon, 29 Apr 2024 13:29:46 +0200 Subject: [PATCH 225/230] MiraMonVector: fix memleak in creation error case (#9805) Removing pFExtDBF because it was a simplified way of saying pMMBDXP->pfDataBase makes more understandable the code. 
Fixes https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=68316 --- .../miramon/mm_gdal_driver_structs.h | 2 - ogr/ogrsf_frmts/miramon/mm_gdal_functions.c | 54 +++++++++++-------- ogr/ogrsf_frmts/miramon/mm_gdal_functions.h | 4 +- ogr/ogrsf_frmts/miramon/mm_rdlayr.c | 2 + ogr/ogrsf_frmts/miramon/mm_wrlayr.c | 45 +++++++--------- 5 files changed, 54 insertions(+), 53 deletions(-) diff --git a/ogr/ogrsf_frmts/miramon/mm_gdal_driver_structs.h b/ogr/ogrsf_frmts/miramon/mm_gdal_driver_structs.h index 5ae905329dd4..a90fe90faa00 100644 --- a/ogr/ogrsf_frmts/miramon/mm_gdal_driver_structs.h +++ b/ogr/ogrsf_frmts/miramon/mm_gdal_driver_structs.h @@ -354,8 +354,6 @@ struct MMAdmDatabase // MiraMon table (extended DBF) // Name of the extended DBF file char pszExtDBFLayerName[MM_CPL_PATH_BUF_SIZE]; - // Pointer to the extended DBF file - FILE_TYPE *pFExtDBF; // Pointer to a MiraMon table (auxiliary) struct MM_DATA_BASE_XP *pMMBDXP; // How to write all it to disk diff --git a/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c b/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c index a7f52941ccf6..69d6004736c4 100644 --- a/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c +++ b/ogr/ogrsf_frmts/miramon/mm_gdal_functions.c @@ -640,11 +640,12 @@ MM_GiveOffsetExtendedFieldName(const struct MM_FIELD *camp) int MM_WriteNRecordsMMBD_XPFile(struct MMAdmDatabase *MMAdmDB) { - if (!MMAdmDB->pMMBDXP || !MMAdmDB->pFExtDBF) + if (!MMAdmDB->pMMBDXP || !MMAdmDB->pMMBDXP->pfDataBase) return 0; // Updating number of features in features table - fseek_function(MMAdmDB->pFExtDBF, MM_FIRST_OFFSET_to_N_RECORDS, SEEK_SET); + fseek_function(MMAdmDB->pMMBDXP->pfDataBase, MM_FIRST_OFFSET_to_N_RECORDS, + SEEK_SET); if (MMAdmDB->pMMBDXP->nRecords > UINT32_MAX) { @@ -658,35 +659,39 @@ int MM_WriteNRecordsMMBD_XPFile(struct MMAdmDatabase *MMAdmDB) { GUInt32 nRecords32LowBits = (GUInt32)(MMAdmDB->pMMBDXP->nRecords & UINT32_MAX); - if (fwrite_function(&nRecords32LowBits, 4, 1, MMAdmDB->pFExtDBF) != 1) + if (fwrite_function(&nRecords32LowBits, 4, 1, + MMAdmDB->pMMBDXP->pfDataBase) != 1) return 1; } - fseek_function(MMAdmDB->pFExtDBF, MM_SECOND_OFFSET_to_N_RECORDS, SEEK_SET); + fseek_function(MMAdmDB->pMMBDXP->pfDataBase, MM_SECOND_OFFSET_to_N_RECORDS, + SEEK_SET); if (MMAdmDB->pMMBDXP->dbf_version == MM_MARCA_VERSIO_1_DBF_ESTESA) { /* from 16 to 19, position MM_SECOND_OFFSET_to_N_RECORDS */ GUInt32 nRecords32HighBits = (GUInt32)(MMAdmDB->pMMBDXP->nRecords >> 32); - if (fwrite_function(&nRecords32HighBits, 4, 1, MMAdmDB->pFExtDBF) != 1) + if (fwrite_function(&nRecords32HighBits, 4, 1, + MMAdmDB->pMMBDXP->pfDataBase) != 1) return 1; /* from 20 to 27 */ if (fwrite_function(&(MMAdmDB->pMMBDXP->dbf_on_a_LAN), 8, 1, - MMAdmDB->pFExtDBF) != 1) + MMAdmDB->pMMBDXP->pfDataBase) != 1) return 1; } else { if (fwrite_function(&(MMAdmDB->pMMBDXP->dbf_on_a_LAN), 12, 1, - MMAdmDB->pFExtDBF) != 1) + MMAdmDB->pMMBDXP->pfDataBase) != 1) return 1; } return 0; } -static MM_BOOLEAN MM_UpdateEntireHeader(struct MM_DATA_BASE_XP *data_base_XP) +static MM_BOOLEAN +MM_OpenIfNeededAndUpdateEntireHeader(struct MM_DATA_BASE_XP *data_base_XP) { MM_BYTE variable_byte; MM_EXT_DBF_N_FIELDS i, j = 0; @@ -698,12 +703,14 @@ static MM_BOOLEAN MM_UpdateEntireHeader(struct MM_DATA_BASE_XP *data_base_XP) int estat; char nom_camp[MM_MAX_LON_FIELD_NAME_DBF]; size_t retorn_fwrite; - MM_BOOLEAN table_should_be_closed = FALSE; + + if (!data_base_XP) + return FALSE; if (data_base_XP->pfDataBase == nullptr) { strcpy(ModeLectura_previ, data_base_XP->ReadingMode); - strcpy(data_base_XP->ReadingMode, 
"wb"); + strcpy(data_base_XP->ReadingMode, "wb+"); if ((data_base_XP->pfDataBase = fopen_function(data_base_XP->szFileName, @@ -711,8 +718,11 @@ static MM_BOOLEAN MM_UpdateEntireHeader(struct MM_DATA_BASE_XP *data_base_XP) { return FALSE; } - - table_should_be_closed = TRUE; + } + else + { + // If it's open we just update the header + fseek_function(data_base_XP->pfDataBase, 0, SEEK_SET); } if ((data_base_XP->nFields) > MM_MAX_N_CAMPS_DBF_CLASSICA) @@ -1067,23 +1077,18 @@ static MM_BOOLEAN MM_UpdateEntireHeader(struct MM_DATA_BASE_XP *data_base_XP) } } - if (table_should_be_closed) - { - fclose_and_nullify(&data_base_XP->pfDataBase); - } - return TRUE; -} /* End of MM_UpdateEntireHeader() */ +} /* End of MM_OpenIfNeededAndUpdateEntireHeader() */ -MM_BOOLEAN MM_CreateDBFFile(struct MM_DATA_BASE_XP *bd_xp, - const char *NomFitxer) +MM_BOOLEAN MM_CreateAndOpenDBFFile(struct MM_DATA_BASE_XP *bd_xp, + const char *NomFitxer) { if (!NomFitxer || MMIsEmptyString(NomFitxer) || !bd_xp) return FALSE; MM_CheckDBFHeader(bd_xp); CPLStrlcpy(bd_xp->szFileName, NomFitxer, sizeof(bd_xp->szFileName)); - return MM_UpdateEntireHeader(bd_xp); + return MM_OpenIfNeededAndUpdateEntireHeader(bd_xp); } void MM_ReleaseMainFields(struct MM_DATA_BASE_XP *data_base_XP) @@ -1138,7 +1143,7 @@ int MM_ReadExtendedDBFHeaderFromFile(const char *szFileName, GUInt32 nRecords32LowBits; char *pszString; - if (!szFileName) + if (!szFileName || !pMMBDXP) return 1; CPLStrlcpy(pMMBDXP->szFileName, szFileName, sizeof(pMMBDXP->szFileName)); @@ -2187,6 +2192,9 @@ int MM_ChangeDBFWidthField(struct MM_DATA_BASE_XP *data_base_XP, size_t retorn_fwrite; int retorn_TruncaFitxer; + if (!data_base_XP) + return 1; + canvi_amplada = nNewWidth - data_base_XP->pField[nIField].BytesPerField; if (data_base_XP->nRecords != 0) @@ -2379,7 +2387,7 @@ int MM_ChangeDBFWidthField(struct MM_DATA_BASE_XP *data_base_XP, } data_base_XP->pField[nIField].DecimalsIfFloat = nNewPrecision; - if ((MM_UpdateEntireHeader(data_base_XP)) == FALSE) + if ((MM_OpenIfNeededAndUpdateEntireHeader(data_base_XP)) == FALSE) return 1; return 0; diff --git a/ogr/ogrsf_frmts/miramon/mm_gdal_functions.h b/ogr/ogrsf_frmts/miramon/mm_gdal_functions.h index a26294010c5c..9fa16ad4b2f0 100644 --- a/ogr/ogrsf_frmts/miramon/mm_gdal_functions.h +++ b/ogr/ogrsf_frmts/miramon/mm_gdal_functions.h @@ -111,8 +111,8 @@ struct MM_DATA_BASE_XP *MM_CreateDBFHeader(MM_EXT_DBF_N_FIELDS n_camps, MM_BYTE nCharSet); void MM_ReleaseMainFields(struct MM_DATA_BASE_XP *data_base_XP); void MM_ReleaseDBFHeader(struct MM_DATA_BASE_XP *data_base_XP); -MM_BOOLEAN MM_CreateDBFFile(struct MM_DATA_BASE_XP *bd_xp, - const char *NomFitxer); +MM_BOOLEAN MM_CreateAndOpenDBFFile(struct MM_DATA_BASE_XP *bd_xp, + const char *NomFitxer); int MM_DuplicateFieldDBXP(struct MM_FIELD *camp_final, const struct MM_FIELD *camp_inicial); int MM_WriteNRecordsMMBD_XPFile(struct MMAdmDatabase *MMAdmDB); diff --git a/ogr/ogrsf_frmts/miramon/mm_rdlayr.c b/ogr/ogrsf_frmts/miramon/mm_rdlayr.c index d52f21c0c0d7..1f3a9fc61f88 100644 --- a/ogr/ogrsf_frmts/miramon/mm_rdlayr.c +++ b/ogr/ogrsf_frmts/miramon/mm_rdlayr.c @@ -672,6 +672,8 @@ int MM_ReadExtendedDBFHeader(struct MiraMonVectLayerInfo *hMiraMonLayer) return 0; pMMBDXP = hMiraMonLayer->pMMBDXP = calloc_function(sizeof(*pMMBDXP)); + if (!pMMBDXP) + return 1; if (hMiraMonLayer->bIsPoint) { diff --git a/ogr/ogrsf_frmts/miramon/mm_wrlayr.c b/ogr/ogrsf_frmts/miramon/mm_wrlayr.c index ae1101dd3aec..26a7764e1acc 100644 --- a/ogr/ogrsf_frmts/miramon/mm_wrlayr.c +++ 
b/ogr/ogrsf_frmts/miramon/mm_wrlayr.c @@ -6093,36 +6093,28 @@ int MMCheck_REL_FILE(const char *szREL_file) static int MMInitMMDB(struct MiraMonVectLayerInfo *hMiraMonLayer, struct MMAdmDatabase *pMMAdmDB) { - if (!hMiraMonLayer) - return 1; - - if (!pMMAdmDB) + if (!hMiraMonLayer || !pMMAdmDB) return 1; if (MMIsEmptyString(pMMAdmDB->pszExtDBFLayerName)) return 0; // No file, no error. Just continue strcpy(pMMAdmDB->pMMBDXP->ReadingMode, "wb+"); - if (FALSE == - MM_CreateDBFFile(pMMAdmDB->pMMBDXP, pMMAdmDB->pszExtDBFLayerName)) - return 1; - - // Opening the file - if (nullptr == (pMMAdmDB->pFExtDBF = - fopen_function(pMMAdmDB->pszExtDBFLayerName, - "r+b"))) //hMiraMonLayer->pszFlags))) + if (FALSE == MM_CreateAndOpenDBFFile(pMMAdmDB->pMMBDXP, + pMMAdmDB->pszExtDBFLayerName)) { MMCPLError(CE_Failure, CPLE_OpenFailed, - "Error pMMAdmDB: Cannot open file %s.", + "Error pMMAdmDB: Cannot create or open file %s.", pMMAdmDB->pszExtDBFLayerName); return 1; } - fseek_function(pMMAdmDB->pFExtDBF, pMMAdmDB->pMMBDXP->FirstRecordOffset, - SEEK_SET); - if (MMInitFlush(&pMMAdmDB->FlushRecList, pMMAdmDB->pFExtDBF, MM_1MB, - &pMMAdmDB->pRecList, pMMAdmDB->pMMBDXP->FirstRecordOffset, - 0)) + fseek_function(pMMAdmDB->pMMBDXP->pfDataBase, + pMMAdmDB->pMMBDXP->FirstRecordOffset, SEEK_SET); + + if (MMInitFlush(&pMMAdmDB->FlushRecList, pMMAdmDB->pMMBDXP->pfDataBase, + MM_1MB, &pMMAdmDB->pRecList, + pMMAdmDB->pMMBDXP->FirstRecordOffset, 0)) return 1; pMMAdmDB->nNumRecordOnCourse = @@ -6366,7 +6358,8 @@ MMTestAndFixValueToRecordDBXP(struct MiraMonVectLayerInfo *hMiraMonLayer, struct MM_FIELD *camp; MM_BYTES_PER_FIELD_TYPE_DBF nNewWidth; - if (!hMiraMonLayer) + if (!hMiraMonLayer || !pMMAdmDB || !pMMAdmDB->pMMBDXP || + !pMMAdmDB->pMMBDXP->pField || !pMMAdmDB->pMMBDXP->pfDataBase) return 1; camp = pMMAdmDB->pMMBDXP->pField + nIField; @@ -6388,8 +6381,6 @@ MMTestAndFixValueToRecordDBXP(struct MiraMonVectLayerInfo *hMiraMonLayer, if (MMAppendBlockToBuffer(&pMMAdmDB->FlushRecList)) return 1; - pMMAdmDB->pMMBDXP->pfDataBase = pMMAdmDB->pFExtDBF; - if (MM_ChangeDBFWidthField( pMMAdmDB->pMMBDXP, nIField, nNewWidth, pMMAdmDB->pMMBDXP->pField[nIField].DecimalsIfFloat)) @@ -6414,9 +6405,9 @@ MMTestAndFixValueToRecordDBXP(struct MiraMonVectLayerInfo *hMiraMonLayer, // File has changed its size, so it has to be updated // at the Flush tool - fseek_function(pMMAdmDB->pFExtDBF, 0, SEEK_END); + fseek_function(pMMAdmDB->pMMBDXP->pfDataBase, 0, SEEK_END); pMMAdmDB->FlushRecList.OffsetWhereToFlush = - ftell_function(pMMAdmDB->pFExtDBF); + ftell_function(pMMAdmDB->pMMBDXP->pfDataBase); } return 0; } @@ -7160,7 +7151,8 @@ static int MMCloseMMBD_XPFile(struct MiraMonVectLayerInfo *hMiraMonLayer, if (hMiraMonLayer->ReadOrWrite == MM_WRITING_MODE) { - if (!MMAdmDB->pFExtDBF) + if (!MMAdmDB->pMMBDXP || + (MMAdmDB->pMMBDXP && !MMAdmDB->pMMBDXP->pfDataBase)) { // In case of 0 elements created we have to // create an empty DBF @@ -7204,7 +7196,8 @@ static int MMCloseMMBD_XPFile(struct MiraMonVectLayerInfo *hMiraMonLayer, ret_code = 0; end_label: // Closing database files - fclose_and_nullify(&MMAdmDB->pFExtDBF); + if (MMAdmDB && MMAdmDB->pMMBDXP && MMAdmDB->pMMBDXP->pfDataBase) + fclose_and_nullify(&MMAdmDB->pMMBDXP->pfDataBase); return ret_code; } @@ -7215,7 +7208,7 @@ int MMCloseMMBD_XP(struct MiraMonVectLayerInfo *hMiraMonLayer) if (!hMiraMonLayer) return 1; - if (hMiraMonLayer->pMMBDXP) + if (hMiraMonLayer->pMMBDXP && hMiraMonLayer->pMMBDXP->pfDataBase) { fclose_and_nullify(&hMiraMonLayer->pMMBDXP->pfDataBase); } From 
43daa61a4ac7d92a2d2731ae939f9878935a5f3f Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 29 Apr 2024 14:27:55 +0200 Subject: [PATCH 226/230] NEWS.md: update with 3.9.0beta2 [ci skip] --- NEWS.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/NEWS.md b/NEWS.md index 780ce09a1c09..7cf3486b1155 100644 --- a/NEWS.md +++ b/NEWS.md @@ -181,6 +181,7 @@ See [MIGRATION_GUIDE.TXT](https://github.com/OSGeo/gdal/blob/release/3.8/MIGRATI * Python sample scripts: add gdalbuildvrtofvrt.py (#9451) * Python utilities: do not display full traceback on OpenDS failures (#9534) * gdalinfo: suggest trying ogrinfo if appropriate, and vice-versa +* gdalinfo and ogrinfo -json: add newline character at end of JSON output ### Raster drivers @@ -278,6 +279,7 @@ Sentinel2 driver: * include 10m AOT and WVP bands in 10m subdataset (#9066) TileDB driver: + * Remove use of deprecated API, and bump minimum version to 2.15 * Added tiledb metadata fields to easily tag array type * be able to read datasets converted with 'tiledb-cf netcdf-convert' * make its identify() method more restrictive by not identifying /vsi file @@ -336,6 +338,7 @@ ZMap driver: GEOSGeom_setPrecision_r() * Add a OGRGeometry::roundCoordinates() method * OGRFeature: add SerializeToBinary() / DeserializeFromBinary() +* OGRFeature::SetField(): add warnings when detecting some lossy conversions (#9792) * Add OGR_G_GeodesicArea() / OGRSurface::get_GeodesicArea() * SQLite SQL dialect: implement ST_Area(geom, use_ellipsoid) * Add OGR_L_GetDataset() and implement GetDataset() in all drivers with creation @@ -343,12 +346,15 @@ ZMap driver: * Arrow array: fix decoding of ``date32[days]`` values before Epoch (Arrow->OGRFeature), and fix rounding when encoding such values (OGRFeature->Arrow) (#9636) +* OGRLayer::WriteArrowBatch(): add tolerance for field type mismatches if int32/int64/real; + Also add an option IF_FIELD_NOT_PRESERVED=ERROR to error out when lossy conversion occurs. 
(#9792) * OGRLayer::SetIgnoredFields(): make it take a CSLConstList argument instead of const char* ### OGRSpatialReference * Add OGRSpatialReference::exportToCF1() and importFromCF1() +* Add OSRIsDerivedProjected() / OGRSpatialReference::IsDerivedProjected() * OGRCoordinateTransformation::Transform(): change nCount parameter to size_t (C++ API only for now) (#9074) * OGRProjCT::TransformWithErrorCodes(): Improve performance of axis swapping @@ -358,6 +364,7 @@ ZMap driver: * Add OSRSetFromUserInputEx() and map it to SWIG (#9358) * Add std::string OGRSpatialReference::exportToWkt( const char* const* papszOptions = nullptr) const +* OGR_CT: use PROJJSON internally rather than in WKT:2019 (#9732) ### Utilities @@ -397,6 +404,7 @@ Arrow/Parquet drivers: pyarrow-registered extension type * handle fields with a pyarrow-registered extension type * preliminary/in-advance read support for future JSON Canonical Extension + * OGRCloneArrowArray(): add missing support for 'tss:' Arrow type CSV driver: * parse header with line breaks (#9172) @@ -473,6 +481,10 @@ Parquet driver: with a Parquet dataset source (#9497) * make it recognize bbox field from Overture Maps 2024-01-17-alpha.0 and 2024-04-16-beta.0 releases + * fix ResetReading() implementation, when using the ParquetDataset API and + when there's a single batch + * fix opening single file Parquet datasets with the ParquetDataset API when + using PARQUET:filename.parquet PGDUMP driver: * add a LAUNDER_ASCII=YES/NO (default NO) layer creation option @@ -515,6 +527,7 @@ Java bindings: Python bindings: * lots of improvements to documentation * add a pyproject.toml with numpy as a build requirement (#8926, #8069) + * pyproject.toml: use numpy>=2.0.0rc1 for python >=3.9 (#9751) * bump setuptools requirement to >= 67.0 * define entry_points.console_scripts (#8811) * add RasterAttributeTable::ReadValuesIOAsString, ReadValuesIOAsInteger, From 747681710dbdccc7306b8f6fff5c9d7deb040c64 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Apr 2024 12:29:54 +0000 Subject: [PATCH 227/230] Bump actions/checkout from 4.1.3 to 4.1.4 Bumps [actions/checkout](https://github.com/actions/checkout) from 4.1.3 to 4.1.4. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/1d96c772d19495a3b5c517cd2bc0cb401ea0529f...0ad4b8fadaa221de15dcec353f45205ec38ea70b) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] <support@github.com> --- .github/workflows/android_cmake.yml | 2 +- .github/workflows/clang_static_analyzer.yml | 2 +- .github/workflows/cmake_builds.yml | 12 ++++++------ .github/workflows/code_checks.yml | 14 +++++++------- .github/workflows/codeql.yml | 2 +- .github/workflows/conda.yml | 2 +- .github/workflows/coverity_scan.yml | 2 +- .github/workflows/doc_build.yml | 2 +- .github/workflows/linux_build.yml | 2 +- .github/workflows/macos.yml | 2 +- .github/workflows/scorecard.yml | 2 +- .github/workflows/slow_tests.yml | 2 +- .github/workflows/windows_build.yml | 2 +- 13 files changed, 24 insertions(+), 24 deletions(-) diff --git a/.github/workflows/android_cmake.yml b/.github/workflows/android_cmake.yml index 55a556debb9a..bfacb3a1d7e9 100644 --- a/.github/workflows/android_cmake.yml +++ b/.github/workflows/android_cmake.yml @@ -24,7 +24,7 @@ jobs: if: "!contains(github.event.head_commit.message, '[ci skip]') && !contains(github.event.head_commit.message, '[skip ci]')" steps: - name: Checkout - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: Cache uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 diff --git a/.github/workflows/clang_static_analyzer.yml b/.github/workflows/clang_static_analyzer.yml index 07965bbb2675..ad60b4728ddd 100644 --- a/.github/workflows/clang_static_analyzer.yml +++ b/.github/workflows/clang_static_analyzer.yml @@ -24,7 +24,7 @@ jobs: if: "!contains(github.event.head_commit.message, '[ci skip]') && !contains(github.event.head_commit.message, '[skip ci]')" steps: - name: Checkout - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: Run run: docker run --rm -v $PWD:$PWD ubuntu:22.04 sh -c "cd $PWD && apt update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends sudo software-properties-common && DEBIAN_FRONTEND=noninteractive sh ./ci/travis/csa_common/before_install.sh && sh ./ci/travis/csa_common/install.sh && sh ./ci/travis/csa_common/script.sh" diff --git a/.github/workflows/cmake_builds.yml b/.github/workflows/cmake_builds.yml index c80bb1483298..9578ef890eba 100644 --- a/.github/workflows/cmake_builds.yml +++ b/.github/workflows/cmake_builds.yml @@ -31,7 +31,7 @@ jobs: cache-name: cmake-ubuntu-focal steps: - name: Checkout GDAL - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: Setup cache uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 id: cache @@ -311,7 +311,7 @@ jobs: run: | git config --global core.autocrlf false - name: Checkout GDAL - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: Install development packages uses: msys2/setup-msys2@cc11e9188b693c2b100158c3322424c4cc1dadea # v2.22.0 with: @@ -404,7 +404,7 @@ jobs: run: | git config --global core.autocrlf false - name: Checkout GDAL - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - uses: ilammy/msvc-dev-cmd@0b201ec74fa43914dc39ae48a89fd1d8cb592756 # v1.13.0 - name: populate JAVA_HOME shell: pwsh @@ -506,7 +506,7 @@ jobs: run: | git config --global core.autocrlf false - name: Checkout 
GDAL - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 with: activate-environment: gdalenv @@ -573,7 +573,7 @@ jobs: with: xcode-version: 14.3 - name: Checkout GDAL - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: Setup cache uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 id: cache @@ -653,7 +653,7 @@ jobs: run: | git config --global core.autocrlf false - name: Checkout GDAL - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 with: activate-environment: gdalenv diff --git a/.github/workflows/code_checks.yml b/.github/workflows/code_checks.yml index b11e0ceee661..d3adc4273709 100644 --- a/.github/workflows/code_checks.yml +++ b/.github/workflows/code_checks.yml @@ -24,7 +24,7 @@ jobs: if: "!contains(github.event.head_commit.message, '[ci skip]') && !contains(github.event.head_commit.message, '[skip ci]')" steps: - name: Checkout - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: Install Requirements run: | @@ -46,7 +46,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: Detect tabulations run: ./scripts/detect_tabulations.sh @@ -81,7 +81,7 @@ jobs: linting: runs-on: ubuntu-latest steps: - - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 - uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1 @@ -89,7 +89,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: Install Requirements run: | @@ -106,7 +106,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: Install Requirements run: | @@ -125,7 +125,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: Set up Python uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: @@ -142,7 +142,7 @@ jobs: if: "!contains(github.event.head_commit.message, '[ci skip]') && !contains(github.event.head_commit.message, '[skip ci]')" steps: - name: Checkout - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: Install requirements run: | diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index b354dbf08ca0..3bd2f9a2ad5d 100644 --- a/.github/workflows/codeql.yml +++ 
b/.github/workflows/codeql.yml @@ -41,7 +41,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: Install dependencies run: | diff --git a/.github/workflows/conda.yml b/.github/workflows/conda.yml index ba8e748a7d69..2818536a6697 100644 --- a/.github/workflows/conda.yml +++ b/.github/workflows/conda.yml @@ -37,7 +37,7 @@ jobs: CACHE_NUMBER: 0 steps: - - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: Support longpaths run: git config --system core.longpaths true diff --git a/.github/workflows/coverity_scan.yml b/.github/workflows/coverity_scan.yml index e46b38d9a4ea..b1980c77bcfa 100644 --- a/.github/workflows/coverity_scan.yml +++ b/.github/workflows/coverity_scan.yml @@ -43,7 +43,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: Login to GHCR if: env.CONTAINER_REGISTRY == 'ghcr.io' diff --git a/.github/workflows/doc_build.yml b/.github/workflows/doc_build.yml index 7b889e93ca06..73e4ace923ef 100644 --- a/.github/workflows/doc_build.yml +++ b/.github/workflows/doc_build.yml @@ -23,7 +23,7 @@ jobs: container: ghcr.io/osgeo/proj-docs steps: - - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: Setup environment shell: bash -l {0} run: | diff --git a/.github/workflows/linux_build.yml b/.github/workflows/linux_build.yml index 87e427d77f30..d6c953cd045f 100644 --- a/.github/workflows/linux_build.yml +++ b/.github/workflows/linux_build.yml @@ -159,7 +159,7 @@ jobs: echo "CONTAINER_NAME_FULL=${CONTAINER_REGISTRY}/${CONTAINER_REGISTRY_USER,,}/${CONTAINER_NAME}:${CONTAINER_TAG_CLEAN}" >>${GITHUB_ENV} - name: Checkout - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: Login to Docker Hub if: env.CONTAINER_REGISTRY == 'docker.io' diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml index 2d7f4baa80cc..1b606f13430c 100644 --- a/.github/workflows/macos.yml +++ b/.github/workflows/macos.yml @@ -25,7 +25,7 @@ jobs: if: "!contains(github.event.head_commit.message, '[ci skip]') && !contains(github.event.head_commit.message, '[skip ci]')" steps: - - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 with: diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index ef3649c918e6..53caa8f567a3 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -36,7 +36,7 @@ jobs: steps: - name: "Checkout code" - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 with: persist-credentials: false diff --git a/.github/workflows/slow_tests.yml b/.github/workflows/slow_tests.yml index 2e0a82fb9989..40713a1f0bd0 100644 --- a/.github/workflows/slow_tests.yml +++ b/.github/workflows/slow_tests.yml @@ -47,7 +47,7 @@ jobs: steps: - name: Checkout - uses: 
actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: Login to GHCR if: env.CONTAINER_REGISTRY == 'ghcr.io' diff --git a/.github/workflows/windows_build.yml b/.github/workflows/windows_build.yml index bba1e21e9b07..681a2626dc21 100644 --- a/.github/workflows/windows_build.yml +++ b/.github/workflows/windows_build.yml @@ -56,7 +56,7 @@ jobs: git config --global core.autocrlf false - name: Checkout - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: Set environment shell: pwsh From c73a15345f81a28533059422dbd6fe49774802f1 Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 29 Apr 2024 14:31:21 +0200 Subject: [PATCH 228/230] dependabot.yml: change interval to monthly --- .github/dependabot.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 5ace4600a1f2..8ac6b8c4984d 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -3,4 +3,4 @@ updates: - package-ecosystem: "github-actions" directory: "/" schedule: - interval: "weekly" + interval: "monthly" From 94c80fdb5659541b522896b094c732200044b17c Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Mon, 29 Apr 2024 18:00:07 +0200 Subject: [PATCH 229/230] gdal_viewshed, nearblack, ogr2ogr: call store_into() after default_value() In practice this doesn't matter given that we already set the default value in the bound variable of store_into(), but for clarity call first default_value() given that store_into() currently depends on default_value() being called first. --- apps/gdal_viewshed.cpp | 16 ++++++++-------- apps/nearblack_lib.cpp | 8 ++++---- apps/ogr2ogr_lib.cpp | 2 +- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/apps/gdal_viewshed.cpp b/apps/gdal_viewshed.cpp index ec3dfded7c37..8af666e0da20 100644 --- a/apps/gdal_viewshed.cpp +++ b/apps/gdal_viewshed.cpp @@ -81,8 +81,8 @@ MAIN_START(argc, argv) double dfObserverHeight = 2; argParser.add_argument("-oz") - .store_into(dfObserverHeight) .default_value(dfObserverHeight) + .store_into(dfObserverHeight) .metavar("<value>") .nargs(1) .help(_("The height of the observer above the DEM surface in the " @@ -90,24 +90,24 @@ MAIN_START(argc, argv) double dfVisibleVal = 255; argParser.add_argument("-vv") - .store_into(dfVisibleVal) .default_value(dfVisibleVal) + .store_into(dfVisibleVal) .metavar("<value>") .nargs(1) .help(_("Pixel value to set for visible areas.")); double dfInvisibleVal = 0.0; argParser.add_argument("-iv") - .store_into(dfInvisibleVal) .default_value(dfInvisibleVal) + .store_into(dfInvisibleVal) .metavar("<value>") .nargs(1) .help(_("Pixel value to set for invisible areas.")); double dfOutOfRangeVal = 0.0; argParser.add_argument("-ov") - .store_into(dfOutOfRangeVal) .default_value(dfOutOfRangeVal) + .store_into(dfOutOfRangeVal) .metavar("<value>") .nargs(1) .help( @@ -128,8 +128,8 @@ MAIN_START(argc, argv) double dfTargetHeight = 0.0; argParser.add_argument("-tz") - .store_into(dfTargetHeight) .default_value(dfTargetHeight) + .store_into(dfTargetHeight) .metavar("<value>") .nargs(1) .help(_("The height of the target above the DEM surface in the height " @@ -137,8 +137,8 @@ MAIN_START(argc, argv) double dfMaxDistance = 0.0; argParser.add_argument("-md") - .store_into(dfMaxDistance) .default_value(dfMaxDistance) + 
.store_into(dfMaxDistance) .metavar("<value>") .nargs(1) .help(_("Maximum distance from observer to compute visibility.")); @@ -147,8 +147,8 @@ MAIN_START(argc, argv) // doc/source/programs/gdal_viewshed.rst double dfCurvCoeff = 0.85714; argParser.add_argument("-cc") - .store_into(dfCurvCoeff) .default_value(dfCurvCoeff) + .store_into(dfCurvCoeff) .metavar("<value>") .nargs(1) .help(_("Coefficient to consider the effect of the curvature and " @@ -164,11 +164,11 @@ MAIN_START(argc, argv) std::string osOutputMode; argParser.add_argument("-om") - .store_into(osOutputMode) .choices("NORMAL", "DEM", "GROUND") .metavar("NORMAL|DEM|GROUND") .default_value("NORMAL") .nargs(1) + .store_into(osOutputMode) .help(_("Sets what information the output contains.")); bool bQuiet = false; diff --git a/apps/nearblack_lib.cpp b/apps/nearblack_lib.cpp index a20b6f128147..ff801ace9a99 100644 --- a/apps/nearblack_lib.cpp +++ b/apps/nearblack_lib.cpp @@ -835,17 +835,17 @@ GDALNearblackOptionsGetParser(GDALNearblackOptions *psOptions, } argParser->add_argument("-nb") - .store_into(psOptions->nMaxNonBlack) .metavar("<non_black_pixels>") - .default_value(psOptions->nMaxNonBlack) .nargs(1) + .default_value(psOptions->nMaxNonBlack) + .store_into(psOptions->nMaxNonBlack) .help(_("Number of consecutive non-black pixels.")); argParser->add_argument("-near") - .store_into(psOptions->nNearDist) .metavar("<dist>") - .default_value(psOptions->nNearDist) .nargs(1) + .default_value(psOptions->nNearDist) + .store_into(psOptions->nNearDist) .help(_("Select how far from black, white or custom colors the pixel " "values can be and still considered.")); diff --git a/apps/ogr2ogr_lib.cpp b/apps/ogr2ogr_lib.cpp index 3650db61ade0..4fa86207af85 100644 --- a/apps/ogr2ogr_lib.cpp +++ b/apps/ogr2ogr_lib.cpp @@ -7221,8 +7221,8 @@ static std::unique_ptr<GDALArgumentParser> GDALVectorTranslateOptionsGetParser( argParser->add_argument("-datelineoffset") .metavar("<val_in_degree>") - .store_into(psOptions->dfDateLineOffset) .default_value(psOptions->dfDateLineOffset) + .store_into(psOptions->dfDateLineOffset) .help(_("Offset from dateline in degrees.")); argParser->add_argument("-clipsrc") From e99530dfb3672b664de293874477addc061745be Mon Sep 17 00:00:00 2001 From: Even Rouault <even.rouault@spatialys.com> Date: Tue, 30 Apr 2024 12:44:31 +0200 Subject: [PATCH 230/230] python_bindings.rst: fix formatting --- doc/source/api/python_bindings.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/source/api/python_bindings.rst b/doc/source/api/python_bindings.rst index 0604335dddf9..3d8be0bf4a05 100644 --- a/doc/source/api/python_bindings.rst +++ b/doc/source/api/python_bindings.rst @@ -84,6 +84,7 @@ This is most often due to pip reusing a cached GDAL installation. Verify that the necessary dependencies have been installed and then run the following to force a clean build: :: + pip install --no-cache --force-reinstall gdal[numpy]=="$(gdal-config --version).*"
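
As a complement to PATCH 229 above ("call store_into() after default_value()"), the short standalone C++ sketch below illustrates that ordering on a builder-style parser. It is only an illustration under assumptions: it uses the upstream p-ranav argparse single-header API that GDAL's GDALArgumentParser builds on, and the program name, the -md option and the dfMaxDistance variable are placeholders mirroring the patched apps, not code taken from any of the patches above.

    // Standalone sketch, not GDAL source: declare the default before binding
    // the output variable, mirroring the ordering recommended in PATCH 229.
    // Assumes the p-ranav argparse single-header library is available.
    #include <argparse/argparse.hpp>
    #include <iostream>

    int main(int argc, char *argv[])
    {
        argparse::ArgumentParser program("ordering-demo");

        double dfMaxDistance = 0.0;
        program.add_argument("-md")
            .default_value(dfMaxDistance)  // set the default value first...
            .store_into(dfMaxDistance)     // ...then bind the variable that receives the parsed value
            .metavar("<value>")
            .nargs(1)
            .help("Maximum distance (illustrative option)");

        try
        {
            program.parse_args(argc, argv);
        }
        catch (const std::exception &err)
        {
            std::cerr << err.what() << std::endl;
            return 1;
        }

        std::cout << "-md = " << dfMaxDistance << std::endl;
        return 0;
    }

With this ordering the default declared via default_value() is already known when store_into() binds the variable, which is the dependency the commit message describes; when -md is omitted, the bound variable keeps its default.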