Skip to content

Commit f43f342

Browse files
committed
feat(rasterUtils): add optional compression parameter for raster writing
- Introduced `useCompression` parameter to `writeResultToRaster` for configurable compression of `GTiff` files.
- Defaulted the parameter to `True` for LZW compression.

refactor(rasterUtils): simplify raster file writing logic and enforce driver validation; fixes #1163

- Added validation to ensure only supported drivers (`AAIGrid`, `GTiff`) are used.
- Utilized a `with` statement to handle file writing safely and cleanly.
- Implemented LZW compression for `GTiff` raster outputs to optimize file size.
- Refactored file writing logic to support driver-specific configurations.
1 parent 21e4b94 commit f43f342

File tree

4 files changed

+51
-44
lines changed

4 files changed

+51
-44
lines changed

avaframe/com1DFA/com1DFA.py

Lines changed: 19 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -2017,8 +2017,7 @@ def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, si
20172017
# make sure to save all desired results for first and last time step for
20182018
# the report
20192019
resTypesReport = fU.splitIniValueToArraySteps(cfg["REPORT"]["plotFields"])
2020-
# always add particles to first and last time step
2021-
resTypesLast = list(set(resTypes + resTypesReport + ["particles"]))
2020+
resTypesLast = list(set(resTypes + resTypesReport))
20222021
# derive friction type
20232022
# turn friction model into integer
20242023
frictModelsList = [
@@ -2059,7 +2058,7 @@ def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, si
20592058
# setup a result fields info data frame to save max values of fields and avalanche front
20602059
resultsDF = setupresultsDF(resTypesLast, cfg["VISUALISATION"].getboolean("createRangeTimeDiagram"))
20612060

2062-
# TODO: add here different time stepping options
2061+
# Add different time stepping options here
20632062
log.debug("Use standard time stepping")
20642063
# Initialize time and counters
20652064
nSave = 1
@@ -2074,6 +2073,12 @@ def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, si
20742073
# export initial time step
20752074
if cfg["EXPORTS"].getboolean("exportData"):
20762075
exportFields(cfg, t, fields, dem, outDir, cuSimName, TSave="initial")
2076+
2077+
if "particles" in resTypes:
2078+
outDirData = outDir / "particles"
2079+
fU.makeADir(outDirData)
2080+
savePartToPickle(particles, outDirData, cuSimName)
2081+
20772082
# export particles properties for visualisation
20782083
if cfg["VISUALISATION"].getboolean("writePartToCSV"):
20792084
particleTools.savePartToCsv(
@@ -2085,10 +2090,6 @@ def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, si
20852090
countParticleCsv = countParticleCsv + 1
20862091

20872092
# export particles dictionaries of saving time steps
2088-
# (if particles is not in resType, only first and last time step are saved)
2089-
outDirData = outDir / "particles"
2090-
fU.makeADir(outDirData)
2091-
savePartToPickle(particles, outDirData, cuSimName)
20922093

20932094
zPartArray0 = copy.deepcopy(particles["z"])
20942095

@@ -2187,7 +2188,8 @@ def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, si
21872188
exportFields(cfg, t, fields, dem, outDir, cuSimName, TSave="intermediate")
21882189

21892190
# export particles dictionaries of saving time steps
2190-
savePartToPickle(particles, outDirData, cuSimName)
2191+
if "particles" in resTypes:
2192+
savePartToPickle(particles, outDirData, cuSimName)
21912193

21922194
# export particles properties for visualisation
21932195
if cfg["VISUALISATION"].getboolean("writePartToCSV"):
@@ -2315,7 +2317,8 @@ def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, si
23152317
exportFields(cfg, t, fields, dem, outDir, cuSimName, TSave="final")
23162318

23172319
# export particles dictionaries of saving time steps
2318-
savePartToPickle(particles, outDirData, cuSimName)
2320+
if "particles" in resTypes:
2321+
savePartToPickle(particles, outDirData, cuSimName)
23192322
else:
23202323
# fetch contourline info
23212324
contourDictXY = outCom1DFA.fetchContCoors(
@@ -2371,7 +2374,7 @@ def setupresultsDF(resTypes, cfgRangeTime):
23712374
resultsDF: dataframe
23722375
data frame with one line for the initial time step and max and mean values of fields
23732376
"""
2374-
2377+
# TODO catch empty resTypes
23752378
resDict = {"timeStep": [0.0]}
23762379
for resT in resTypes:
23772380
if resT != "particles" and resT != "FTDet":
@@ -2968,12 +2971,18 @@ def exportFields(
29682971
# convert from J/cell to kJ/m²
29692972
# (by dividing the peak kinetic energy per cell by the real area of the cell)
29702973
resField = resField * 0.001 / dem["areaRaster"]
2974+
29712975
dataName = cuSimName + "_" + resType + "_" + "t%.2f" % (timeStep)
29722976
# create directory
29732977
outDirPeak = outDir / "peakFiles" / "timeSteps"
29742978
fU.makeADir(outDirPeak)
29752979
outFile = outDirPeak / dataName
29762980
IOf.writeResultToRaster(dem["originalHeader"], resField, outFile, flip=True)
2981+
log.debug(
2982+
"Results parameter: %s has been exported to Outputs/peakFiles for time step: %.2f "
2983+
% (resType, timeStep)
2984+
)
2985+
29772986
if TSave == "final":
29782987
log.debug(
29792988
"Results parameter: %s exported to Outputs/peakFiles for time step: %.2f - FINAL time step "
@@ -2985,11 +2994,6 @@ def exportFields(
29852994
fU.makeADir(outDirPeakAll)
29862995
outFile = outDirPeakAll / dataName
29872996
IOf.writeResultToRaster(dem["originalHeader"], resField, outFile, flip=True)
2988-
else:
2989-
log.debug(
2990-
"Results parameter: %s has been exported to Outputs/peakFiles for time step: %.2f "
2991-
% (resType, timeStep)
2992-
)
29932997

29942998

29952999
def prepareVarSimDict(standardCfg, inputSimFiles, variationDict, simNameExisting="", module=com1DFA):

avaframe/com1DFA/com1DFACfg.ini

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@ modelType = dfa
1313
#+++++++++++++ Output++++++++++++
1414
# desired result Parameters (ppr, pft, pfv, pta, FT, FV, P, FM, Vx, Vy, Vz, TA, dmDet, sfcChange, demAdapted, particles) - separated by |
1515
resType = ppr|pft|pfv
16+
1617
# saving time step, i.e. time in seconds (first and last time step are always saved)
1718
# option 1: give an interval with start:interval in seconds (tStep = 0:5 - this will save desired results every 5 seconds for the full simulation)
1819
# option 2: explicitly list all desired time steps (closest to actual computational time step) separated by | (example tSteps = 1|50.2|100)

avaframe/in2Trans/rasterUtils.py

Lines changed: 30 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
"""
2-
Raster (ascii and tif) file reader and handler
2+
Raster (ascii and tif) file reader and handler
33

44
"""
55

@@ -150,7 +150,7 @@ def isEqualASCheader(headerA, headerB):
150150
)
151151

152152

153-
def writeResultToRaster(header, resultArray, outFileName, flip=False):
153+
def writeResultToRaster(header, resultArray, outFileName, flip=False, useCompression=True):
154154
"""Write 2D array to a raster file with header and save to location of outFileName
155155

156156
Parameters
@@ -165,39 +165,41 @@ class with methods that give cellsize, nrows, ncols, xllcenter
165165
flip: boolean
166166
if True, flip the rows of the resultArray when writing. AF considers the first line in a data array to be the
167167
southernmost one. Some formats (e.g. tif) have the northernmost line first
168+
useCompression: boolean
169+
True if compression should be used on writing tiff files (lzw)
170+
168171

169172
Returns
170173
-------
171174
outFile: path
172175
to file being written
173176
"""
174177

175-
if header["driver"] == "AAIGrid":
176-
outFile = outFileName.parent / (outFileName.name + ".asc")
177-
elif header["driver"] == "GTiff":
178-
outFile = outFileName.parent / (outFileName.name + ".tif")
179-
180-
# try:
181-
rasterOut = rasterio.open(
182-
outFile,
183-
"w",
184-
driver=header["driver"],
185-
crs=header["crs"],
186-
nodata=header["nodata_value"],
187-
transform=header["transform"],
188-
height=resultArray.shape[0],
189-
width=resultArray.shape[1],
190-
count=1,
191-
dtype=resultArray.dtype,
192-
# decimal_precision=3,
193-
)
194-
if flip:
195-
rasterOut.write(np.flipud(resultArray), 1)
196-
else:
197-
rasterOut.write(resultArray, 1)
198-
rasterOut.close()
199-
# except:
200-
# log.error("could not write {} to {}".format(resultArray, outFileName))
178+
driver = header["driver"]
179+
if driver not in ("AAIGrid", "GTiff"):
180+
raise ValueError(f"Unsupported driver: {driver}")
181+
182+
extMap = {"AAIGrid": ".asc", "GTiff": ".tif"}
183+
outFile = outFileName.parent / (outFileName.name + extMap[driver])
184+
185+
commonKwargs = {
186+
"driver": driver,
187+
"crs": header["crs"],
188+
"nodata": header["nodata_value"],
189+
"transform": header["transform"],
190+
"height": resultArray.shape[0],
191+
"width": resultArray.shape[1],
192+
"count": 1,
193+
"dtype": resultArray.dtype,
194+
}
195+
196+
if useCompression:
197+
extraKwargs = {"compress": "lzw"} if driver == "GTiff" else {}
198+
199+
with rasterio.open(outFile, "w", **commonKwargs, **extraKwargs) as rasterOut:
200+
data = np.flipud(resultArray) if flip else resultArray
201+
rasterOut.write(data, 1)
202+
201203
return outFile
202204

203205

avaframe/in3Utils/fileHandlerUtils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -368,7 +368,7 @@ def splitTimeValueToArrayInterval(cfgValues, endTime):
368368
Returns
369369
--------
370370
items : 1D numpy array
371-
time step values as 1D numpy array
371+
sorted time step values as 1D numpy array
372372
"""
373373

374374
if ":" in cfgValues:

0 commit comments

Comments
 (0)