Commit 6a2dfe4

Fix VTK export bounds error in AMR interpolation
- Fix BoundsError when accessing data arrays during multi-level interpolation
- Add safe log10 operations for scalar and vector data

1 parent d6710aa commit 6a2dfe4

File tree

1 file changed: +99 -10 lines changed

src/functions/data/export_hydro_to_vtk.jl

Lines changed: 99 additions & 10 deletions
@@ -113,15 +113,41 @@ function export_vtk(
     # Helper function to interpolate fine cells to a coarser grid at level L
     function interpolate_to_level_coarse(xa, ya, za, sdata, vdata, L::Int)
         cs = boxlen / 2^L
-        # Map fine cell coordinates to coarse grid indices
-        coarse_idx = [(fld(xa[i], cs), fld(ya[i], cs), fld(za[i], cs)) for i in eachindex(xa)]
+
+        # Verify data consistency before proceeding
+        ncoords = length(xa)
+        if length(ya) != ncoords || length(za) != ncoords
+            error("Coordinate arrays have mismatched lengths: x=$(length(xa)), y=$(length(ya)), z=$(length(za))")
+        end
+
+        # Check that scalar data arrays have consistent sizes
+        scalar_sizes = [length(sdata[s]) for s in scalars if haskey(sdata, s)]
+        if !isempty(scalar_sizes)
+            min_size = minimum(scalar_sizes)
+            max_size = maximum(scalar_sizes)
+            if min_size != max_size
+                verbose && println(" Warning: Scalar data arrays have different sizes: min=$min_size, max=$max_size")
+            end
+            data_size = min_size # Use minimum size for safety
+            if data_size != ncoords
+                verbose && println(" Warning: Data size ($data_size) differs from coordinate size ($ncoords), using min($data_size, $ncoords)")
+                safe_size = min(data_size, ncoords)
+            else
+                safe_size = ncoords
+            end
+        else
+            safe_size = ncoords
+        end
+
+        # Map fine cell coordinates to coarse grid indices (only for valid indices)
+        coarse_idx = [(fld(xa[i], cs), fld(ya[i], cs), fld(za[i], cs)) for i in 1:safe_size]
         # Group fine cells by their corresponding coarse cell index
         idx_map = Dict{NTuple{3,Int}, Vector{Int}}()
         for (i, cidx) in enumerate(coarse_idx)
             push!(get!(idx_map, cidx, Int[]), i)
         end
         N = length(idx_map)
-        verbose && println(" Unique coarse cells at level $L: $N (out of max $(Int(2^L)^3))")
+        verbose && println(" Unique coarse cells at level $L: $N (out of max $(Int(2^L)^3)) [safe_size: $safe_size]")
         #verbose && println(" Expected file size (uncompressed): ~$(round(N * 300 / 1024^3, digits=2)) GB")

         # Limit output cells if exceeding max_cells, prioritizing denser regions
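A note on the mapping above: fld(x, cs) floors x / cs, so each fine-cell coordinate collapses to the integer index of the coarse cell that contains it, and the Dict then groups all fine cells sharing a coarse index. A minimal standalone sketch of that grouping step with illustrative values (none of these names are part of the package):

    # Grouping sketch (illustrative, not package code):
    xa = [0.1, 0.2, 0.9, 1.4]              # fine-cell x coordinates in box units
    cs = 0.5                               # coarse cell size at this level
    idx = [Int(fld(x, cs)) for x in xa]    # -> [0, 0, 1, 2]

    idx_map = Dict{Int, Vector{Int}}()
    for (i, c) in enumerate(idx)
        push!(get!(idx_map, c, Int[]), i)  # coarse index -> list of fine-cell indices
    end
    # idx_map == Dict(0 => [1, 2], 1 => [3], 2 => [4])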
@@ -149,17 +175,38 @@ function export_vtk(
             # Set coarse cell center coordinates
             x2[i], y2[i], z2[i] = (gx + 0.5) * cs, (gy + 0.5) * cs, (gz + 0.5) * cs
             idxs = idx_map[pts[i]]
-            inv = 1.0 / length(idxs)
+
+            # Filter indices to ensure they're within bounds of data arrays
+            valid_idxs = filter(j -> j <= safe_size, idxs)
+            if length(valid_idxs) != length(idxs)
+                verbose && println(" Warning: Filtered $(length(idxs) - length(valid_idxs)) out-of-bounds indices for coarse cell ($gx,$gy,$gz)")
+            end
+
+            inv = 1.0 / length(valid_idxs)
             # Average scalar data over fine cells in this coarse cell
             for s in scalars
-                sumv = 0.0; @inbounds for j in idxs sumv += sdata[s][j] end
-                s2[s][i] = sumv * inv
+                if haskey(sdata, s) && length(sdata[s]) >= safe_size
+                    sumv = 0.0
+                    for j in valid_idxs
+                        sumv += sdata[s][j]
+                    end
+                    s2[s][i] = sumv * inv
+                else
+                    s2[s][i] = 0.0 # Default value for missing data
+                end
             end
             # Average vector data if requested
             if export_vector
                 for v in vector
-                    sumv = 0.0; @inbounds for j in idxs sumv += vdata[v][j] end
-                    v2[v][i] = sumv * inv
+                    if haskey(vdata, v) && length(vdata[v]) >= safe_size
+                        sumv = 0.0
+                        for j in valid_idxs
+                            sumv += vdata[v][j]
+                        end
+                        v2[v][i] = sumv * inv
+                    else
+                        v2[v][i] = 0.0 # Default value for missing data
+                    end
                 end
             end
         end
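The averaging pattern this hunk introduces is: drop candidate indices beyond the verified array length, then take the arithmetic mean over what remains. A reduced sketch with made-up values:

    # Bounds-safe averaging sketch (illustrative values):
    vals      = [1.0, 2.0, 3.0]                    # one data array
    idxs      = [1, 3, 7]                          # candidate fine-cell indices
    safe_size = length(vals)
    valid     = filter(j -> j <= safe_size, idxs)  # drops 7 -> [1, 3]
    avg       = sum(vals[j] for j in valid) / length(valid)  # (1.0 + 3.0) / 2 = 2.0

One edge case the diff does not guard against: if every index of a coarse cell is filtered out, length(valid_idxs) is 0, inv becomes Inf, and the averaged value comes out NaN rather than falling back to the 0.0 default.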
@@ -189,7 +236,28 @@ function export_vtk(
     sdata = Dict{Symbol, Vector{Float64}}()
     for (s, sunit) in zip(scalars, scalars_unit)
         if scalars_log10
-            arr = log10.( getvar(dataobject, s, sunit, mask=mask) )
+            raw_arr = getvar(dataobject, s, sunit, mask=mask)
+            if raw_arr !== nothing
+                # Safe log10: handle negative and zero values
+                safe_arr = similar(raw_arr, Float64)
+                for i in eachindex(raw_arr)
+                    val = raw_arr[i]
+                    if val > 0
+                        safe_arr[i] = log10(val)
+                    elseif val == 0
+                        safe_arr[i] = -30.0 # Very small log value instead of -Inf
+                    else
+                        # For negative values, use log10 of absolute value with warning
+                        safe_arr[i] = log10(abs(val))
+                        if verbose && i <= 5 # Only warn for first few negative values
+                            println(" Warning: Negative value ($val) encountered for $s, using log10(abs(val))")
+                        end
+                    end
+                end
+                arr = safe_arr
+            else
+                arr = nothing
+            end
         else
             arr = getvar(dataobject, s, sunit, mask=mask)
         end
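The element-wise transform above could equally be written as a small scalar helper and broadcast over the array; a sketch under that assumption (safe_log10 is a hypothetical name, not something this commit defines):

    # Hypothetical helper, equivalent to the loop above minus the warning:
    safe_log10(v; floor = -30.0) = v > 0 ? log10(v) :
                                   v == 0 ? floor : log10(abs(v))

    safe_log10.([10.0, 0.0, -100.0])   # -> [1.0, -30.0, 2.0]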
@@ -201,7 +269,28 @@ function export_vtk(
     if export_vector
         for v in vector
             if vector_log10
-                arr = log10.( getvar(dataobject, v, vector_unit, mask=mask) )
+                raw_arr = getvar(dataobject, v, vector_unit, mask=mask)
+                if raw_arr !== nothing
+                    # Safe log10: handle negative and zero values
+                    safe_arr = similar(raw_arr, Float64)
+                    for i in eachindex(raw_arr)
+                        val = raw_arr[i]
+                        if val > 0
+                            safe_arr[i] = log10(val)
+                        elseif val == 0
+                            safe_arr[i] = -30.0 # Very small log value instead of -Inf
+                        else
+                            # For negative values, use log10 of absolute value
+                            safe_arr[i] = log10(abs(val))
+                            if verbose && i <= 5 # Only warn for first few negative values
+                                println(" Warning: Negative value ($val) encountered for $v, using log10(abs(val))")
+                            end
+                        end
+                    end
+                    arr = safe_arr
+                else
+                    arr = nothing
+                end
             else
                 arr = getvar(dataobject, v, vector_unit, mask=mask)
             end
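This vector branch repeats the scalar branch verbatim apart from the symbol and unit arguments. With a helper like the hypothetical safe_log10 sketched after the previous hunk, both hunks would reduce to something like the following (at the cost of the per-element negative-value warning):

    raw_arr = getvar(dataobject, v, vector_unit, mask=mask)
    arr = raw_arr === nothing ? nothing : safe_log10.(raw_arr)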
