Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions pyphare/pyphare/core/gridlayout.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,10 @@
"Pyz": "primal",
"Pzz": "primal",
"tags": "dual",
"value": "primal",
"x": "primal",
"y": "primal",
"z": "primal",
},
"y": {
"Bx": "dual",
Expand Down Expand Up @@ -79,6 +83,10 @@
"Pyz": "primal",
"Pzz": "primal",
"tags": "dual",
"value": "primal",
"x": "primal",
"y": "primal",
"z": "primal",
},
"z": {
"Bx": "dual",
Expand Down Expand Up @@ -114,6 +122,10 @@
"Pyz": "primal",
"Pzz": "primal",
"tags": "dual",
"value": "primal",
"x": "primal",
"y": "primal",
"z": "primal",
},
}
yee_centering_lower = {
Expand Down
5 changes: 3 additions & 2 deletions pyphare/pyphare/core/operators.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,12 +20,12 @@ def _compute_dot_product(patch_datas, **kwargs):


def _compute_sqrt(patch_datas, **kwargs):
ref_name = next(iter(patch_datas.keys()))
# ref_name = next(iter(patch_datas.keys())) TODO this is always "value"
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

is there something to do here ?


dset = np.sqrt(patch_datas["value"][:])

return (
{"name": "value", "data": dset, "centering": patch_datas[ref_name].centerings},
{"name": "value", "data": dset, "centering": patch_datas["value"].centerings},
)


Expand Down Expand Up @@ -139,3 +139,4 @@ def grad(hier, **kwargs):
h = compute_hier_from(_compute_grad, hier, nb_ghosts=nb_ghosts)

return VectorField(h)

170 changes: 170 additions & 0 deletions pyphare/pyphare/core/ufuncs.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,170 @@
import numpy as np

from pyphare.pharesee.hierarchy import ScalarField, VectorField

Check notice

Code scanning / CodeQL

Unused import Note

Import of 'ScalarField' is not used.
Import of 'VectorField' is not used.
from pyphare.pharesee.hierarchy.hierarchy_utils import compute_hier_from

Check notice

Code scanning / CodeQL

Unused import Note

Import of 'compute_hier_from' is not used.

from scipy.ndimage import gaussian_filter



def _compute_gaussian_filter_on_scalarfield(patch_datas, **kwargs):
from scipy.ndimage import gaussian_filter

ndim = patch_datas["value"].box.ndim
nb_ghosts = kwargs["nb_ghosts"]
sigma = kwargs["sigma"]
ds = np.asarray(patch_datas["value"][:])

ds_ = np.full(list(ds.shape), np.nan)

gf_ = gaussian_filter(ds, sigma=sigma)
select = tuple([slice(nb_ghosts, -nb_ghosts) for _ in range(ndim)])
ds_[select] = np.asarray(gf_[select])

return (
{"name": "value", "data": ds_, "centering": patch_datas["value"].centerings},
)


def _compute_gaussian_filter_on_vectorfield(patch_datas, **kwargs):
from scipy.ndimage import gaussian_filter

ref_name = next(iter(patch_datas.keys()))

ndim = patch_datas[ref_name].box.ndim
nb_ghosts = kwargs["nb_ghosts"]
sigma = kwargs["sigma"]
ds_x = np.asarray(patch_datas["x"][:])
ds_y = np.asarray(patch_datas["y"][:])
ds_z = np.asarray(patch_datas["z"][:])

dsx_ = np.full(list(ds_x.shape), np.nan)
dsy_ = np.full(list(ds_y.shape), np.nan)
dsz_ = np.full(list(ds_z.shape), np.nan)

gfx_ = gaussian_filter(ds_x, sigma=sigma)
gfy_ = gaussian_filter(ds_y, sigma=sigma)
gfz_ = gaussian_filter(ds_z, sigma=sigma)

select = tuple([slice(nb_ghosts, -nb_ghosts) for _ in range(ndim)])

dsx_[select] = np.asarray(gfx_[select])
dsy_[select] = np.asarray(gfy_[select])
dsz_[select] = np.asarray(gfz_[select])

return (
{"name": "x", "data": dsx_, "centering": patch_datas["x"].centerings},
{"name": "y", "data": dsy_, "centering": patch_datas["y"].centerings},
{"name": "z", "data": dsz_, "centering": patch_datas["z"].centerings},
)


# def gFilt(hier, **kwargs):
# sigma = kwargs.get("sigma", 2)
#
# # time0 = list(hier.times())[0]
# # level0 = 0
# # p0 = 0
# # pd0 = hier.levels(time0)[level0].patches[p0].patch_datas
# # key0 = list(pd0.keys())[0]
# # nb_ghosts = np.max(pd0[key0].ghosts_nbr)
#
# nb_ghosts = np.max(list(hier.level(0).patches[0].patch_datas.values())[0].ghosts_nbr)
#
# if nb_ghosts < sigma :
# print("nb_ghosts ({0}) < sigma ({1}) : your gaussian filter might be dirty".format(nb_ghosts, sigma))
#
# if hier.ndim == 1:
# if isinstance(hier, ScalarField) :
# h = compute_hier_from(_compute_gaussian_filter_on_scalarfield, hier, nb_ghosts=nb_ghosts, sigma=sigma)
# return ScalarField(h)
# elif isinstance(hier, VectorField) :
# h = compute_hier_from(_compute_gaussian_filter_on_vectorfield, hier, nb_ghosts=nb_ghosts, sigma=sigma)
# return VectorField(h)
# else:
# return NotImplemented
# else:
# return NotImplemented
Comment on lines +62 to +87

Check notice

Code scanning / CodeQL

Commented-out code Note

This comment appears to contain commented-out code.


def gF(hier, **kwargs):
    """Return a deep copy of *hier* whose datasets are Gaussian-filtered.

    The filter is evaluated on each patch box extended by ``n_pad`` points
    on every side, so that the number of points is large enough for the
    filtered value at the last point of the real box to be as equal as
    possible to the one at the first point of the neighbouring box.

    Keyword arguments:
        sigma: standard deviation of the Gaussian kernel (default 4).
            Must be at least 2.

    Raises:
        ValueError: if sigma < 2, if the padding does not strictly exceed
            the ghost region, or if the hierarchy dimension is unsupported.
    """
    sigma = kwargs.get("sigma", 4)
    # reject the whole range below 2, not just the exact value 1
    if sigma < 2:
        raise ValueError("sigma value has to be at least 2")
    h_ = hier.__deepcopy__(memo={})
    ndim = hier.ndim
    # int() keeps n_pad usable as a slice index even for a float sigma
    n_pad = int(4 * sigma + 1)

    for time in h_.times():
        interp_ = hier.interpol(time)
        for lvl in h_.levels(time).values():
            for patch in lvl.patches:
                box = patch.box

                for name in list(patch.patch_datas.keys()):
                    pdata = patch.patch_datas[name]
                    nb_ghosts = pdata.ghosts_nbr
                    # ghosts_nbr may be per-dimension: compare element-wise
                    # and require the padding to strictly exceed it everywhere
                    if np.any(n_pad <= np.asarray(nb_ghosts)):
                        raise ValueError("sigma value is too small")

                    if ndim not in (1, 2, 3):
                        raise ValueError("invalid dimension")

                    # physical coordinates of the padded box, per dimension
                    r_ = [
                        np.arange(box.lower[i] - n_pad, box.upper[i] + 2 + n_pad)
                        * pdata.dl[i]
                        for i in range(ndim)
                    ]

                    func, _ = interp_[name]
                    data = func(*r_)

                    gf_ = gaussian_filter(data, sigma=sigma)
                    # keep only the real box plus its ghost region
                    select = tuple(
                        slice(n_pad - nb_ghosts[i], -n_pad + nb_ghosts[i])
                        for i in range(ndim)
                    )
                    pdata.dataset = np.asarray(gf_[select])

    return h_


def peakIds(hier, **kwargs):
    """Find peaks in the datasets of a single-time hierarchy.

    Remaining keyword arguments (after "names" is popped) are forwarded to
    ``scipy.signal.find_peaks``.

    Keyword arguments:
        names: iterable of patch-data names to scan; defaults to every
            quantity of the first patch of level 0.

    Returns:
        ``zip(*out)`` where ``out`` is the ``find_peaks`` result of the
        last (level, patch, name) visited.

    Raises:
        ValueError: if the hierarchy holds more than one time step.
    """
    from scipy.signal import find_peaks

    times = list(hier.times())
    if len(times) == 1:
        time = times[0]
    else:
        raise ValueError("multiple times are not yet implemented")

    names_ = kwargs.pop("names", None)
    if names_ is None:
        names_ = list(hier.levels(time)[0].patches[0].patch_datas.keys())

    for lvl in hier.levels(time).values():
        for patch in lvl.patches:
            for name in names_:
                pdata = patch.patch_datas[name]
                ds = np.asarray(pdata.dataset)
                # NOTE(review): "out" is overwritten on every iteration, so
                # only the last patch/name contributes to the return value —
                # confirm whether results should instead be accumulated
                out = find_peaks(ds, **kwargs)

    return zip(*out)
125 changes: 124 additions & 1 deletion pyphare/pyphare/pharesee/hierarchy/hierarchy.py
Original file line number Diff line number Diff line change
Expand Up @@ -438,8 +438,9 @@
label = "L{level}P{patch}".format(level=lvl_nbr, patch=ip)
marker = kwargs.get("marker", "")
ls = kwargs.get("ls", "--")
lw = kwargs.get("lw", 1)
color = kwargs.get("color", "k")
ax.plot(x, val, label=label, marker=marker, ls=ls, color=color)
ax.plot(x, val, label=label, marker=marker, ls=ls, lw=lw, color=color)

ax.set_title(kwargs.get("title", ""))
ax.set_xlabel(kwargs.get("xlabel", "x"))
Expand Down Expand Up @@ -614,6 +615,128 @@

return final, dp(final, **kwargs)

def interpol(self, time, interp="nearest"):
    """Build one interpolator per quantity from the finest field data.

    Parameters:
        time: hierarchy time at which to interpolate.
        interp: interpolation kind forwarded to ``build_interpolator``
            (default "nearest").

    Returns:
        dict mapping each quantity name to its interpolator, as returned
        by ``build_interpolator``.
    """
    # imports are local to avoid a module-level import cycle
    from pyphare.pharesee.hierarchy.hierarchy_utils import flat_finest_field
    from pyphare.pharesee.run.utils import build_interpolator

    # hoist the repeated level-0 / patch-0 lookup out of the loop
    pdatas = self.level(0, time).patches[0].patch_datas
    nbrGhosts = next(iter(pdatas.values())).ghosts_nbr

    interp_ = {}
    for qty in self.quantities():
        pdata = pdatas[qty]
        data, coords = flat_finest_field(self, qty, time=time)
        interp_[qty] = build_interpolator(
            data, coords, interp, pdata.box, pdata.dl, qty, nbrGhosts
        )
    return interp_
Comment thread
coderabbitai[bot] marked this conversation as resolved.

def __array_ufunc__(self, ufunc, method, *inputs, **kwargs):
    """Apply *ufunc* level-by-level and time-by-time to hierarchy inputs.

    Every input must be a PatchHierarchy sharing the same times. The
    result is wrapped back into the type of the first input (ScalarField
    or VectorField); any other type yields NotImplemented.
    """
    if method != "__call__":
        return NotImplemented

    times = inputs[0].times()
    for x in inputs:
        # type-check before touching x.times(): a non-hierarchy would
        # otherwise raise AttributeError instead of the intended TypeError
        if not isinstance(x, PatchHierarchy):
            raise TypeError("this arg should be a PatchHierarchy")
        # raise instead of assert so the check survives python -O
        if times != x.times():
            raise ValueError("all inputs must have the same times")

    # level indices of the first input, used to re-key results below
    ils = [key for dict_ in inputs[0].patch_levels for key in dict_]
    h_type = type(inputs[0])

    all_ = []
    for i in range(len(times)):
        # one list of patch levels per input, aligned level by level
        pls = [list(x.patch_levels[i].values()) for x in inputs]
        out = [getattr(ufunc, method)(*pl, **kwargs) for pl in zip(*pls)]
        # re-index the per-level results by their level number
        all_.append(dict(zip(ils, out)))

    h_ = PatchHierarchy(
        all_,
        domain_box=self.domain_box,
        # NOTE(review): self.refinement_ratio is passed as refinement_box;
        # the names do not match — confirm against PatchHierarchy.__init__
        refinement_box=self.refinement_ratio,
        times=self.times(),
        data_files=self.data_files,
        selection_box=self.selection_box,
    )

    from .scalarfield import ScalarField
    from .vectorfield import VectorField

    if h_type is ScalarField:
        return ScalarField(h_)
    if h_type is VectorField:
        return VectorField(h_)
    # previously fell through returning None implicitly; make the
    # refusal explicit so NumPy can try other handlers
    return NotImplemented

def __array_function__(self, func, types, args, kwargs):
    """Apply a NumPy function level-by-level to hierarchy args.

    Non-hierarchy args are collected in order and forwarded verbatim
    after the per-level patch-level arguments. All hierarchy args must
    share the same times. The result is wrapped back into the type of
    the first arg (ScalarField or VectorField); any other type yields
    NotImplemented.
    """
    others = []

    times = args[0].times()
    for x in args:
        if isinstance(x, PatchHierarchy):
            # raise instead of assert so the check survives python -O
            if times != x.times():
                raise ValueError("all hierarchies must have the same times")
        else:
            others.append(x)

    # level indices of the first arg, used to re-key results below
    ils = [key for dict_ in args[0].patch_levels for key in dict_]
    h_type = type(args[0])

    all_ = []
    for i in range(len(times)):
        # one list of patch levels per hierarchy arg, aligned level by level
        pls = [
            list(x.patch_levels[i].values())
            for x in args
            if isinstance(x, PatchHierarchy)
        ]
        out = [func(*pl, *others, **kwargs) for pl in zip(*pls)]
        # re-index the per-level results by their level number
        all_.append(dict(zip(ils, out)))

    h_ = PatchHierarchy(
        all_,
        domain_box=self.domain_box,
        # NOTE(review): self.refinement_ratio is passed as refinement_box;
        # the names do not match — confirm against PatchHierarchy.__init__
        refinement_box=self.refinement_ratio,
        times=self.times(),
        data_files=self.data_files,
        selection_box=self.selection_box,
    )

    from .scalarfield import ScalarField
    from .vectorfield import VectorField

    if h_type is ScalarField:
        return ScalarField(h_)
    if h_type is VectorField:
        return VectorField(h_)
    # previously fell through returning None implicitly; make the
    # refusal explicit so NumPy can try other handlers
    return NotImplemented


def finest_part_data(hierarchy, time=None):
"""
Expand Down
Loading
Loading