-
Notifications
You must be signed in to change notification settings - Fork 32
add a gaussian_filter and find_peaks for scalarfield and vectorfield #1120
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: master
Are you sure you want to change the base?
Changes from all commits
9a3f6e7
4e21304
3053bdb
d3385c7
5b92f18
be9530f
91652be
ed5be94
78b22ee
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,170 @@ | ||
| import numpy as np | ||
|
|
||
| from pyphare.pharesee.hierarchy import ScalarField, VectorField | ||
Check noticeCode scanning / CodeQL Unused import Note
Import of 'ScalarField' is not used.
Import of 'VectorField' is not used. |
||
| from pyphare.pharesee.hierarchy.hierarchy_utils import compute_hier_from | ||
Check noticeCode scanning / CodeQL Unused import Note
Import of 'compute_hier_from' is not used.
|
||
|
|
||
| from scipy.ndimage import gaussian_filter | ||
|
|
||
|
|
||
|
|
||
| def _compute_gaussian_filter_on_scalarfield(patch_datas, **kwargs): | ||
| from scipy.ndimage import gaussian_filter | ||
|
|
||
| ndim = patch_datas["value"].box.ndim | ||
| nb_ghosts = kwargs["nb_ghosts"] | ||
| sigma = kwargs["sigma"] | ||
| ds = np.asarray(patch_datas["value"][:]) | ||
|
|
||
| ds_ = np.full(list(ds.shape), np.nan) | ||
|
|
||
| gf_ = gaussian_filter(ds, sigma=sigma) | ||
| select = tuple([slice(nb_ghosts, -nb_ghosts) for _ in range(ndim)]) | ||
| ds_[select] = np.asarray(gf_[select]) | ||
|
|
||
| return ( | ||
| {"name": "value", "data": ds_, "centering": patch_datas["value"].centerings}, | ||
| ) | ||
|
|
||
|
|
||
| def _compute_gaussian_filter_on_vectorfield(patch_datas, **kwargs): | ||
| from scipy.ndimage import gaussian_filter | ||
|
|
||
| ref_name = next(iter(patch_datas.keys())) | ||
|
|
||
| ndim = patch_datas[ref_name].box.ndim | ||
| nb_ghosts = kwargs["nb_ghosts"] | ||
| sigma = kwargs["sigma"] | ||
| ds_x = np.asarray(patch_datas["x"][:]) | ||
| ds_y = np.asarray(patch_datas["y"][:]) | ||
| ds_z = np.asarray(patch_datas["z"][:]) | ||
|
|
||
| dsx_ = np.full(list(ds_x.shape), np.nan) | ||
| dsy_ = np.full(list(ds_y.shape), np.nan) | ||
| dsz_ = np.full(list(ds_z.shape), np.nan) | ||
|
|
||
| gfx_ = gaussian_filter(ds_x, sigma=sigma) | ||
| gfy_ = gaussian_filter(ds_y, sigma=sigma) | ||
| gfz_ = gaussian_filter(ds_z, sigma=sigma) | ||
|
|
||
| select = tuple([slice(nb_ghosts, -nb_ghosts) for _ in range(ndim)]) | ||
|
|
||
| dsx_[select] = np.asarray(gfx_[select]) | ||
| dsy_[select] = np.asarray(gfy_[select]) | ||
| dsz_[select] = np.asarray(gfz_[select]) | ||
|
|
||
| return ( | ||
| {"name": "x", "data": dsx_, "centering": patch_datas["x"].centerings}, | ||
| {"name": "y", "data": dsy_, "centering": patch_datas["y"].centerings}, | ||
| {"name": "z", "data": dsz_, "centering": patch_datas["z"].centerings}, | ||
| ) | ||
|
|
||
|
|
||
| # def gFilt(hier, **kwargs): | ||
| # sigma = kwargs.get("sigma", 2) | ||
| # | ||
| # # time0 = list(hier.times())[0] | ||
| # # level0 = 0 | ||
| # # p0 = 0 | ||
| # # pd0 = hier.levels(time0)[level0].patches[p0].patch_datas | ||
| # # key0 = list(pd0.keys())[0] | ||
| # # nb_ghosts = np.max(pd0[key0].ghosts_nbr) | ||
| # | ||
| # nb_ghosts = np.max(list(hier.level(0).patches[0].patch_datas.values())[0].ghosts_nbr) | ||
| # | ||
| # if nb_ghosts < sigma : | ||
| # print("nb_ghosts ({0}) < sigma ({1}) : your gaussian filter might be dirty".format(nb_ghosts, sigma)) | ||
| # | ||
| # if hier.ndim == 1: | ||
| # if isinstance(hier, ScalarField) : | ||
| # h = compute_hier_from(_compute_gaussian_filter_on_scalarfield, hier, nb_ghosts=nb_ghosts, sigma=sigma) | ||
| # return ScalarField(h) | ||
| # elif isinstance(hier, VectorField) : | ||
| # h = compute_hier_from(_compute_gaussian_filter_on_vectorfield, hier, nb_ghosts=nb_ghosts, sigma=sigma) | ||
| # return VectorField(h) | ||
| # else: | ||
| # return NotImplemented | ||
| # else: | ||
| # return NotImplemented | ||
|
Comment on lines
+62
to
+87
Check noticeCode scanning / CodeQL Commented-out code Note
This comment appears to contain commented-out code.
|
||
|
|
||
|
|
||
def gF(hier, **kwargs):
    """Return a deep copy of `hier` whose patch datasets are gaussian-filtered.

    Each patch is re-sampled (via hier.interpol) on a box padded by
    ``n_pad = 4*sigma + 1`` cells, filtered, then cropped back to the patch
    extent, so values at patch borders agree as much as possible with the
    neighbouring patches.

    Parameters
    ----------
    hier : PatchHierarchy (or ScalarField/VectorField)
        Hierarchy to filter; it is not modified, a deep copy is returned.
    sigma : scalar, optional (keyword, default 4)
        Standard deviation forwarded to scipy.ndimage.gaussian_filter.

    Raises
    ------
    TypeError
        If ``sigma`` is not a scalar.
    ValueError
        If ``sigma < 2``, if the padding does not exceed the ghost width,
        or for an unsupported dimensionality.
    """
    sigma = kwargs.get("sigma", 4)
    # FIX: the original only rejected sigma == 1 although the message
    # promises "at least 2"; enforce that, and reject non-scalar sigma
    # which would break the n_pad arithmetic below.
    if not np.isscalar(sigma):
        raise TypeError("sigma must be a scalar value")
    if sigma < 2:
        raise ValueError("sigma value has to be at least 2")
    h_ = hier.__deepcopy__(memo={})
    ndim = hier.ndim
    # FIX: force an int so the slice bounds below stay integral even for
    # a float sigma.
    n_pad = int(4 * sigma + 1)

    # The gaussian filter is calculated on the box extended by n_pad.
    # Hence, the number of points is large enough so that the value at
    # the last point of the real box is as equal as possible to the one
    # at the first point of the next box.
    for time in h_.times():
        interp_ = hier.interpol(time)
        for lvl in h_.levels(time).values():
            for patch in lvl.patches:
                names = list(patch.patch_datas.keys())
                box = patch.box

                for name in names:
                    pdata = patch.patch_datas[name]
                    nb_ghosts = pdata.ghosts_nbr
                    # FIX: ghosts_nbr is per-dimension, so the original
                    # `not n_pad > nb_ghosts` is ambiguous for ndim > 1.
                    # The padded region must be strictly wider than the
                    # ghost layer for the crop below to be non-empty.
                    if np.any(n_pad <= np.asarray(nb_ghosts)):
                        raise ValueError('sigma value is too small')

                    # Physical coordinates of the padded box, per dimension.
                    r_ = []
                    for i in range(ndim):
                        s_ = np.arange(box.lower[i] - n_pad, box.upper[i] + 2 + n_pad) * pdata.dl[i]
                        r_.append(s_)

                    func, _ = interp_[name]

                    if ndim == 1:
                        data = func(r_[0])
                    elif ndim == 2:
                        data = func(r_[0], r_[1])
                    elif ndim == 3:
                        data = func(r_[0], r_[1], r_[2])
                    else:
                        raise ValueError('invalid dimension')

                    gf_ = gaussian_filter(data, sigma=sigma)
                    # Crop back to the patch's own extent (domain + ghosts),
                    # discarding the padding used to stabilise the filter
                    # near patch boundaries.
                    select = tuple(slice(n_pad - nb_ghosts[i], -n_pad + nb_ghosts[i]) for i in range(ndim))
                    pdata.dataset = np.asarray(gf_[select])

    return h_
|
|
||
|
|
||
def peakIds(hier, **kwargs):
    """Locate peaks in the patch datasets of `hier` at its single time.

    The optional kwarg ``names`` restricts which patch-data quantities are
    scanned; by default all quantities of the first patch of level 0 are
    used.  Remaining kwargs are forwarded to scipy.signal.find_peaks.
    Only a single-time hierarchy is supported.
    """
    from scipy.signal import find_peaks

    times = list(hier.times())
    if len(times) == 1:
        time = times[0]
    else:
        raise ValueError('multiple time is not yet implemented')

    names_ = kwargs.pop("names", None)
    if names_ is None:
        # Default quantity list taken from the first patch of level 0;
        # assumes all patches carry the same quantities — TODO confirm.
        names_ = list(hier.levels(time)[0].patches[0].patch_datas.keys())

    for lvl in hier.levels(time).values():
        for patch in lvl.patches:
            for name in names_:
                pdata = patch.patch_datas[name]
                ds = np.asarray(pdata.dataset)

                # NOTE(review): `out` is overwritten on every iteration, so
                # only the peaks of the LAST dataset visited are returned —
                # looks unintended; confirm whether results should accumulate
                # across patches/quantities.
                out = find_peaks(ds, **kwargs)

    # NOTE(review): find_peaks returns (indices_array, properties_dict);
    # zip(*out) therefore pairs peak indices with the dict's KEYS, not with
    # peak heights — verify this is the intended return value.
    return zip(*out)
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -438,8 +438,9 @@ | |
| label = "L{level}P{patch}".format(level=lvl_nbr, patch=ip) | ||
| marker = kwargs.get("marker", "") | ||
| ls = kwargs.get("ls", "--") | ||
| lw = kwargs.get("lw", 1) | ||
| color = kwargs.get("color", "k") | ||
| ax.plot(x, val, label=label, marker=marker, ls=ls, color=color) | ||
| ax.plot(x, val, label=label, marker=marker, ls=ls, lw=lw, color=color) | ||
|
|
||
| ax.set_title(kwargs.get("title", "")) | ||
| ax.set_xlabel(kwargs.get("xlabel", "x")) | ||
|
|
@@ -614,6 +615,128 @@ | |
|
|
||
| return final, dp(final, **kwargs) | ||
|
|
||
| def interpol(self, time, interp="nearest"): | ||
Check noticeCode scanning / CodeQL Cyclic import Note
Import of module
pyphare.pharesee.hierarchy.hierarchy_utils Error loading related location Loading
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. ^^ |
||
| from pyphare.pharesee.hierarchy.hierarchy_utils import flat_finest_field | ||
| from pyphare.pharesee.run.utils import build_interpolator | ||
|
|
||
| nbrGhosts = list(self.level(0, time).patches[0].patch_datas.values())[0].ghosts_nbr | ||
|
|
||
| interp_ = {} | ||
| for qty in self.quantities(): | ||
| box = self.level(0, time).patches[0].patch_datas[qty].box | ||
| dl = self.level(0, time).patches[0].patch_datas[qty].dl | ||
| data, coords = flat_finest_field(self, qty, time=time) | ||
| interp_[qty] = build_interpolator(data, coords, interp, box, dl, qty, nbrGhosts) | ||
| return interp_ | ||
|
coderabbitai[bot] marked this conversation as resolved.
|
||
|
|
||
| def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): | ||
Check noticeCode scanning / CodeQL Explicit returns mixed with implicit (fall through) returns Note
Mixing implicit and explicit returns may indicate an error, as implicit returns always return None.
|
||
| print(f"__array_function__ of PatchHierarchy called for {ufunc.__name__}") | ||
| if method != "__call__": | ||
| return NotImplemented | ||
|
|
||
| final = [] | ||
|
|
||
| times = inputs[0].times() | ||
| for x in inputs: | ||
| assert(times == x.times()) | ||
| if not isinstance(x, PatchHierarchy): | ||
| raise TypeError("this arg should be a PatchHierarchy") | ||
|
|
||
|
|
||
| # for d in inputs[0].patch_levels: | ||
| # print(type(d), d) | ||
|
Comment on lines
+646
to
+647
Check noticeCode scanning / CodeQL Commented-out code Note
This comment appears to contain commented-out code.
|
||
|
|
||
|
|
||
| ils = [key for dict_ in inputs[0].patch_levels for key in dict_] | ||
| h_type = type(inputs[0]) | ||
|
|
||
| all_ = [] | ||
| for i in range(len(times)): | ||
| pls = [] | ||
| for x in inputs: | ||
| pls_ = [] | ||
| for plvl in x.patch_levels[i].values(): | ||
| pls_.append(plvl) | ||
| pls.append(pls_) | ||
|
|
||
| out = [getattr(ufunc, method)(*pl, **kwargs) for pl in zip(*pls)] | ||
|
|
||
| # out est une liste de liste de patchlevel : indice sur le levels (pour 1 temps donne) | ||
| # il faut les remettre dans un dict avec ilvl | ||
|
|
||
| final = {} | ||
| for il, pl in zip(ils, out): | ||
| final[il] = pl | ||
|
|
||
| all_.append(final) | ||
|
|
||
| h_ = PatchHierarchy(all_, | ||
| domain_box=self.domain_box, | ||
| refinement_box=self.refinement_ratio, | ||
| times=self.times(), | ||
| data_files=self.data_files, | ||
| selection_box=self.selection_box) | ||
|
|
||
| from .scalarfield import ScalarField | ||
Check noticeCode scanning / CodeQL Cyclic import Note
Import of module
pyphare.pharesee.hierarchy.scalarfield Error loading related location Loading |
||
| from .vectorfield import VectorField | ||
Check noticeCode scanning / CodeQL Cyclic import Note
Import of module
pyphare.pharesee.hierarchy.vectorfield Error loading related location Loading |
||
|
|
||
| if h_type is ScalarField: | ||
| return ScalarField(h_) | ||
| elif h_type is VectorField: | ||
| return VectorField(h_) | ||
|
|
||
| def __array_function__(self, func, types, args, kwargs): | ||
Check noticeCode scanning / CodeQL Explicit returns mixed with implicit (fall through) returns Note
Mixing implicit and explicit returns may indicate an error, as implicit returns always return None.
|
||
| print(f"__array_function__ of Patch {func.__name__} called for {[getattr(a, 'name', a) for a in args]}") | ||
|
|
||
| final = [] | ||
| others = [] | ||
|
|
||
| times = args[0].times() | ||
| for x in args: | ||
| if isinstance(x, PatchHierarchy): | ||
| assert(times == x.times()) | ||
| else: | ||
| others.append(x) | ||
|
|
||
| ils = [key for dict_ in args[0].patch_levels for key in dict_] | ||
| h_type = type(args[0]) | ||
|
|
||
|
|
||
| all_ = [] | ||
| for i in range(len(times)): | ||
| pls = [] | ||
| for x in args: | ||
| if isinstance(x, PatchHierarchy): | ||
| pls_ = [] | ||
| for plvl in x.patch_levels[i].values(): | ||
| pls_.append(plvl) | ||
| pls.append(pls_) | ||
|
|
||
| out = [] | ||
| for pl in zip(*pls): | ||
| out.append(func(*pl, *others, **kwargs)) | ||
|
|
||
| final = {} | ||
| for il, pl in zip(ils, out): | ||
| final[il] = pl | ||
|
|
||
| all_.append(final) | ||
|
|
||
| h_ = PatchHierarchy(all_, | ||
| domain_box=self.domain_box, | ||
| refinement_box=self.refinement_ratio, | ||
| times=self.times(), | ||
| data_files=self.data_files, | ||
| selection_box=self.selection_box) | ||
|
|
||
| from .scalarfield import ScalarField | ||
Check noticeCode scanning / CodeQL Cyclic import Note
Import of module
pyphare.pharesee.hierarchy.scalarfield Error loading related location Loading |
||
| from .vectorfield import VectorField | ||
Check noticeCode scanning / CodeQL Cyclic import Note
Import of module
pyphare.pharesee.hierarchy.vectorfield Error loading related location Loading |
||
|
|
||
| if h_type is ScalarField: | ||
| return ScalarField(h_) | ||
| elif h_type is VectorField: | ||
| return VectorField(h_) | ||
|
|
||
|
|
||
| def finest_part_data(hierarchy, time=None): | ||
| """ | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
is there something to do here ?