Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions pyphare/pyphare/core/gridlayout.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@
"Pyz": "primal",
"Pzz": "primal",
"tags": "dual",
"value": "primal",
},
"y": {
"Bx": "dual",
Expand Down Expand Up @@ -79,6 +80,7 @@
"Pyz": "primal",
"Pzz": "primal",
"tags": "dual",
"value": "primal",
},
"z": {
"Bx": "dual",
Expand Down Expand Up @@ -114,6 +116,7 @@
"Pyz": "primal",
"Pzz": "primal",
"tags": "dual",
"value": "primal",
},
}
yee_centering_lower = {
Expand Down
6 changes: 3 additions & 3 deletions pyphare/pyphare/pharein/simulation.py
Original file line number Diff line number Diff line change
Expand Up @@ -420,10 +420,10 @@ def get_max_ghosts():
largest_patch_size = kwargs.get("largest_patch_size", None)

# to prevent primal ghost box overlaps of non adjacent patches, we need smallest_patch_size * 2 + 1
smallest_patch_size = phare_utilities.np_array_ify(max_ghosts, ndim) * 2 + 1
smallest_patch_size = phare_utilities.np_array_ify(max_ghosts, ndim) + 1
# TORM next lines after https://github.com/llnl/SAMRAI/issues/311
min_per_interp = [6, 9, 9] # SAMRAI BORDER BUG
smallest_patch_size = phare_utilities.np_array_ify(min_per_interp[interp - 1], ndim)
# min_per_interp = [3, 5, 5] # SAMRAI BORDER BUG
# smallest_patch_size = phare_utilities.np_array_ify(min_per_interp[interp - 1], ndim)
if "smallest_patch_size" in kwargs and kwargs["smallest_patch_size"] is not None:
smallest_patch_size = phare_utilities.np_array_ify(
kwargs["smallest_patch_size"], ndim
Expand Down
3 changes: 2 additions & 1 deletion pyphare/pyphare/pharesee/geometry.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@

from ..core import box as boxm
from .hierarchy.patchdata import FieldData
from .hierarchy.hierarchy_utils import is_root_lvl

from pyphare.core.phare_utilities import listify, is_scalar

Expand Down Expand Up @@ -342,6 +341,8 @@ def level_ghost_boxes(hierarchy, quantities, levelNbrs=[], time=None):
levelNbrs : limit working set of hierarchy levels to those requested, if scalar, returns just that level
time : the simulation time to access the appropriate data for the requested time
"""
from .hierarchy.hierarchy_utils import is_root_lvl # avoid cyclic imports

Check notice

Code scanning / CodeQL

Cyclic import Note

Import of module
pyphare.pharesee.hierarchy.hierarchy_utils
begins an import cycle.

quantities = listify(quantities)

levelNbrs_is_scalar = is_scalar(levelNbrs)
Expand Down
173 changes: 168 additions & 5 deletions pyphare/pyphare/pharesee/hierarchy/hierarchy_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,10 +147,10 @@
patch_levels_per_time = []
for t in reference_hier.times():
patch_levels = {}
for ilvl in range(reference_hier.levelNbr()):
patch_levels[ilvl] = PatchLevel(
ilvl, new_patches_from(compute, hierarchies, ilvl, t, **kwargs)
)
for ilvl in reference_hier.levels(t).keys():
patches = new_patches_from(compute, hierarchies, ilvl, t, **kwargs)
if patches:
patch_levels[ilvl] = PatchLevel(ilvl, patches)
patch_levels_per_time.append(patch_levels)
return PatchHierarchy(
patch_levels_per_time,
Expand Down Expand Up @@ -182,6 +182,9 @@

def new_patches_from(compute, hierarchies, ilvl, t, **kwargs):
reference_hier = hierarchies[0]
if ilvl not in reference_hier.levels(t):
return []

new_patches = []
ref_patches = reference_hier.level(ilvl, time=t).patches
for ip, current_patch in enumerate(ref_patches):
Expand Down Expand Up @@ -596,7 +599,7 @@
phut.assert_fp_any_all_close(ref[:], cmp[:], atol=1e-16)
except AssertionError as e:
print(e)
return self.failed[0][0]
return self.failed[0][0] if self.failed else "=="

def __call__(self, reason, ref=None, cmp=None):
self.failed.append((reason, ref, cmp))
Expand All @@ -612,6 +615,66 @@
return reversed(self.failed)


def overlap_diff_hierarchy(hier, time):
    """
    Build a hierarchy whose node values hold the maximum absolute difference
    observed between any two patch datas whose ghost boxes overlap in `hier`.

    Parameters
    ----------
    hier : the PatchHierarchy to scan for overlaps
    time : the simulation time at which to access patch data

    Returns
    -------
    A zeroed copy of `hier` where each node covered by one or more overlaps
    carries the largest |difference| seen across those overlaps.
    """
    # local imports to avoid cyclic imports at module load time
    import pyphare.core.box as boxm
    from pyphare.pharesee.geometry import hierarchy_overlaps

    diff_hier = zeros_like(hier, time=time)

    def diff_patch_for(box, ilvl):
        # locate the patch of the diff hierarchy whose AMR box matches `box`
        for patch in diff_hier.levels(time)[ilvl]:
            if patch.box == box:
                return patch
        raise RuntimeError("Patch not found")

    found = 0
    for ilvl, overlaps in hierarchy_overlaps(hier, time).items():
        for overlap in overlaps:
            pd1, pd2 = overlap["pdatas"]
            ovrlp_box = overlap["box"]
            offsets = overlap["offset"]
            patch0, patch1 = overlap["patches"]
            name = overlap["name"]

            # overlap box expressed in each patch data's local (ghost) index space
            box_pd1 = boxm.amr_to_local(
                ovrlp_box, boxm.shift(pd1.ghost_box, offsets[0])
            )
            box_pd2 = boxm.amr_to_local(
                ovrlp_box, boxm.shift(pd2.ghost_box, offsets[1])
            )

            slice1 = boxm.select(pd1.dataset, box_pd1)
            slice2 = boxm.select(pd2.dataset, box_pd2)

            diff = np.abs(slice1 - slice2)

            diff_patch0 = diff_patch_for(patch0.box, ilvl)
            diff_patch1 = diff_patch_for(patch1.box, ilvl)

            diff_data0 = diff_patch0.patch_datas[name].dataset
            diff_data1 = diff_patch1.patch_datas[name].dataset

            dif0 = boxm.select(diff_data0, box_pd1)
            dif1 = boxm.select(diff_data1, box_pd2)

            if len(np.nonzero(diff)[0]):
                # keep the running per-node maximum across all overlaps
                boxm.DataSelector(diff_data0)[box_pd1] = np.maximum(dif0, diff)
                boxm.DataSelector(diff_data1)[box_pd2] = np.maximum(dif1, diff)

                assert len(np.nonzero(diff_patch0.patch_datas[name].dataset)[0])
                assert len(np.nonzero(diff_patch1.patch_datas[name].dataset)[0])
                found = 1

    if found:
        # at least one non-zero diff was written, so the result must reflect it
        assert has_non_zero(diff_hier, time=time)

    return diff_hier


def hierarchy_compare(this, that, atol=1e-16):
eqr = EqualityReport()

Expand Down Expand Up @@ -699,3 +762,103 @@
else:
raise RuntimeError("unexpected state")
return cier


def zero_patch_hierarchy_like(hier, **kwargs):
    """
    Deep-copy `hier` and replace every patch data with a zero-filled copy.

    The keyword argument "time" (scalar or list) restricts the copy to the
    given times; by default all times present in `hier` are copied.
    """
    from copy import deepcopy

    requested_times = phut.listify(kwargs.get("time", hier.times()))

    zeroed = deepcopy(hier)
    zeroed.time_hier = {}
    for t in requested_times:
        zeroed.time_hier[format_timestamp(t)] = deepcopy(hier.levels(t))
        for level in zeroed.levels(t).values():
            for patch in level:
                for name in list(patch.patch_datas.keys()):
                    patch.patch_datas[name] = zeros_like(patch.patch_datas[name])
        # sanity: every dataset at this time must now be all-zero
        assert not has_non_zero(zeroed, time=t)
    return zeroed


def zero_field_data_like(field_data):
    """Return a copy of `field_data` whose dataset is a fresh all-zero array."""
    from copy import deepcopy

    zeroed = deepcopy(field_data)
    # the deep copy is expected to share the dataset buffer with the original
    assert zeroed.dataset is field_data.dataset
    zeroed.dataset = np.zeros(field_data.dataset.shape)
    # after replacement the copy must own its own (zeroed) array
    assert zeroed.dataset is not field_data.dataset
    return zeroed


def zeros_like(that, **kwargs):
    """Dispatch to the zero-copy helper matching the type of `that`."""
    kind = type(that)
    if issubclass(kind, PatchHierarchy):
        return zero_patch_hierarchy_like(that, **kwargs)
    if issubclass(kind, FieldData):
        return zero_field_data_like(that, **kwargs)
    raise RuntimeError(
        "Cannot resolve type to zeros_like, consider updating if required"
    )


def field_data_has_non_zero(field_data):
    """True if the field's dataset contains at least one non-zero value."""
    return bool(np.count_nonzero(field_data.dataset[:]))


def patch_hierarchy_has_non_zero(hier, time):
    """
    True if any patch data of `hier` at `time` holds a non-zero value.

    Short-circuits on the first non-zero patch data found. The original
    loop bound an unused level number; iterating values directly is clearer.
    """
    return any(
        has_non_zero(pd)
        for lvl in hier.levels(time).values()
        for patch in lvl
        for pd in patch.patch_datas.values()
    )


def has_non_zero(that, time=None):
    """Dispatch to the non-zero check matching the type of `that`."""
    kind = type(that)
    if issubclass(kind, PatchHierarchy):
        return patch_hierarchy_has_non_zero(that, time)
    if issubclass(kind, FieldData):
        return field_data_has_non_zero(that)
    raise RuntimeError(
        "Cannot resolve type to has_non_zero, consider updating if required"
    )


def max_from_field_data(field_data):
    """Return the largest value in the field's dataset."""
    return field_data.dataset[:].max()


def max_from_patch_hierarchy(hier, time, qty):
    """
    Return a dict mapping each level number to the largest value found among
    its patch datas at `time`, optionally restricted to quantity `qty`.
    """
    vals = {}
    for ilvl, lvl in hier.levels(time).items():
        for patch in lvl:
            for name, pd in patch.patch_datas.items():
                # skip quantities other than the requested one, if any
                if qty is not None and name != qty:
                    continue
                current = max_from(pd)
                vals[ilvl] = current if ilvl not in vals else max(vals[ilvl], current)

    return vals


def max_from(that, time=None, qty=None):
    """Dispatch to the max helper matching the type of `that`."""
    kind = type(that)
    if issubclass(kind, PatchHierarchy):
        return max_from_patch_hierarchy(that, time, qty)
    if issubclass(kind, FieldData):
        return max_from_field_data(that)
    raise RuntimeError("Cannot resolve type to max_from, consider updating if required")


def min_max_patch_shape(hier, time, qty=None):
    """
    Return, per level, the [min, max] extent found among patch data array
    shapes of `hier` at `time`, optionally restricted to quantity `qty`.

    NOTE(review): this measures pd.dataset.shape, i.e. the data arrays
    including ghost cells and centering-dependent nodes — not patch.box.shape;
    presumably intended for dataset sizing, not AMR patch sizing — confirm.
    """
    time_hier = hier.levels(time)
    # inf sentinel instead of a magic 10000: any real extent replaces it,
    # so dimensions >= 10000 are reported correctly too
    val = {ilvl: [float("inf"), 0] for ilvl in time_hier.keys()}

    for ilvl, lvl in time_hier.items():
        for patch in lvl:
            for key, pd in patch.patch_datas.items():
                if qty is None or key == qty:
                    val[ilvl][0] = min(min(pd.dataset.shape), val[ilvl][0])
                    val[ilvl][1] = max(max(pd.dataset.shape), val[ilvl][1])

    return val
3 changes: 2 additions & 1 deletion pyphare/pyphare/pharesee/run/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -281,7 +281,8 @@ def all_times(self):
time = np.zeros(len(time_keys))
for it, t in enumerate(time_keys):
time[it] = float(t)
ts[quantities_per_file[basename]] = time
if basename in quantities_per_file:
ts[quantities_per_file[basename]] = time
ff.close()
return ts

Expand Down
1 change: 1 addition & 0 deletions src/amr/resources_manager/amr_utils.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
#include <SAMRAI/hier/BoxOverlap.h>
#include <SAMRAI/hier/HierarchyNeighbors.h>
#include <SAMRAI/geom/CartesianPatchGeometry.h>
#include <stdexcept>

#include <algorithm>
#include <stdexcept>
Expand Down
5 changes: 4 additions & 1 deletion src/amr/wrappers/hierarchy.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,9 @@ class HierarchyRestarter
void closeRestartFile() { SamraiLifeCycle::getRestartManager()->closeRestartFile(); }

NO_DISCARD bool isFromRestart() const
{ return SamraiLifeCycle::getRestartManager()->isFromRestart(); }
{
return SamraiLifeCycle::getRestartManager()->isFromRestart();
}

private:
std::optional<std::string> static restartFilePath(auto const& dict)
Expand All @@ -100,6 +102,7 @@ class HierarchyRestarter
return std::nullopt;
}


std::optional<std::string> _restartFilePath; // only set if we have a restart to load
initializer::PHAREDict sim_dict;
};
Expand Down
2 changes: 1 addition & 1 deletion src/core/data/field/field_box.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,6 @@ void operate_on_fields(auto& dst, auto const& src)
}



template<typename Field_t>
template<typename Operator>
void FieldBox<Field_t>::set_from(std::vector<value_type> const& vec, std::size_t seek)
Expand All @@ -79,6 +78,7 @@ void FieldBox<Field_t>::set_from(std::vector<value_type> const& vec, std::size_t
Operator{field(*dst_it)}(vec[seek]);
}


template<typename Field_t>
void FieldBox<Field_t>::append_to(std::vector<value_type>& vec)
{
Expand Down
1 change: 1 addition & 0 deletions src/core/data/tensorfield/tensorfield.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -207,6 +207,7 @@ class TensorField
NO_DISCARD auto& physicalQuantity() const { return qty_; }
NO_DISCARD auto static constexpr size() { return N; }


private:
auto static _get_index_for(Component component)
{
Expand Down
1 change: 1 addition & 0 deletions src/core/errors.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

#include "core/def.hpp"

#include <stdexcept>
#include <string>
#include <sstream>
#include <iostream>
Expand Down
1 change: 0 additions & 1 deletion tests/core/data/field/test_field.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@




namespace PHARE::core
{
template<std::size_t dim>
Expand Down
17 changes: 8 additions & 9 deletions tests/functional/harris/harris_2d.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,8 @@
import numpy as np
from pathlib import Path

import pyphare.pharein as ph

from pyphare import cpp

import pyphare.pharein as ph
from pyphare.pharesee.run import Run
from pyphare.simulator.simulator import Simulator
from pyphare.simulator.simulator import startMPI
Expand All @@ -19,9 +17,9 @@


cells = (200, 100)
time_step = 0.005
final_time = 50
timestamps = np.arange(0, final_time + time_step, final_time / 5)
time_step = 0.001
final_time = 0.01
timestamps = np.arange(0, final_time + time_step, time_step)
diag_dir = "phare_outputs/harris"


Expand All @@ -34,15 +32,16 @@ def config():
cells=cells,
dl=(0.40, 0.40),
refinement="tagging",
max_nbr_levels=2,
max_nbr_levels=3,
hyper_resistivity=0.002,
resistivity=0.001,
diag_options={
"format": "phareh5",
"options": {"dir": diag_dir, "mode": "overwrite"},
"options": {"dir": diag_dir, "mode": "overwrite", "fine_dump_lvl_max": 10},
},
strict=True,
nesting_buffer=1,
tag_buffer=3,
)

def density(x, y):
Expand Down Expand Up @@ -215,7 +214,7 @@ def tearDown(self):
ph.global_vars.sim = None

def test_run(self):
self.register_diag_dir_for_cleanup(diag_dir)
# self.register_diag_dir_for_cleanup(diag_dir)
Simulator(config()).run().reset()
if cpp.mpi_rank() == 0:
plot_dir = Path(f"{diag_dir}_plots") / str(cpp.mpi_size())
Expand Down
Loading
Loading