Skip to content

Commit

Permalink
Merge pull request #77 from ax3l/fix-np-2.0-fixed-size-str-1.1.X
Browse files Browse the repository at this point in the history
Numpy 2.0: `np.string_` -> `np.bytes_`
  • Loading branch information
ax3l committed Jun 18, 2024
2 parents dd7518d + bf6fe49 commit 34609e6
Show file tree
Hide file tree
Showing 2 changed files with 76 additions and 76 deletions.
78 changes: 39 additions & 39 deletions openpmd_validator/check_h5.py
Original file line number Diff line number Diff line change
Expand Up @@ -239,7 +239,7 @@ def test_attr(f, v, request, name, is_type=None, type_format=None):
type_format: (numpy or python) data type
Used with is_type to specify numpy ndarray dtypes or a
base np.string_ format regex. Can be a list of data types
base np.bytes_ format regex. Can be a list of data types
for ndarrays where at least one data type must match.
Returns
Expand All @@ -257,7 +257,7 @@ def test_attr(f, v, request, name, is_type=None, type_format=None):
# test type
if is_type is not None:
type_format_names = None
if not type_format is None and not is_type is np.string_ and \
if not type_format is None and not is_type is np.bytes_ and \
not isinstance(type_format, Iterable):
type_format = [type_format]
type_format_names = map(lambda x: x.__name__, type_format)
Expand All @@ -266,8 +266,8 @@ def test_attr(f, v, request, name, is_type=None, type_format=None):
is_type_names = "' or '".join(map(lambda x: str(x.__name__), is_type))
# add for each type in is_type -> wrong, need to add this at the comparison level!
if type(value) in is_type:
# np.string_ format or general ndarray dtype text
if type(value) is np.string_ and type_format is not None:
# np.bytes_ format or general ndarray dtype text
if type(value) is np.bytes_ and type_format is not None:
regEx = re.compile(type_format) # Python3 only: re.ASCII
if regEx.match(value.decode()) :
result_array = np.array([0,0])
Expand Down Expand Up @@ -404,15 +404,15 @@ def check_root_attr(f, v):

# STANDARD.md
# required
result_array += test_attr(f, v, "required", "openPMD", np.string_, "^[0-9]+\.[0-9]+\.[0-9]+$")
result_array += test_attr(f, v, "required", "openPMD", np.bytes_, "^[0-9]+\.[0-9]+\.[0-9]+$")
result_array += test_attr(f, v, "required", "openPMDextension", np.uint32)
result_array += test_attr(f, v, "required", "basePath", np.string_, "^\/data\/\%T\/$")
result_array += test_attr(f, v, "required", "iterationEncoding", np.string_, "^groupBased|fileBased$")
result_array += test_attr(f, v, "required", "iterationFormat", np.string_)
result_array += test_attr(f, v, "required", "basePath", np.bytes_, "^\/data\/\%T\/$")
result_array += test_attr(f, v, "required", "iterationEncoding", np.bytes_, "^groupBased|fileBased$")
result_array += test_attr(f, v, "required", "iterationFormat", np.bytes_)

# optional but required for data
result_array += test_attr(f, v, "optional", "meshesPath", np.string_, "^.*\/$")
result_array += test_attr(f, v, "optional", "particlesPath", np.string_, "^.*\/$")
result_array += test_attr(f, v, "optional", "meshesPath", np.bytes_, "^.*\/$")
result_array += test_attr(f, v, "optional", "particlesPath", np.bytes_, "^.*\/$")

# groupBased iteration encoding needs to match basePath
if result_array[0] == 0 :
Expand All @@ -423,17 +423,17 @@ def check_root_attr(f, v):
result_array += np.array([1,0])

# recommended
result_array += test_attr(f, v, "recommended", "author", np.string_)
result_array += test_attr(f, v, "recommended", "software", np.string_)
result_array += test_attr(f, v, "recommended", "author", np.bytes_)
result_array += test_attr(f, v, "recommended", "software", np.bytes_)
result_array += test_attr(f, v, "recommended",
"softwareVersion", np.string_)
result_array += test_attr(f, v, "recommended", "date", np.string_,
"softwareVersion", np.bytes_)
result_array += test_attr(f, v, "recommended", "date", np.bytes_,
"^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2} [\+|-][0-9]{4}$")

# optional
result_array += test_attr(f, v, "optional", "softwareDependencies", np.string_)
result_array += test_attr(f, v, "optional", "machine", np.string_)
result_array += test_attr(f, v, "optional", "comment", np.string_)
result_array += test_attr(f, v, "optional", "softwareDependencies", np.bytes_)
result_array += test_attr(f, v, "optional", "machine", np.bytes_)
result_array += test_attr(f, v, "optional", "comment", np.bytes_)

return(result_array)

Expand Down Expand Up @@ -614,20 +614,20 @@ def check_meshes(f, iteration, v, extensionStates):
result_array += test_attr(field, v, "required",
"gridUnitSI", np.float64)
result_array += test_attr(field, v, "required",
"dataOrder", np.string_)
"dataOrder", np.bytes_)
result_array += test_attr(field, v, "required",
"axisLabels", np.ndarray, np.string_)
"axisLabels", np.ndarray, np.bytes_)
# Specific check for geometry
geometry_test = test_attr(field, v, "required", "geometry", np.string_)
geometry_test = test_attr(field, v, "required", "geometry", np.bytes_)
result_array += geometry_test
# geometryParameters is required when using thetaMode
if geometry_test[0] == 0 and field.attrs["geometry"] == b"thetaMode" :
result_array += test_attr(field, v, "required",
"geometryParameters", np.string_)
"geometryParameters", np.bytes_)
# otherwise it is optional
else :
result_array += test_attr(field, v, "optional",
"geometryParameters", np.string_)
"geometryParameters", np.bytes_)

# Attributes of the record's components
if is_scalar_record(field) : # If the record is a scalar field
Expand All @@ -647,53 +647,53 @@ def check_meshes(f, iteration, v, extensionStates):
if extensionStates['ED-PIC'] and len(list_meshes) > 0:
# Check the attributes associated with the field solver
result_array += test_attr(f[full_meshes_path], v, "required",
"fieldSolver", np.string_)
"fieldSolver", np.bytes_)
valid, field_solver = get_attr(f[full_meshes_path], "fieldSolver")
if (valid == True) and (field_solver in ["other", "GPSTD"]) :
result_array += test_attr(f[full_meshes_path], v, "required",
"fieldSolverParameters", np.string_)
"fieldSolverParameters", np.bytes_)

# Check for the attributes associated with the field boundaries
result_array += test_attr(f[full_meshes_path], v, "required",
"fieldBoundary", np.ndarray, np.string_)
"fieldBoundary", np.ndarray, np.bytes_)
valid, field_boundary = get_attr(f[full_meshes_path], "fieldBoundary")
if (valid == True) and (np.any(field_boundary == b"other")) :
result_array += test_attr(f[full_meshes_path], v, "required",
"fieldBoundaryParameters", np.ndarray, np.string_)
"fieldBoundaryParameters", np.ndarray, np.bytes_)

# Check for the attributes associated with the field boundaries
result_array += test_attr(f[full_meshes_path], v, "required",
"particleBoundary", np.ndarray, np.string_)
"particleBoundary", np.ndarray, np.bytes_)
valid, particle_boundary = get_attr(f[full_meshes_path], "particleBoundary")
if (valid == True) and (np.any(particle_boundary == b"other")) :
result_array += test_attr(f[full_meshes_path], v, "required",
"particleBoundaryParameters", np.ndarray, np.string_)
"particleBoundaryParameters", np.ndarray, np.bytes_)

# Check the attributes associated with the current smoothing
result_array += test_attr(f[full_meshes_path], v, "required",
"currentSmoothing", np.string_)
"currentSmoothing", np.bytes_)
valid, current_smoothing = get_attr(f[full_meshes_path], "currentSmoothing")
if (valid == True) and (current_smoothing != b"none") :
result_array += test_attr(f[full_meshes_path], v, "required",
"currentSmoothingParameters", np.string_)
"currentSmoothingParameters", np.bytes_)

# Check the attributes associated with the charge conservation
result_array += test_attr(f[full_meshes_path], v, "required",
"chargeCorrection", np.string_)
"chargeCorrection", np.bytes_)
valid, charge_correction = get_attr(f[full_meshes_path], "chargeCorrection")
if valid == True and charge_correction != b"none":
result_array += test_attr(f[full_meshes_path], v, "required",
"chargeCorrectionParameters", np.string_)
"chargeCorrectionParameters", np.bytes_)

# Check for the attributes of each record
for field_name in list_meshes :
field = f[full_meshes_path + field_name.encode('ascii')]
result_array + test_attr(field, v, "required",
"fieldSmoothing", np.string_)
"fieldSmoothing", np.bytes_)
valid, field_smoothing = get_attr(field, "fieldSmoothing")
if (valid == True) and (field_smoothing != b"none") :
result_array += test_attr(field,v, "required",
"fieldSmoothingParameters", np.string_)
"fieldSmoothingParameters", np.bytes_)
return(result_array)


Expand Down Expand Up @@ -827,19 +827,19 @@ def check_particles(f, iteration, v, extensionStates) :
result_array += test_attr(species, v, "required",
"particleShape", [np.single, np.double, np.longdouble])
result_array += test_attr(species, v, "required",
"currentDeposition", np.string_)
"currentDeposition", np.bytes_)
result_array += test_attr(species, v, "required",
"particlePush", np.string_)
"particlePush", np.bytes_)
result_array += test_attr(species, v, "required",
"particleInterpolation", np.string_)
"particleInterpolation", np.bytes_)

# Check for the attributes associated with the particle smoothing
result_array += test_attr(species, v, "required",
"particleSmoothing", np.string_)
"particleSmoothing", np.bytes_)
valid, particle_smoothing = get_attr(species, "particleSmoothing")
if valid == True and particle_smoothing != b"none":
result_array += test_attr(species, v, "required",
"particleSmoothingParameters", np.string_)
"particleSmoothingParameters", np.bytes_)

# Check attributes of each record of the particle
for record in list(species.keys()) :
Expand Down
74 changes: 37 additions & 37 deletions openpmd_validator/createExamples_h5.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,8 @@ def get_basePath(f, iteration):
-------
A string with an in-file path.
"""
iteration_str = np.string_(str(iteration))
return np.string_(f.attrs["basePath"]).replace(b"%T", iteration_str)
iteration_str = np.bytes_(str(iteration))
return np.bytes_(f.attrs["basePath"]).replace(b"%T", iteration_str)

def setup_base_path(f, iteration):
"""
Expand Down Expand Up @@ -67,7 +67,7 @@ def get_software_dependencies():
Returns the software dependencies of this script as a semicolon
separated string.
"""
return np.string_(
return np.bytes_(
"python@{0}.{1}.{2};".format(
sys.version_info.major,
sys.version_info.minor,
Expand All @@ -92,25 +92,25 @@ def setup_root_attr(f):
ext_list = [["ED-PIC", np.uint32(1)]]

# Required attributes
f.attrs["openPMD"] = np.string_("1.1.0")
f.attrs["openPMD"] = np.bytes_("1.1.0")
f.attrs["openPMDextension"] = ext_list[0][1] # ED-PIC extension is used
f.attrs["basePath"] = np.string_("/data/%T/")
f.attrs["meshesPath"] = np.string_("meshes/")
f.attrs["particlesPath"] = np.string_("particles/")
f.attrs["iterationEncoding"] = np.string_("groupBased")
f.attrs["iterationFormat"] = np.string_("/data/%T/")
f.attrs["basePath"] = np.bytes_("/data/%T/")
f.attrs["meshesPath"] = np.bytes_("meshes/")
f.attrs["particlesPath"] = np.bytes_("particles/")
f.attrs["iterationEncoding"] = np.bytes_("groupBased")
f.attrs["iterationFormat"] = np.bytes_("/data/%T/")

# Recommended attributes
f.attrs["author"] = np.string_("Axel Huebl <a.huebl@hzdr.de>")
f.attrs["software"] = np.string_("openPMD Example Script")
f.attrs["softwareVersion"] = np.string_("1.1.0.4")
f.attrs["author"] = np.bytes_("Axel Huebl <a.huebl@hzdr.de>")
f.attrs["software"] = np.bytes_("openPMD Example Script")
f.attrs["softwareVersion"] = np.bytes_("1.1.0.4")
f.attrs["softwareDependencies"] = get_software_dependencies()
f.attrs["machine"] = np.string_(socket.gethostname())
f.attrs["date"] = np.string_(
f.attrs["machine"] = np.bytes_(socket.gethostname())
f.attrs["date"] = np.bytes_(
datetime.datetime.now(tzlocal()).strftime('%Y-%m-%d %H:%M:%S %z'))

# Optional
f.attrs["comment"] = np.string_("This is a dummy file for test purposes.")
f.attrs["comment"] = np.bytes_("This is a dummy file for test purposes.")


def write_rho_cylindrical(meshes, mode0, mode1):
Expand All @@ -133,17 +133,17 @@ def write_rho_cylindrical(meshes, mode0, mode1):
(The first axis corresponds to r, and the second axis corresponds to z)
"""
# Path to the rho meshes, within the h5py file
full_rho_path = np.string_("rho")
full_rho_path = np.bytes_("rho")
meshes.create_dataset( full_rho_path, (3, mode0.shape[0], mode0.shape[1]), \
dtype=np.float32)
rho = meshes[full_rho_path]
rho.attrs["comment"] = np.string_(
rho.attrs["comment"] = np.bytes_(
"Density of electrons in azimuthal decomposition")

# Create the dataset (cylindrical with azimuthal modes up to m=1)
# The first axis has size 2m+1
rho.attrs["geometry"] = np.string_("thetaMode")
rho.attrs["geometryParameters"] = np.string_("m=1; imag=+")
rho.attrs["geometry"] = np.bytes_("thetaMode")
rho.attrs["geometryParameters"] = np.bytes_("m=1; imag=+")

# Add information on the units of the data
rho.attrs["unitSI"] = np.float64(1.0)
Expand All @@ -160,7 +160,7 @@ def write_rho_cylindrical(meshes, mode0, mode1):
rho.attrs["gridGlobalOffset"] = np.array([0.0, 0.0], dtype=np.float32)
rho.attrs["position"] = np.array([0.0, 0.0], dtype=np.float32)
rho.attrs["gridUnitSI"] = np.float64(1.0)
rho.attrs["dataOrder"] = np.string_("C")
rho.attrs["dataOrder"] = np.bytes_("C")
rho.attrs["axisLabels"] = np.array([b"r",b"z"])

# Add specific information for PIC simulations
Expand Down Expand Up @@ -199,11 +199,11 @@ def write_b_2d_cartesian(meshes, data_ez):
B.create_dataset(b"z", data_ez.shape, dtype=np.float32)

# Write the common metadata for the group
B.attrs["geometry"] = np.string_("cartesian")
B.attrs["geometry"] = np.bytes_("cartesian")
B.attrs["gridSpacing"] = np.array([1.0, 1.0], dtype=np.float32) # dx, dy
B.attrs["gridGlobalOffset"] = np.array([0.0, 0.0], dtype=np.float32)
B.attrs["gridUnitSI"] = np.float64(1.0)
B.attrs["dataOrder"] = np.string_("C")
B.attrs["dataOrder"] = np.bytes_("C")
B.attrs["axisLabels"] = np.array([b"x",b"y"])
B.attrs["unitDimension"] = \
np.array([0.0, 1.0, -2.0, -1.0, 0.0, 0.0, 0.0 ], dtype=np.float64)
Expand Down Expand Up @@ -261,11 +261,11 @@ def write_e_2d_cartesian(meshes, data_ex, data_ey, data_ez ):
E.create_dataset(b"z", data_ez.shape, dtype=np.float32)

# Write the common metadata for the group
E.attrs["geometry"] = np.string_("cartesian")
E.attrs["geometry"] = np.bytes_("cartesian")
E.attrs["gridSpacing"] = np.array([1.0, 1.0], dtype=np.float32) # dx, dy
E.attrs["gridGlobalOffset"] = np.array([0.0, 0.0], dtype=np.float32)
E.attrs["gridUnitSI"] = np.float64(1.0)
E.attrs["dataOrder"] = np.string_("C")
E.attrs["dataOrder"] = np.bytes_("C")
E.attrs["axisLabels"] = np.array([b"x",b"y"])
E.attrs["unitDimension"] = \
np.array([1.0, 1.0, -3.0, -1.0, 0.0, 0.0, 0.0 ], dtype=np.float64)
Expand Down Expand Up @@ -307,9 +307,9 @@ def add_EDPIC_attr_meshes(field):
and Dataset for scalar meshes)
"""
field.attrs["fieldSmoothing"] = np.string_("none")
field.attrs["fieldSmoothing"] = np.bytes_("none")
# field.attrs["fieldSmoothingParameters"] = \
# np.string_("period=10;numPasses=4;compensator=true")
# np.bytes_("period=10;numPasses=4;compensator=true")


def add_EDPIC_attr_particles(particle):
Expand All @@ -324,13 +324,13 @@ def add_EDPIC_attr_particles(particle):
"""
particle.attrs["particleShape"] = 3.0
particle.attrs["currentDeposition"] = np.string_("Esirkepov")
# particle.attrs["currentDepositionParameters"] = np.string_("")
particle.attrs["particlePush"] = np.string_("Boris")
particle.attrs["particleInterpolation"] = np.string_("uniform")
particle.attrs["particleSmoothing"] = np.string_("none")
particle.attrs["currentDeposition"] = np.bytes_("Esirkepov")
# particle.attrs["currentDepositionParameters"] = np.bytes_("")
particle.attrs["particlePush"] = np.bytes_("Boris")
particle.attrs["particleInterpolation"] = np.bytes_("uniform")
particle.attrs["particleSmoothing"] = np.bytes_("none")
# particle.attrs["particleSmoothingParameters"] = \
# np.string_("period=1;numPasses=2;compensator=false")
# np.bytes_("period=1;numPasses=2;compensator=false")


def write_meshes(f, iteration):
Expand All @@ -339,15 +339,15 @@ def write_meshes(f, iteration):
meshes = f[full_meshes_path]

# Extension: Additional attributes for ED-PIC
meshes.attrs["fieldSolver"] = np.string_("Yee")
meshes.attrs["fieldSolver"] = np.bytes_("Yee")
meshes.attrs["fieldBoundary"] = np.array(
[b"periodic", b"periodic", b"open", b"open"])
meshes.attrs["particleBoundary"] = np.array(
[b"periodic", b"periodic", b"absorbing", b"absorbing"])
meshes.attrs["currentSmoothing"] = np.string_("Binomial")
meshes.attrs["currentSmoothing"] = np.bytes_("Binomial")
meshes.attrs["currentSmoothingParameters"] = \
np.string_("period=1;numPasses=2;compensator=false")
meshes.attrs["chargeCorrection"] = np.string_("none")
np.bytes_("period=1;numPasses=2;compensator=false")
meshes.attrs["chargeCorrection"] = np.bytes_("none")

# (Here the data is randomly generated, but in an actual simulation,
# this would be replaced by the simulation data.)
Expand Down Expand Up @@ -375,7 +375,7 @@ def write_particles(f, iteration):

globalNumParticles = 128 # example number of all particles

electrons.attrs["comment"] = np.string_("My first electron species")
electrons.attrs["comment"] = np.bytes_("My first electron species")

# Extension: ED-PIC Attributes
# required
Expand Down

0 comments on commit 34609e6

Please sign in to comment.