fixup! STY: manual fixes for newly flagged violations of UP031
neutrinoceros committed Nov 23, 2024
1 parent 2eccc0b commit 1c6b552
Showing 10 changed files with 23 additions and 25 deletions.
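For context, UP031 is the ruff rule (inherited from pyupgrade) that flags printf-style "%" string interpolation and asks for f-strings instead. A minimal sketch of the rewrite pattern applied throughout this commit, modelled on the first hunk below (the value of nv is a stand-in):

    nv = 42  # stand-in; in the real code this is the vertex count of the exported surface

    # flagged by UP031: printf-style interpolation
    line = "element vertex %i\n" % (nv)

    # the replacement this commit applies: an equivalent f-string
    line = f"element vertex {nv}\n"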
8 changes: 4 additions & 4 deletions yt/data_objects/construction_data_containers.py
@@ -2591,7 +2591,7 @@ def _export_ply(
]
f.write(b"ply\n")
f.write(b"format binary_little_endian 1.0\n")
line = "element vertex %i\n" % (nv)
line = f"element vertex {nv}\n"
f.write(line.encode("latin-1"))
f.write(b"property float x\n")
f.write(b"property float y\n")
@@ -2612,7 +2612,7 @@ def _export_ply(
)
else:
v = np.empty(self.vertices.shape[1], dtype=vs[:3])
line = "element face %i\n" % (nv / 3)
line = f"element face {nv/3}\n"
f.write(line.encode("latin-1"))
f.write(b"property list uchar int vertex_indices\n")
if color_field is not None and sample_type == "face":
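One caveat about the hunk above: this pair is not strictly behavior-preserving. "%i" coerces the float result of nv / 3 to an integer, while the f-string renders the float as-is, so the PLY header line can change from "element face 4" to "element face 4.0". A small sketch of the difference (nv = 12 is a stand-in value); floor division would keep the old output:

    nv = 12  # stand-in vertex count

    old = "element face %i\n" % (nv / 3)  # -> "element face 4\n"
    new = f"element face {nv/3}\n"        # -> "element face 4.0\n"
    alt = f"element face {nv // 3}\n"     # floor division restores the old output

    assert old != new and old == alt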
@@ -2751,9 +2751,9 @@ def export_sketchfab(
# to a file.
if self.vertices.shape[1] > 1e7:
tfi = 0
fn = "temp_model_%03i.ply" % tfi
fn = f"temp_model_{tfi:03}.ply"
while os.path.exists(fn):
fn = "temp_model_%03i.ply" % tfi
fn = f"temp_model_{tfi:03}.ply"
tfi += 1
open(fn, "wb").write(ply_file.read())
raise YTTooManyVertices(self.vertices.shape[1], fn)
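The %03i to {tfi:03} rewrites above are behavior-preserving: both zero-pad the integer counter to width 3 ({tfi:03d} would be the more explicit spelling of the same format spec). A quick sketch with a stand-in counter:

    tfi = 7  # stand-in counter

    assert "temp_model_%03i.ply" % tfi == f"temp_model_{tfi:03}.ply" == "temp_model_007.ply"
    assert f"{tfi:03}" == f"{tfi:03d}"  # bare "03" and "03d" agree for integers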
4 changes: 2 additions & 2 deletions yt/data_objects/index_subobjects/unstructured_mesh.py
@@ -48,7 +48,7 @@ def _check_consistency(self):
mylog.debug("Connectivity is consistent.")

def __repr__(self):
return "UnstructuredMesh_%04i" % (self.mesh_id)
return f"UnstructuredMesh_{self.mesh_id:04}"

def get_global_startindex(self):
"""
@@ -151,7 +151,7 @@ class SemiStructuredMesh(UnstructuredMesh):
_container_fields = ("dx", "dy", "dz")

def __repr__(self):
return "SemiStructuredMesh_%04i" % (self.mesh_id)
return f"SemiStructuredMesh_{self.mesh_id:04}"

def _generate_container_field(self, field):
if self._current_chunk is None:
2 changes: 1 addition & 1 deletion yt/data_objects/level_sets/clump_handling.py
@@ -253,7 +253,7 @@ def save_as_dataset(self, filename=None, fields=None):
"""

ds = self.data.ds
keyword = "%s_clump_%d" % (str(ds), self.clump_id)
keyword = f"{ds}_clump_{self.clump_id}"
filename = get_output_filename(filename, keyword, ".h5")

# collect clump info fields
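The change above also drops the explicit str(ds): "%s" already calls str(), and an f-string placeholder falls back to str() too (via format()), assuming the dataset class defines no custom __format__. A minimal sketch with a stand-in class:

    class FakeDataset:  # stand-in for the yt dataset object
        def __str__(self):
            return "DD0046"

    ds, clump_id = FakeDataset(), 3
    assert "%s_clump_%d" % (str(ds), clump_id) == f"{ds}_clump_{clump_id}" == "DD0046_clump_3"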
4 changes: 2 additions & 2 deletions yt/data_objects/particle_trajectories.py
@@ -335,7 +335,7 @@ def trajectory_from_index(self, index):
"""
mask = np.isin(self.indices, (index,), assume_unique=True)
if not np.any(mask):
print("The particle index %d is not in the list!" % (index))
print(f"The particle index {index} is not in the list!")
raise IndexError
fields = sorted(self.field_data.keys())
traj = {}
@@ -375,7 +375,7 @@ def write_out(self, filename_base):
[self.times[it]] + [self[field][ix, it] for field in fields]
)
)
-fid = open(filename_base + "_%d.dat" % self.indices[ix], "w")
+fid = open(f"{filename_base}_{self.indices[ix]}.dat", "w")
fid.writelines(outlines)
fid.close()
del fid
2 changes: 1 addition & 1 deletion yt/data_objects/region_expression.py
@@ -115,7 +115,7 @@ def _create_slice(self, slice_tuple):
dim = self.ds.dimensionality
if dim < 2:
raise ValueError(
"Can not create a slice from data with dimensionality '%d'" % dim
f"Can not create a slice from data with dimensionality '{dim}'"
)
if dim == 2:
coord = self.ds.domain_center[2]
4 changes: 2 additions & 2 deletions yt/data_objects/selection_objects/data_selection_objects.py
@@ -66,8 +66,8 @@ def __init__(self, ds, field_parameters, data_source=None):
if data_source._dimensionality < self._dimensionality:
raise RuntimeError(
"Attempted to construct a DataContainer with a data_source "
"of lower dimensionality (%u vs %u)"
% (data_source._dimensionality, self._dimensionality)
"of lower dimensionality "
f"({data_source._dimensionality} vs {self._dimensionality})"
)
self.field_parameters.update(data_source.field_parameters)
self.quantities = DerivedQuantityCollection(self)
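Two details in the hunk above: "%u" has no unsigned meaning in Python (it behaves like "%d"), and because adjacent string literals are concatenated at compile time, only the final piece of the message needs the f prefix. A short sketch with stand-in values:

    have, want = 2, 3  # stand-in dimensionalities

    msg = (
        "Attempted to construct a DataContainer with a data_source "
        "of lower dimensionality "
        f"({have} vs {want})"  # only this piece interpolates values
    )
    assert msg.endswith("(2 vs 3)")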
6 changes: 3 additions & 3 deletions yt/fields/interpolated_fields.py
@@ -30,9 +30,9 @@ def add_interpolated_field(

if len(axes_fields) != len(axes_data) or len(axes_fields) != len(table_data.shape):
raise RuntimeError(
"Data dimension mismatch: data is %d, "
"%d axes data provided, and %d axes fields provided."
% (len(table_data.shape), len(axes_data), len(axes_fields))
f"Data dimension mismatch: data is {len(table_data.shape)}, "
f"{len(axes_data)} axes data provided, "
f"and {len(axes_fields)} axes fields provided."
)

int_class = _int_class[len(table_data.shape)]
4 changes: 2 additions & 2 deletions yt/fields/xray_emission_fields.py
@@ -20,7 +20,7 @@


def _get_data_file(table_type, data_dir=None):
data_file = "%s_emissivity_v%d.h5" % (table_type, data_version[table_type])
data_file = f"{table_type}_emissivity_v{data_version[table_type]}.h5"
if data_dir is None:
supp_data_dir = ytcfg.get("yt", "supp_data_dir")
data_dir = supp_data_dir if os.path.exists(supp_data_dir) else "."
@@ -43,7 +43,7 @@ def __str__(self):

class ObsoleteDataException(YTException):
def __init__(self, table_type):
data_file = "%s_emissivity_v%d.h5" % (table_type, data_version[table_type])
data_file = f"{table_type}_emissivity_v{data_version[table_type]}.h5"
self.msg = "X-ray emissivity data is out of date.\n"
self.msg += f"Download the latest data from {data_url}/{data_file}."

2 changes: 1 addition & 1 deletion yt/frontends/adaptahop/data_structures.py
@@ -294,7 +294,7 @@ def __init__(self, ptype, particle_identifier, parent_ds, halo_ds):
super().__init__(parent_ds, {})

def __repr__(self):
return "%s_%s_%09d" % (self.ds, self.ptype, self.particle_identifier)
return f"{self.ds}_{self.ptype}_{self.particle_identifier:09}"

def __getitem__(self, key):
return self.region[key]
12 changes: 5 additions & 7 deletions yt/frontends/amrex/data_structures.py
@@ -196,9 +196,7 @@ def _generate_particle_fields(self, extra_field_names):
self.known_int_fields = self.known_int_fields[0 : self.num_int_base]

# these are extra integer fields
-extra_int_fields = [
-    "particle_int_comp%d" % i for i in range(self.num_int_extra)
-]
+extra_int_fields = [f"particle_int_comp{i}" for i in range(self.num_int_extra)]
self.known_int_fields.extend(
[(self.particle_type, field) for field in extra_int_fields]
)
@@ -216,7 +214,7 @@ def _generate_particle_fields(self, extra_field_names):
assert len(extra_field_names) == self.num_real_extra
else:
extra_field_names = [
"particle_real_comp%d" % i for i in range(self.num_real_extra)
f"particle_real_comp{i}" for i in range(self.num_real_extra)
]

self.known_real_fields.extend(
@@ -1478,7 +1476,7 @@ def __init__(self, header_fn):
if len(line) == 1:
line = f.readline()
continue
self.data["species_%d" % i] = [float(val) for val in line]
self.data[f"species_{i}"] = [float(val) for val in line]
i = i + 1
line = f.readline()

@@ -1497,8 +1495,8 @@ def __init__(self, ds, dataset_type="boxlib_native"):
for key, val in self.warpx_header.data.items():
if key.startswith("species_"):
i = int(key.split("_")[-1])
charge_name = "particle%.1d_charge" % i
mass_name = "particle%.1d_mass" % i
charge_name = f"particle{i}_charge"
mass_name = f"particle{i}_mass"
self.parameters[charge_name] = val[0]
self.parameters[mass_name] = val[1]
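In the hunk above, "%.1d" formats an integer with a minimum of one digit, a no-op for the non-negative species indices used here, so the bare placeholder is equivalent. A quick sketch with a stand-in index:

    i = 4  # stand-in species index

    assert "particle%.1d_charge" % i == f"particle{i}_charge" == "particle4_charge"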

