comply with ruff linter
kmario23 committed Oct 20, 2024
1 parent dc19ad9 commit 0df2b74
Showing 6 changed files with 25 additions and 27 deletions.
2 changes: 0 additions & 2 deletions pdebench/models/fno/fno.py
@@ -1,5 +1,3 @@
-#!/usr/bin/env python3
-
"""
FNO. Implementation taken and modified from
https://github.com/zongyi-li/fourier_neural_operator
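Note: the removed first line is the Unix shebang, which only matters when a file is executed directly; fno.py is imported as a library module. A plausible motivation is ruff's flake8-executable check EXE001 (shebang present on a file that is not marked executable), though the exact rule is an assumption, since the commit message only says "comply with ruff". A minimal sketch of the resulting module layout:

    """FNO. Implementation taken and modified from
    https://github.com/zongyi-li/fourier_neural_operator
    """
    # no shebang: the module is imported, never run directly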
8 changes: 4 additions & 4 deletions pdebench/models/fno/train.py
@@ -145,7 +145,7 @@ def run_training(

model_path = model_name + ".pt"

-total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
+# total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
# print(f"Total parameters = {total_params}")

optimizer = torch.optim.Adam(
@@ -209,7 +209,7 @@ def run_training(

for ep in range(start_epoch, epochs):
model.train()
-t1 = default_timer()
+# t1 = default_timer()
train_l2_step = 0
train_l2_full = 0
for xx, yy, grid in train_loader:
@@ -337,7 +337,7 @@ def run_training(
model_path,
)

-t2 = default_timer()
+# t2 = default_timer()
scheduler.step()
# print(
# "epoch: {0}, loss: {1:.5f}, t2-t1: {2:.5f}, trainL2: {3:.5f}, testL2: {4:.5f}".format(
@@ -348,4 +348,4 @@ def run_training(

if __name__ == "__main__":
run_training()
print("Done.")
# print("Done.")
13 changes: 7 additions & 6 deletions pdebench/models/fno/utils.py
@@ -150,6 +150,7 @@

import math as mt
import os
+from pathlib import Path

import h5py
import numpy as np
@@ -182,9 +183,9 @@ def __init__(
"""

# Define path to files
-root_path = os.path.join(os.path.abspath(saved_folder), filename)
+root_path = Path(Path(saved_folder).resolve()) / filename
if filename[-2:] != "h5":
print(".HDF5 file extension is assumed hereafter")
# print(".HDF5 file extension is assumed hereafter")

with h5py.File(root_path, "r") as f:
keys = list(f.keys())
@@ -242,7 +243,7 @@ def __init__(
self.grid = torch.tensor(
self.grid[::reduced_resolution], dtype=torch.float
).unsqueeze(-1)
-print(self.data.shape)
+# print(self.data.shape)
if len(idx_cfd) == 4: # 2D
self.data = np.zeros(
[
@@ -463,7 +464,7 @@ def __init__(
]

elif filename[-2:] == "h5": # SWE-2D (RDB)
print(".H5 file extension is assumed hereafter")
# print(".H5 file extension is assumed hereafter")

with h5py.File(root_path, "r") as f:
keys = list(f.keys())
@@ -548,7 +549,7 @@ def __init__(
"""

# Define path to files
-self.file_path = os.path.abspath(saved_folder + filename + ".h5")
+self.file_path = Path(saved_folder + filename + ".h5").resolve()

# Extract list of seeds
with h5py.File(self.file_path, "r") as h5_file:
@@ -577,7 +578,7 @@ def __getitem__(self, idx):

# convert to [x1, ..., xd, t, v]
permute_idx = list(range(1, len(data.shape) - 1))
-permute_idx.extend(list([0, -1]))
+permute_idx.extend([0, -1])
data = data.permute(permute_idx)

# Extract spatial dimension of data
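Note: the path handling in this file moves from os.path to pathlib, in line with ruff's flake8-use-pathlib rules (PTH100 for os.path.abspath, PTH118 for os.path.join); the rule codes are assumptions. One subtlety: Path.resolve() also resolves symlinks while os.path.abspath() does not, and the outer Path(...) wrapper in the new line is redundant, since resolve() already returns a Path. A small sketch of the equivalence with hypothetical folder and file names:

    import os
    from pathlib import Path

    saved_folder = "data/2D/shallow-water"   # hypothetical values, purely illustrative
    filename = "2D_rdb_NA_NA.h5"

    # old style, os.path:
    old_root = os.path.join(os.path.abspath(saved_folder), filename)
    # new style, pathlib:
    new_root = Path(saved_folder).resolve() / filename

    # old_root and str(new_root) are the same string as long as saved_folder
    # involves no symlinks (resolve() follows symlinks, abspath() does not)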
14 changes: 7 additions & 7 deletions pdebench/models/metrics.py
@@ -331,13 +331,13 @@ def metrics(
for t in range(initial_step, yy.shape[-2]):
inp = xx.reshape(inp_shape)
temp_shape = [0, -1]
-temp_shape.extend([i for i in range(1, len(inp.shape) - 1)])
+temp_shape.extend(list(range(1, len(inp.shape) - 1)))
inp = inp.permute(temp_shape)

y = yy[..., t : t + 1, :]

temp_shape = [0]
-temp_shape.extend([i for i in range(2, len(inp.shape))])
+temp_shape.extend(list(range(2, len(inp.shape))))
temp_shape.append(1)
im = model(inp).permute(temp_shape).unsqueeze(-2)
pred = torch.cat((pred, im), -2)
@@ -372,7 +372,7 @@ def metrics(
err_BD += _err_BD
err_F += _err_F

-mean_dim = [i for i in range(len(yy.shape) - 2)]
+mean_dim = list(range(len(yy.shape) - 2))
mean_dim.append(-1)
mean_dim = tuple(mean_dim)
val_l2_time += torch.sqrt(
@@ -429,7 +429,7 @@ def metrics(
err_BD += _err_BD
err_F += _err_F

-mean_dim = [i for i in range(len(yy.shape) - 2)]
+mean_dim = list(range(len(yy.shape) - 2))
mean_dim.append(-1)
mean_dim = tuple(mean_dim)
val_l2_time += torch.sqrt(
@@ -584,7 +584,7 @@ class LpLoss:
"""

def __init__(self, p=2, reduction="mean"):
-super(LpLoss, self).__init__()
+super().__init__()
# Dimension and Lp-norm type are positive
assert p > 0
self.p = p
@@ -611,7 +611,7 @@ class FftLpLoss:
"""

def __init__(self, p=2, reduction="mean"):
-super(FftLpLoss, self).__init__()
+super().__init__()
# Dimension and Lp-norm type are positive
assert p > 0
self.p = p
@@ -661,7 +661,7 @@ class FftMseLoss:
"""

def __init__(self, reduction="mean"):
-super(FftMseLoss, self).__init__()
+super().__init__()
# Dimension and Lp-norm type are positive
self.reduction = reduction

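Note: the [i for i in range(...)] rewrites in this file match ruff's flake8-comprehensions rule C416 (a comprehension that only copies an iterable can be replaced by list()); the rule code is an assumption. Both spellings build the same list, so the change is purely stylistic, as in this small sketch that mirrors how mean_dim is assembled above (the tensor shape is hypothetical):

    yy_shape = (16, 64, 64, 10, 1)   # hypothetical (batch, x, y, t, v) shape

    # before: comprehension that only copies range() into a list (flagged as C416)
    mean_dim = [i for i in range(len(yy_shape) - 2)]
    # after: direct conversion, identical result
    mean_dim = list(range(len(yy_shape) - 2))
    mean_dim.append(-1)
    mean_dim = tuple(mean_dim)       # (0, 1, 2, -1): every dim except time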
8 changes: 3 additions & 5 deletions pdebench/models/unet/unet.py
@@ -1,5 +1,3 @@
-#!/usr/bin/env python3
-
"""
U-Net. Implementation taken and modified from
https://github.com/mateuszbuda/brain-segmentation-pytorch
@@ -28,7 +26,7 @@

class UNet1d(nn.Module):
def __init__(self, in_channels=3, out_channels=1, init_features=32):
-super(UNet1d, self).__init__()
+super().__init__()

features = init_features
self.encoder1 = UNet1d._block(in_channels, features, name="enc1")
@@ -121,7 +119,7 @@ def _block(in_channels, features, name):

class UNet2d(nn.Module):
def __init__(self, in_channels=3, out_channels=1, init_features=32):
-super(UNet2d, self).__init__()
+super().__init__()

features = init_features
self.encoder1 = UNet2d._block(in_channels, features, name="enc1")
@@ -214,7 +212,7 @@ def _block(in_channels, features, name):

class UNet3d(nn.Module):
def __init__(self, in_channels=3, out_channels=1, init_features=32):
-super(UNet3d, self).__init__()
+super().__init__()

features = init_features
self.encoder1 = UNet3d._block(in_channels, features, name="enc1")
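Note: the super(UNet1d, self).__init__() style calls, here and in metrics.py above, are replaced by the zero-argument form, which is what ruff's pyupgrade rule UP008 asks for on Python 3; the rule code is an assumption, and the behaviour is identical for this usage. A minimal runnable sketch (TinyBlock is an illustrative module, not part of PDEBench):

    import torch.nn as nn

    class TinyBlock(nn.Module):
        """Illustrative module only."""

        def __init__(self, in_channels: int = 3, features: int = 32):
            # Python 2 style, still legal but redundant on Python 3:
            #   super(TinyBlock, self).__init__()
            super().__init__()   # zero-argument form
            self.conv = nn.Conv1d(in_channels, features, kernel_size=3, padding=1)

        def forward(self, x):
            return self.conv(x)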
7 changes: 4 additions & 3 deletions pdebench/models/unet/utils.py
@@ -150,6 +150,7 @@

import math as mt
import os
+from pathlib import Path

import h5py
import numpy as np
@@ -180,7 +181,7 @@ def __init__(
"""

# Define path to files
-root_path = os.path.abspath(saved_folder + filename)
+root_path = Path(saved_folder + filename).resolve()
assert filename[-2:] != "h5", "HDF5 data is assumed!!"

with h5py.File(root_path, "r") as f:
@@ -437,7 +438,7 @@ def __init__(
"""

# Define path to files
-self.file_path = os.path.abspath(saved_folder + filename + ".h5")
+self.file_path = Path(saved_folder + filename + ".h5").resolve()

# Extract list of seeds
with h5py.File(self.file_path, "r") as h5_file:
@@ -466,7 +467,7 @@ def __getitem__(self, idx):

# convert to [x1, ..., xd, t, v]
permute_idx = list(range(1, len(data.shape) - 1))
-permute_idx.extend(list([0, -1]))
+permute_idx.extend([0, -1])
data = data.permute(permute_idx)

return data[..., : self.initial_step, :], data

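Note: permute_idx.extend(list([0, -1])) wraps a list literal in a redundant list() call, which ruff's flake8-comprehensions rule C410 flags (rule code assumed). Dropping the wrapper changes nothing, as this sketch with a hypothetical (t, x, y, v) tensor shows; it reproduces the [x1, ..., xd, t, v] reordering done in __getitem__ above:

    import torch

    data = torch.zeros(10, 64, 64, 2)                    # hypothetical (t, x, y, v) tensor

    permute_idx = list(range(1, len(data.shape) - 1))    # spatial dims first: [1, 2]
    permute_idx.extend([0, -1])                          # then time and variables: [1, 2, 0, -1]

    data = data.permute(permute_idx)                     # -> [x1, ..., xd, t, v]
    assert tuple(data.shape) == (64, 64, 10, 2)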