From 0df2b7412db7240e9f7dfd6740821a33a1451269 Mon Sep 17 00:00:00 2001
From: kmario23
Date: Sun, 20 Oct 2024 18:07:16 +0000
Subject: [PATCH] comply with ruff linter

---
 pdebench/models/fno/fno.py    |  2 --
 pdebench/models/fno/train.py  |  8 ++++----
 pdebench/models/fno/utils.py  | 13 +++++++------
 pdebench/models/metrics.py    | 14 +++++++-------
 pdebench/models/unet/unet.py  |  8 +++-----
 pdebench/models/unet/utils.py |  7 ++++---
 6 files changed, 25 insertions(+), 27 deletions(-)

diff --git a/pdebench/models/fno/fno.py b/pdebench/models/fno/fno.py
index 7524db8..3e50e09 100644
--- a/pdebench/models/fno/fno.py
+++ b/pdebench/models/fno/fno.py
@@ -1,5 +1,3 @@
-#!/usr/bin/env python3
-
 """
 FNO. Implementation taken and modified from
 https://github.com/zongyi-li/fourier_neural_operator
diff --git a/pdebench/models/fno/train.py b/pdebench/models/fno/train.py
index 02b707a..25a6912 100644
--- a/pdebench/models/fno/train.py
+++ b/pdebench/models/fno/train.py
@@ -145,7 +145,7 @@ def run_training(
 
     model_path = model_name + ".pt"
 
-    total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
+    # total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
     # print(f"Total parameters = {total_params}")
 
     optimizer = torch.optim.Adam(
@@ -209,7 +209,7 @@ def run_training(
 
     for ep in range(start_epoch, epochs):
         model.train()
-        t1 = default_timer()
+        # t1 = default_timer()
         train_l2_step = 0
         train_l2_full = 0
         for xx, yy, grid in train_loader:
@@ -337,7 +337,7 @@ def run_training(
                     model_path,
                 )
 
-        t2 = default_timer()
+        # t2 = default_timer()
         scheduler.step()
         # print(
         #     "epoch: {0}, loss: {1:.5f}, t2-t1: {2:.5f}, trainL2: {3:.5f}, testL2: {4:.5f}".format(
@@ -348,4 +348,4 @@
 
 if __name__ == "__main__":
     run_training()
-    print("Done.")
+    # print("Done.")
diff --git a/pdebench/models/fno/utils.py b/pdebench/models/fno/utils.py
index 01b507e..a98cbe9 100644
--- a/pdebench/models/fno/utils.py
+++ b/pdebench/models/fno/utils.py
@@ -150,6 +150,7 @@
 
 import math as mt
 import os
+from pathlib import Path
 
 import h5py
 import numpy as np
@@ -182,9 +183,9 @@ def __init__(
         """
 
         # Define path to files
-        root_path = os.path.join(os.path.abspath(saved_folder), filename)
+        root_path = Path(Path(saved_folder).resolve()) / filename
         if filename[-2:] != "h5":
-            print(".HDF5 file extension is assumed hereafter")
+            # print(".HDF5 file extension is assumed hereafter")
 
             with h5py.File(root_path, "r") as f:
                 keys = list(f.keys())
@@ -242,7 +243,7 @@ def __init__(
                     self.grid = torch.tensor(
                         self.grid[::reduced_resolution], dtype=torch.float
                     ).unsqueeze(-1)
-                    print(self.data.shape)
+                    # print(self.data.shape)
                 if len(idx_cfd) == 4:  # 2D
                     self.data = np.zeros(
                         [
@@ -463,7 +464,7 @@ def __init__(
                     ]
 
        elif filename[-2:] == "h5":  # SWE-2D (RDB)
-            print(".H5 file extension is assumed hereafter")
+            # print(".H5 file extension is assumed hereafter")
 
             with h5py.File(root_path, "r") as f:
                 keys = list(f.keys())
@@ -548,7 +549,7 @@ def __init__(
         """
 
         # Define path to files
-        self.file_path = os.path.abspath(saved_folder + filename + ".h5")
+        self.file_path = Path(saved_folder + filename + ".h5").resolve()
 
         # Extract list of seeds
         with h5py.File(self.file_path, "r") as h5_file:
@@ -577,7 +578,7 @@ def __getitem__(self, idx):
 
         # convert to [x1, ..., xd, t, v]
         permute_idx = list(range(1, len(data.shape) - 1))
-        permute_idx.extend(list([0, -1]))
+        permute_idx.extend([0, -1])
         data = data.permute(permute_idx)
 
         # Extract spatial dimension of data
diff --git a/pdebench/models/metrics.py b/pdebench/models/metrics.py
index 913faab..ab16704 100644
--- a/pdebench/models/metrics.py
+++ b/pdebench/models/metrics.py
@@ -331,13 +331,13 @@ def metrics(
                 for t in range(initial_step, yy.shape[-2]):
                     inp = xx.reshape(inp_shape)
                     temp_shape = [0, -1]
-                    temp_shape.extend([i for i in range(1, len(inp.shape) - 1)])
+                    temp_shape.extend(list(range(1, len(inp.shape) - 1)))
                     inp = inp.permute(temp_shape)
 
                     y = yy[..., t : t + 1, :]
 
                     temp_shape = [0]
-                    temp_shape.extend([i for i in range(2, len(inp.shape))])
+                    temp_shape.extend(list(range(2, len(inp.shape))))
                     temp_shape.append(1)
                     im = model(inp).permute(temp_shape).unsqueeze(-2)
                     pred = torch.cat((pred, im), -2)
@@ -372,7 +372,7 @@ def metrics(
                 err_BD += _err_BD
                 err_F += _err_F
 
-                mean_dim = [i for i in range(len(yy.shape) - 2)]
+                mean_dim = list(range(len(yy.shape) - 2))
                 mean_dim.append(-1)
                 mean_dim = tuple(mean_dim)
                 val_l2_time += torch.sqrt(
@@ -429,7 +429,7 @@ def metrics(
                 err_BD += _err_BD
                 err_F += _err_F
 
-                mean_dim = [i for i in range(len(yy.shape) - 2)]
+                mean_dim = list(range(len(yy.shape) - 2))
                 mean_dim.append(-1)
                 mean_dim = tuple(mean_dim)
                 val_l2_time += torch.sqrt(
@@ -584,7 +584,7 @@ class LpLoss:
     """
 
     def __init__(self, p=2, reduction="mean"):
-        super(LpLoss, self).__init__()
+        super().__init__()
         # Dimension and Lp-norm type are positive
         assert p > 0
         self.p = p
@@ -611,7 +611,7 @@ class FftLpLoss:
     """
 
     def __init__(self, p=2, reduction="mean"):
-        super(FftLpLoss, self).__init__()
+        super().__init__()
         # Dimension and Lp-norm type are positive
         assert p > 0
         self.p = p
@@ -661,7 +661,7 @@ class FftMseLoss:
     """
 
     def __init__(self, reduction="mean"):
-        super(FftMseLoss, self).__init__()
+        super().__init__()
         # Dimension and Lp-norm type are positive
         self.reduction = reduction
 
diff --git a/pdebench/models/unet/unet.py b/pdebench/models/unet/unet.py
index f06e7e1..95dfb1a 100644
--- a/pdebench/models/unet/unet.py
+++ b/pdebench/models/unet/unet.py
@@ -1,5 +1,3 @@
-#!/usr/bin/env python3
-
 """
 U-Net. Implementation taken and modified from
 https://github.com/mateuszbuda/brain-segmentation-pytorch
@@ -28,7 +26,7 @@
 
 class UNet1d(nn.Module):
     def __init__(self, in_channels=3, out_channels=1, init_features=32):
-        super(UNet1d, self).__init__()
+        super().__init__()
 
         features = init_features
         self.encoder1 = UNet1d._block(in_channels, features, name="enc1")
@@ -121,7 +119,7 @@ def _block(in_channels, features, name):
 
 class UNet2d(nn.Module):
     def __init__(self, in_channels=3, out_channels=1, init_features=32):
-        super(UNet2d, self).__init__()
+        super().__init__()
 
         features = init_features
         self.encoder1 = UNet2d._block(in_channels, features, name="enc1")
@@ -214,7 +212,7 @@ def _block(in_channels, features, name):
 
 class UNet3d(nn.Module):
     def __init__(self, in_channels=3, out_channels=1, init_features=32):
-        super(UNet3d, self).__init__()
+        super().__init__()
 
         features = init_features
         self.encoder1 = UNet3d._block(in_channels, features, name="enc1")
diff --git a/pdebench/models/unet/utils.py b/pdebench/models/unet/utils.py
index d8c61cf..4175b10 100644
--- a/pdebench/models/unet/utils.py
+++ b/pdebench/models/unet/utils.py
@@ -150,6 +150,7 @@
 
 import math as mt
 import os
+from pathlib import Path
 
 import h5py
 import numpy as np
@@ -180,7 +181,7 @@ def __init__(
         """
 
         # Define path to files
-        root_path = os.path.abspath(saved_folder + filename)
+        root_path = Path(saved_folder + filename).resolve()
         assert filename[-2:] != "h5", "HDF5 data is assumed!!"
 
         with h5py.File(root_path, "r") as f:
@@ -437,7 +438,7 @@ def __init__(
         """
 
         # Define path to files
-        self.file_path = os.path.abspath(saved_folder + filename + ".h5")
+        self.file_path = Path(saved_folder + filename + ".h5").resolve()
 
         # Extract list of seeds
         with h5py.File(self.file_path, "r") as h5_file:
@@ -466,7 +467,7 @@ def __getitem__(self, idx):
 
         # convert to [x1, ..., xd, t, v]
         permute_idx = list(range(1, len(data.shape) - 1))
-        permute_idx.extend(list([0, -1]))
+        permute_idx.extend([0, -1])
        data = data.permute(permute_idx)
 
         return data[..., : self.initial_step, :], data
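
Illustration only, not part of the patch: a minimal sketch of the os.path -> pathlib pattern this commit adopts for building dataset paths. The folder and file names below are hypothetical placeholders, and the comparison assumes Python >= 3.6; note that Path.resolve() also follows symlinks, which os.path.abspath() does not.

import os
from pathlib import Path

saved_folder = "data/"            # hypothetical example folder
filename = "example_dataset.h5"   # hypothetical example file

# Pattern removed by the patch: returns an absolute path as a str.
old_style = os.path.abspath(saved_folder + filename)

# Pattern introduced by the patch: returns an absolute pathlib.Path,
# additionally resolving any symlinks in the path.
new_style = Path(saved_folder + filename).resolve()

print(old_style)
print(new_style)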