Skip to content

Commit

Permalink
Linter fixes
Browse files Browse the repository at this point in the history
Ave linters !

Signed-off-by: Denis Barakhtanov <[email protected]>
  • Loading branch information
0xE0F committed Jan 7, 2025
1 parent df2df24 commit 91c5ab5
Show file tree
Hide file tree
Showing 10 changed files with 67 additions and 68 deletions.
2 changes: 1 addition & 1 deletion src/client/pydaos/torch/Readme.md
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ plt.show()

### Checkpoint interface

Torch framwork provides a way to save and load model's checkpoints: `torch.save` and `torch.load` functions are used to save and load the model state dictionary.
Torch framework provides a way to save and load model's checkpoints: `torch.save` and `torch.load` functions are used to save and load the model state dictionary.
The `torch.save` function expects a state dictionary object and a file-like object `Union[str, PathLike, BinaryIO, IO[bytes]]`.
To implement such interface, `pydaos.torch.WriteBuffer` class is introduced, which is a wrapper around `io.BufferedIOBase` object, behaving like a writable stream.
It accumulates the data in the buffer and writes it to the DAOS container when the close method is called.
Expand Down
6 changes: 3 additions & 3 deletions src/client/pydaos/torch/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# (C) Copyright 2024 Intel Corporation.
# (C) Copyright 2024 Google LLC
# (C) Copyright 2024 Enakta Labs Ltd
# (C) Copyright 2024-2025 Intel Corporation.
# (C) Copyright 2024-2025 Google LLC
# (C) Copyright 2024-2025 Enakta Labs Ltd
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
Expand Down
7 changes: 2 additions & 5 deletions src/client/pydaos/torch/torch_api.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
#
# (C) Copyright 2024 Google LLC
# (C) Copyright 2024 Enakta Labs Ltd
# (C) Copyright 2024-2025 Google LLC
# (C) Copyright 2024-2025 Enakta Labs Ltd
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
Expand Down Expand Up @@ -307,16 +307,13 @@ def closed(self):
"""Return True if the file is closed."""
return self._closed

@property
def writable(self):
"""Return True if the file is writable."""
return True

@property
def readable(self):
return False

@property
def seekable(self):
"""Return True if the file is seekable."""
return False
Expand Down
6 changes: 3 additions & 3 deletions src/client/pydaos/torch/torch_shim.c
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
/**
* (C) Copyright 2019-2024 Intel Corporation.
* (C) Copyright 2024 Google LLC
* (C) Copyright 2024 Enakta Labs Ltd
* (C) Copyright 2019-2025 Intel Corporation.
* (C) Copyright 2024-2025 Google LLC
* (C) Copyright 2024-2025 Enakta Labs Ltd
*
* SPDX-License-Identifier: BSD-2-Clause-Patent
*/
Expand Down
22 changes: 12 additions & 10 deletions src/tests/ftest/directory_tree.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,9 +112,11 @@ def get_probe(self):
needle_name = os.path.basename(needle_path)
return needle_name, needle_path

def set_file_size(self, min=0, max=0):
self._file_size_min = min
self._file_size_max = max
def set_file_size(self, fmin=0, fmax=0):
""" Set the minimum and maximum file size """

self._file_size_min = fmin
self._file_size_max = fmax

def _create_dir_tree(self, current_path, current_height):
"""Create the actual directory tree using depth-first search approach.
Expand All @@ -130,7 +132,7 @@ def _create_dir_tree(self, current_path, current_height):

# create files
for _ in range(self._files_per_node):
self._mktemp_file(dir=current_path, suffix=".file")
self._mktemp_file(where=current_path, suffix=".file")

# create nested directories
for _ in range(self._subdirs_per_node):
Expand All @@ -149,7 +151,7 @@ def _created_remaining_needles(self):
for count in range(self._needles_count):
new_path = os.path.dirname(random.choice(self._needles_paths)) # nosec
suffix = f"_{count:05d}.needle"
self._mktemp_file(dir=new_path, prefix=self._needles_prefix, suffix=suffix)
self._mktemp_file(where=new_path, prefix=self._needles_prefix, suffix=suffix)

def _create_needle(self, current_path, current_height):
"""Create a *.needle file if we reach the bottom of the tree.
Expand All @@ -166,17 +168,17 @@ def _create_needle(self, current_path, current_height):

self._needles_count -= 1
suffix = "_{:05d}.needle".format(self._needles_count)
file_name = self._mktemp_file(dir=current_path, prefix=self._needles_prefix, suffix=suffix)
file_name = self._mktemp_file(where=current_path, prefix=self._needles_prefix, suffix=suffix)

Check failure on line 171 in src/tests/ftest/directory_tree.py

View workflow job for this annotation

GitHub Actions / Flake8 check

E501 line too long (101 > 100 characters)

Check warning on line 171 in src/tests/ftest/directory_tree.py

View workflow job for this annotation

GitHub Actions / Pylint check

line-too-long, Line too long (101/100)
self._needles_paths.append(file_name)

def _mktemp_file(self, dir=None, prefix=None, suffix=None):
def _mktemp_file(self, where=None, prefix=None, suffix=None):
"""Create a temporary file.
If the file size is 0, the file will be empty.
If the file size is greater than 0, the file will be filled with random data.
If min and max file size are different, the file size will be a random between min and max.
"""

fd, fname = tempfile.mkstemp(dir=dir, prefix=prefix, suffix=suffix)
fd, fname = tempfile.mkstemp(dir=where, prefix=prefix, suffix=suffix)
if self._file_size_min == 0:
os.close(fd)
return fname
Expand All @@ -190,8 +192,8 @@ def _mktemp_file(self, dir=None, prefix=None, suffix=None):
return fname



def _populate_dir_tree(path, height, subdirs_per_node, files_per_node, needles, prefix, file_size_min, file_size_max):
def _populate_dir_tree(path, height, subdirs_per_node, files_per_node, needles, prefix,
file_size_min, file_size_max):
"""Create a directory tree and its needle files.
Args:
Expand Down
3 changes: 2 additions & 1 deletion src/tests/ftest/pytorch/checkpoint.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""
(C) Copyright 2025 Intel Corporation.
(C) Copyright 2025 Google LLC
SPDX-License-Identifier: BSD-2-Clause-Patent
Expand All @@ -19,7 +20,7 @@ class PytorchCheckpointTest(TestWithServers):
def test_checkpoint(self):
"""Test Pytorch Checkpoint interface
Test Description: Ensure that wirting and reading a checkpoint works as expected.
Test Description: Ensure that writing and reading a checkpoint works as expected.
:avocado: tags=all,full_regression
:avocado: tags=vm
Expand Down
32 changes: 16 additions & 16 deletions src/tests/ftest/pytorch/checkpoint.yaml
Original file line number Diff line number Diff line change
@@ -1,23 +1,23 @@
hosts:
test_servers: 1
test_clients: 1
test_servers: 1
test_clients: 1
server_config:
name: daos_server
engines_per_host: 1
engines:
name: daos_server
engines_per_host: 1
engines:
0:
targets: 4
nr_xs_helpers: 0
storage:
0:
targets: 4
nr_xs_helpers: 0
storage:
0:
class: ram
scm_mount: /mnt/daos
system_ram_reserved: 1
class: ram
scm_mount: /mnt/daos
system_ram_reserved: 1
pool:
size: 1G
size: 1G
container:
type: POSIX
control_method: daos
type: POSIX
control_method: daos

checkpoint:
writes: 100
writes: 100
10 changes: 4 additions & 6 deletions src/tests/ftest/pytorch/map_dataset.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""
(C) Copyright 2025 Intel Corporation.
(C) Copyright 2025 Google LLC
SPDX-License-Identifier: BSD-2-Clause-Patent
Expand Down Expand Up @@ -78,7 +79,7 @@ def test_map_style_dataset(self):
def test_dataloader(self):
"""Test Map Style Dataset with DataLoader.
Test Description: Ensure that the dataloader can read all the samples that were seeded.
Test Description: Ensure that the DataLoader can read all the samples that were seeded.
:avocado: tags=all,full_regression
:avocado: tags=vm
Expand All @@ -96,7 +97,7 @@ def test_dataloader(self):
subdirs = self.params.get("subdirs", "/run/dataloader/*")
files_per_node = self.params.get("files_per_node", "/run/dataloader/*")

# Dataloader requires that samples are of the same size
# DataLoader requires that samples are of the same size
file_min_size = self.params.get("file_min_size", "/run/dataloader/*", 4096)
file_max_size = self.params.get("file_max_size", "/run/dataloader/*", 4096)

Expand Down Expand Up @@ -148,10 +149,7 @@ def _test_dataloader(self, pool, container, hashes, batch_size, processes):

if hashes != actual:
self.fail(
f"dataloader with nproc={processes} and bs={batch_size} did not fetch all samples")
else:
self.log.info(
f"dataloader with nproc={processes} and bs={batch_size} fetched all samples")
f"DataLoader with nproc={processes} and bs={batch_size} did not fetch all samples")

def _create_test_files(self, path, height, subdirs, files_per_node, min_size, max_size):
"""Create a directory tree"""
Expand Down
46 changes: 23 additions & 23 deletions src/tests/ftest/pytorch/map_dataset.yaml
Original file line number Diff line number Diff line change
@@ -1,32 +1,32 @@
hosts:
test_servers: 1
test_clients: 1
test_servers: 1
test_clients: 1
server_config:
name: daos_server
engines_per_host: 1
engines:
name: daos_server
engines_per_host: 1
engines:
0:
targets: 4
nr_xs_helpers: 0
storage:
0:
targets: 4
nr_xs_helpers: 0
storage:
0:
class: ram
scm_mount: /mnt/daos
system_ram_reserved: 1
class: ram
scm_mount: /mnt/daos
system_ram_reserved: 1
pool:
size: 1G
size: 1G
container:
type: POSIX
control_method: daos
type: POSIX
control_method: daos

map_style_dataset:
tree_height: 4
subdirs: 3
files_per_node: 5
tree_height: 4
subdirs: 3
files_per_node: 5

dataloader:
tree_height: 3
subdirs: 3
files_per_node: 8
processes: [0, 1, 2, 3, 4, 8]
batch_size: [2, 4, 8, 16]
tree_height: 3
subdirs: 3
files_per_node: 8
processes: [0, 1, 2, 3, 4, 8]
batch_size: [2, 4, 8, 16]
1 change: 1 addition & 0 deletions utils/cq/words.dict
Original file line number Diff line number Diff line change
Expand Up @@ -409,6 +409,7 @@ scancel
scm
scons
scontrol
seekable
sharedctypes
shlex
simul
Expand Down

0 comments on commit 91c5ab5

Please sign in to comment.