Skip to content

Commit f47726d

Browse files
cleaning with pylint
1 parent 8570a58 commit f47726d

23 files changed

+221
-228
lines changed

.github/workflows/test.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ jobs:
3030
pip install -r *.egg-info/requires.txt
3131
- name: Analysing the code with pylint
3232
run: |
33-
pylint --unsafe-load-any-extension=y --disable=fixme $(git ls-files '*.py') || true
33+
pylint --unsafe-load-any-extension=y --disable=fixme $(git ls-files "pytest_parallel/*.py" "test/*.py") || true
3434
3535
build:
3636
needs: [pylint]

.slurm_draft/worker.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -11,13 +11,13 @@
1111
test_idx = int(sys.argv[3])
1212

1313
comm = MPI.COMM_WORLD
14-
print(f'start at {scheduler_ip}@{server_port} test {test_idx} at rank {comm.Get_rank()}/{comm.Get_size()} exec on {socket.gethostname()} - ',datetime.datetime.now())
14+
print(f'start at {scheduler_ip}@{server_port} test {test_idx} at rank {comm.rank}/{comm.size} exec on {socket.gethostname()} - ',datetime.datetime.now())
1515

16-
if comm.Get_rank() == 0:
16+
if comm.rank == 0:
1717
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
1818
s.connect((scheduler_ip, server_port))
1919
#time.sleep(10+5*test_idx)
20-
#msg = f'Hello from test {test_idx} at rank {comm.Get_rank()}/{comm.Get_size()} exec on {socket.gethostname()}'
20+
#msg = f'Hello from test {test_idx} at rank {comm.rank}/{comm.size} exec on {socket.gethostname()}'
2121
#socket_utils.send(s, msg)
2222
info = {
2323
'test_idx': test_idx,

pytest_parallel/gather_report.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -45,8 +45,8 @@ def gather_report_on_local_rank_0(report):
4545
del report.sub_comm # No need to keep it in the report
4646
# Furthermore we need to serialize the report
4747
# and mpi4py does not know how to serialize report.sub_comm
48-
i_sub_rank = sub_comm.Get_rank()
49-
n_sub_rank = sub_comm.Get_size()
48+
i_sub_rank = sub_comm.rank
49+
n_sub_rank = sub_comm.size
5050

5151
if (
5252
report.outcome != "skipped"

pytest_parallel/mpi_reporter.py

+17-20
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
1-
import pytest
21
import sys
2+
3+
import pytest
34
from mpi4py import MPI
45

56
from .algo import partition, lower_bound
@@ -11,7 +12,7 @@
1112

1213
def mark_skip(item):
1314
comm = MPI.COMM_WORLD
14-
n_rank = comm.Get_size()
15+
n_rank = comm.size
1516
n_proc_test = get_n_proc_for_test(item)
1617
skip_msg = f"Not enough procs to execute: {n_proc_test} required but only {n_rank} available"
1718
item.add_marker(pytest.mark.skip(reason=skip_msg), append=False)
@@ -38,8 +39,7 @@ def create_sub_comm_of_size(global_comm, n_proc, mpi_comm_creation_function):
3839
assert 0, 'Unknown MPI communicator creation function. Available: `MPI_Comm_create`, `MPI_Comm_split`'
3940

4041
def create_sub_comms_for_each_size(global_comm, mpi_comm_creation_function):
41-
i_rank = global_comm.Get_rank()
42-
n_rank = global_comm.Get_size()
42+
n_rank = global_comm.size
4343
sub_comms = [None] * n_rank
4444
for i in range(0,n_rank):
4545
n_proc = i+1
@@ -48,8 +48,7 @@ def create_sub_comms_for_each_size(global_comm, mpi_comm_creation_function):
4848

4949

5050
def add_sub_comm(items, global_comm, test_comm_creation, mpi_comm_creation_function):
51-
i_rank = global_comm.Get_rank()
52-
n_rank = global_comm.Get_size()
51+
n_rank = global_comm.size
5352

5453
# Strategy 'by_rank': create one sub-communicator by size, from sequential (size=1) to n_rank
5554
if test_comm_creation == 'by_rank':
@@ -109,7 +108,7 @@ def pytest_runtestloop(self, session) -> bool:
109108
_ = yield
110109
# prevent return value being non-zero (ExitCode.NO_TESTS_COLLECTED)
111110
# when no test run on non-master
112-
if self.global_comm.Get_rank() != 0 and session.testscollected == 0:
111+
if self.global_comm.rank != 0 and session.testscollected == 0:
113112
session.testscollected = 1
114113
return True
115114

@@ -132,7 +131,7 @@ def pytest_runtest_logreport(self, report):
132131

133132

134133
def prepare_items_to_run(items, comm):
135-
i_rank = comm.Get_rank()
134+
i_rank = comm.rank
136135

137136
items_to_run = []
138137

@@ -164,7 +163,7 @@ def prepare_items_to_run(items, comm):
164163

165164

166165
def items_to_run_on_this_proc(items_by_steps, items_to_skip, comm):
167-
i_rank = comm.Get_rank()
166+
i_rank = comm.rank
168167

169168
items = []
170169

@@ -200,14 +199,13 @@ def pytest_runtestloop(self, session) -> bool:
200199
and not session.config.option.continue_on_collection_errors
201200
):
202201
raise session.Interrupted(
203-
"%d error%s during collection"
204-
% (session.testsfailed, "s" if session.testsfailed != 1 else "")
202+
f"{session.testsfailed} error{'s' if session.testsfailed != 1 else ''} during collection"
205203
)
206204

207205
if session.config.option.collectonly:
208206
return True
209207

210-
n_workers = self.global_comm.Get_size()
208+
n_workers = self.global_comm.size
211209

212210
add_n_procs(session.items)
213211

@@ -217,12 +215,12 @@ def pytest_runtestloop(self, session) -> bool:
217215
items_by_steps, items_to_skip, self.global_comm
218216
)
219217

220-
for i, item in enumerate(items):
218+
for item in items:
221219
nextitem = None
222220
run_item_test(item, nextitem, session)
223221

224222
# prevent return value being non-zero (ExitCode.NO_TESTS_COLLECTED) when no test run on non-master
225-
if self.global_comm.Get_rank() != 0 and session.testscollected == 0:
223+
if self.global_comm.rank != 0 and session.testscollected == 0:
226224
session.testscollected = 1
227225
return True
228226

@@ -244,8 +242,8 @@ def pytest_runtest_logreport(self, report):
244242
gather_report_on_local_rank_0(report)
245243

246244
# master ranks of each sub_comm must send their report to rank 0
247-
if sub_comm.Get_rank() == 0: # only master are concerned
248-
if self.global_comm.Get_rank() != 0: # if master is not global master, send
245+
if sub_comm.rank == 0: # only master are concerned
246+
if self.global_comm.rank != 0: # if master is not global master, send
249247
self.global_comm.send(report, dest=0)
250248
elif report.master_running_proc != 0: # else, recv if test run remotely
251249
# In the line below, MPI.ANY_TAG will NOT clash with communications outside the framework because self.global_comm is private
@@ -342,7 +340,7 @@ def wait_test_to_complete(items_to_run, session, available_procs, inter_comm):
342340
for sub_rank in sub_ranks:
343341
if sub_rank != first_rank_done:
344342
rank_original_idx = inter_comm.recv(source=sub_rank, tag=WORK_DONE_TAG)
345-
assert (rank_original_idx == original_idx) # sub_rank is supposed to have worked on the same test
343+
assert rank_original_idx == original_idx # sub_rank is supposed to have worked on the same test
346344

347345
# the procs are now available
348346
for sub_rank in sub_ranks:
@@ -406,8 +404,7 @@ def pytest_runtestloop(self, session) -> bool:
406404
and not session.config.option.continue_on_collection_errors
407405
):
408406
raise session.Interrupted(
409-
"%d error%s during collection"
410-
% (session.testsfailed, "s" if session.testsfailed != 1 else "")
407+
f"{session.testsfailed} error{'s' if session.testsfailed != 1 else ''} during collection"
411408
)
412409

413410
if session.config.option.collectonly:
@@ -499,7 +496,7 @@ def pytest_runtest_logreport(self, report):
499496
sub_comm = report.sub_comm
500497
gather_report_on_local_rank_0(report)
501498

502-
if sub_comm.Get_rank() == 0: # if local master proc, send
499+
if sub_comm.rank == 0: # if local master proc, send
503500
# The idea of the scheduler is the following:
504501
# The server schedules test over clients
505502
# A client executes the test then report to the server it is done

pytest_parallel/plugin.py

+13-12
Original file line numberDiff line numberDiff line change
@@ -7,11 +7,12 @@
77
import tempfile
88
from pathlib import Path
99
import argparse
10+
1011
import pytest
1112
from _pytest.terminal import TerminalReporter
1213

1314
class PytestParallelError(ValueError):
14-
pass
15+
pass
1516

1617
# --------------------------------------------------------------------------
1718
def pytest_addoption(parser):
@@ -104,9 +105,9 @@ def pytest_configure(config):
104105
slurm_file = config.getoption('slurm_file')
105106
slurm_export_env = config.getoption('slurm_export_env')
106107
detach = config.getoption('detach')
107-
if scheduler != 'slurm' and scheduler != 'shell':
108+
if not scheduler in ['slurm', 'shell']:
108109
assert not is_worker, f'Internal pytest_parallel error `--_worker` not available with`--scheduler={scheduler}`'
109-
if (scheduler == 'slurm' or scheduler == 'shell') and not is_worker:
110+
if scheduler in ['slurm', 'shell'] and not is_worker:
110111
if n_workers is None:
111112
raise PytestParallelError(f'You need to specify `--n-workers` when `--scheduler={scheduler}`')
112113
if scheduler != 'slurm':
@@ -119,7 +120,7 @@ def pytest_configure(config):
119120
if slurm_file is not None:
120121
raise PytestParallelError('Option `--slurm-file` only available when `--scheduler=slurm`')
121122

122-
if (scheduler == 'shell' or scheduler == 'slurm') and not is_worker:
123+
if scheduler in ['shell', 'slurm'] and not is_worker:
123124
from mpi4py import MPI
124125
if MPI.COMM_WORLD.size != 1:
125126
err_msg = 'Do not launch `pytest_parallel` on more than one process when `--scheduler=shell` or `--scheduler=slurm`.\n' \
@@ -142,7 +143,7 @@ def pytest_configure(config):
142143
raise PytestParallelError('You cannot specify `--slurm-init-cmds` together with `--slurm-file`')
143144

144145
if '-n=' in slurm_options or '--ntasks=' in slurm_options:
145-
raise PytestParallelError('Do not specify `-n/--ntasks` in `--slurm-options` (it is deduced from the `--n-worker` value).')
146+
raise PytestParallelError('Do not specify `-n/--ntasks` in `--slurm-options` (it is deduced from the `--n-worker` value).')
146147

147148
from .slurm_scheduler import SlurmScheduler
148149

@@ -154,7 +155,7 @@ def pytest_configure(config):
154155
## pull apart `--slurm-options` for special treatment
155156
main_invoke_params = main_invoke_params.replace(f'--slurm-options={slurm_options}', '')
156157
for file_or_dir in config.option.file_or_dir:
157-
main_invoke_params = main_invoke_params.replace(file_or_dir, '')
158+
main_invoke_params = main_invoke_params.replace(file_or_dir, '')
158159
slurm_option_list = slurm_options.split() if slurm_options is not None else []
159160
slurm_conf = {
160161
'options' : slurm_option_list,
@@ -172,7 +173,7 @@ def pytest_configure(config):
172173
# reconstruct complete invoke string
173174
main_invoke_params = _invoke_params(config.invocation_params.args)
174175
for file_or_dir in config.option.file_or_dir:
175-
main_invoke_params = main_invoke_params.replace(file_or_dir, '')
176+
main_invoke_params = main_invoke_params.replace(file_or_dir, '')
176177
plugin = ShellStaticScheduler(main_invoke_params, n_workers, detach)
177178
else:
178179
from mpi4py import MPI
@@ -190,7 +191,7 @@ def pytest_configure(config):
190191
elif scheduler == 'dynamic':
191192
inter_comm = spawn_master_process(global_comm)
192193
plugin = DynamicScheduler(global_comm, inter_comm)
193-
elif (scheduler == 'slurm' or scheduler == 'shell') and is_worker:
194+
elif scheduler in ['shell', 'slurm'] and is_worker:
194195
scheduler_ip_address = config.getoption('_scheduler_ip_address')
195196
scheduler_port = config.getoption('_scheduler_port')
196197
session_folder = config.getoption('_session_folder')
@@ -209,7 +210,7 @@ def pytest_configure(config):
209210

210211
# Pytest relies on having a terminal reporter to decide on how to create error messages, see #12
211212
# Hence, register a terminal reporter that outputs to /dev/null
212-
null_file = open(os.devnull,'w')
213+
null_file = open(os.devnull,'w', encoding='utf-8')
213214
terminal_reporter = TerminalReporter(config, null_file)
214215
config.pluginmanager.register(terminal_reporter, "terminalreporter")
215216

@@ -238,16 +239,16 @@ def __init__(self, comm):
238239
def __enter__(self):
239240
from mpi4py import MPI
240241
if self.comm != MPI.COMM_NULL: # TODO DEL once non-participating rank do not participate in fixtures either
241-
rank = self.comm.Get_rank()
242+
rank = self.comm.rank
242243
self.tmp_dir = tempfile.TemporaryDirectory() if rank == 0 else None
243244
self.tmp_path = Path(self.tmp_dir.name) if rank == 0 else None
244245
return self.comm.bcast(self.tmp_path, root=0)
245246

246-
def __exit__(self, type, value, traceback):
247+
def __exit__(self, ex_type, ex_value, traceback):
247248
from mpi4py import MPI
248249
if self.comm != MPI.COMM_NULL: # TODO DEL once non-participating rank do not participate in fixtures either
249250
self.comm.barrier()
250-
if self.comm.Get_rank() == 0:
251+
if self.comm.rank == 0:
251252
self.tmp_dir.cleanup()
252253

253254

pytest_parallel/process_worker.py

+8-8
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
1-
import pytest
1+
from pathlib import Path
2+
import pickle
23

4+
import pytest
35
from mpi4py import MPI
46

5-
from pathlib import Path
6-
import pickle
77
from .utils.items import get_n_proc_for_test, run_item_test
88
from .gather_report import gather_report_on_local_rank_0
99

@@ -32,18 +32,18 @@ def pytest_runtestloop(self, session) -> bool:
3232

3333
# check there is no file from a previous run
3434
if comm.rank == 0:
35-
for when in {'fatal_error', 'setup', 'call', 'teardown'}:
35+
for when in ['fatal_error', 'setup', 'call', 'teardown']:
3636
path = self._file_path(when)
3737
assert not path.exists(), f'INTERNAL FATAL ERROR in pytest_parallel: file "{path}" should not exist at this point'
3838

3939
# check the number of procs matches the one specified by the test
4040
if comm.size != test_comm_size: # fatal error, SLURM and MPI do not interoperate correctly
4141
if comm.rank == 0:
4242
error_info = f'FATAL ERROR in pytest_parallel with slurm scheduling: test `{item.nodeid}`' \
43-
f' uses a `comm` of size {test_comm_size} but was launched with size {comm.Get_size()}.\n' \
43+
f' uses a `comm` of size {test_comm_size} but was launched with size {comm.size}.\n' \
4444
f' This generally indicates that `srun` does not interoperate correctly with MPI.'
4545
file_path = self._file_path('fatal_error')
46-
with open(file_path, "w") as f:
46+
with open(file_path, 'w', encoding='utf-8') as f:
4747
f.write(error_info)
4848
return True
4949

@@ -55,7 +55,7 @@ def pytest_runtestloop(self, session) -> bool:
5555
assert 0, f'{item.test_info["fatal_error"]}'
5656

5757
return True
58-
58+
5959
@pytest.hookimpl(hookwrapper=True)
6060
def pytest_runtest_makereport(self, item):
6161
"""
@@ -77,5 +77,5 @@ def pytest_runtest_logreport(self, report):
7777
'longrepr': report.longrepr,
7878
'duration': report.duration, }
7979
if sub_comm.rank == 0:
80-
with open(self._file_path(report.when), "wb") as f:
80+
with open(self._file_path(report.when), 'wb') as f:
8181
f.write(pickle.dumps(report_info))

0 commit comments

Comments
 (0)