Devops: Update pylint version (#903)
Besides bumping the version of `pylint` in `pyproject.toml`:

* Set the `pylint` `repo` to `local` in the pre-commit config.
* Remove the deprecated `bad-continuation` check from the disabled checks.
* Disable the `fixme` check to avoid complaints about `TODO` statements.
* Disable the `use-dict-literal` check; using literal dictionaries is slightly faster,
  but I find the `dict()` calls easier to read in some cases (see the sketch below).
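For context, a minimal sketch of the two styles the `use-dict-literal` check distinguishes (the keys and values here are made up for illustration, not taken from this commit):

```python
# The check flags the constructor form; the literal form skips a global name
# lookup and a function call, so it is marginally faster at runtime.
settings_literal = {'kpoints_distance': 0.15, 'clean_workdir': True}

# The keyword-argument form reads more like a function signature, which is
# the readability argument for disabling the check.
settings_call = dict(kpoints_distance=0.15, clean_workdir=True)

assert settings_literal == settings_call
```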
mbercx authored Apr 6, 2023
1 parent 7f53c96 commit 9f4142d
Showing 14 changed files with 73 additions and 48 deletions.
6 changes: 4 additions & 2 deletions .pre-commit-config.yaml
@@ -39,11 +39,13 @@ repos:
)$
additional_dependencies: ['toml']

- repo: https://github.com/PyCQA/pylint
rev: v2.12.2
- repo: local
hooks:
- id: pylint
name: pylint
entry: pylint
language: system
types: [python]
exclude: *exclude_files

- repo: https://github.com/PyCQA/pydocstyle
9 changes: 5 additions & 4 deletions pyproject.toml
@@ -51,7 +51,7 @@ docs = [
]
pre-commit = [
'pre-commit~=2.17',
'pylint~=2.12.2',
'pylint~=2.17.2',
'pylint-aiida~=0.1.1',
'toml',
]
@@ -162,17 +162,18 @@ generated-members = 'self.exit_codes.*'

[tool.pylint.messages_control]
disable = [
'bad-continuation',
'duplicate-code',
'locally-disabled',
'logging-format-interpolation',
'fixme',
'inconsistent-return-statements',
'import-outside-toplevel',
'locally-disabled',
'logging-format-interpolation',
'no-else-raise',
'too-many-arguments',
'too-many-ancestors',
'too-many-branches',
'too-many-locals',
'use-dict-literal',
]

[tool.pylint.basic]
7 changes: 4 additions & 3 deletions src/aiida_quantumespresso/calculations/__init__.py
@@ -518,7 +518,7 @@ def _generate_PWCPinputdata(cls, parameters, settings, pseudos, structure, kpoin
# Note the (idx+1) to convert to fortran 1-based lists
mapping_species = {sp_name: (idx + 1) for idx, sp_name in enumerate(mapping_species)}
# I add the first line
sorted_atomic_species_card_list = (['ATOMIC_SPECIES\n'] + list(sorted_atomic_species_card_list))
sorted_atomic_species_card_list = ['ATOMIC_SPECIES\n'] + list(sorted_atomic_species_card_list)
atomic_species_card = ''.join(sorted_atomic_species_card_list)
# Free memory
del sorted_atomic_species_card_list
@@ -624,6 +624,8 @@ def _generate_PWCPinputdata(cls, parameters, settings, pseudos, structure, kpoin
input_params['SYSTEM']['ntyp'] = len(structure.kinds)

# ============ I prepare the k-points =============
kpoints_card = ''

if cls._use_kpoints:
try:
mesh, offset = kpoints.get_kpoints_mesh()
@@ -744,8 +746,7 @@ def _generate_PWCPinputdata(cls, parameters, settings, pseudos, structure, kpoin
# Write cards now
inputfile += atomic_species_card
inputfile += atomic_positions_card
if cls._use_kpoints:
inputfile += kpoints_card
inputfile += kpoints_card
inputfile += cell_parameters_card
if hubbard_card is not None:
inputfile += hubbard_card
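The two hunks above work together: `kpoints_card` is now initialised to an empty string up front, so the card can be appended unconditionally when the input file is assembled. A minimal sketch of the pattern with made-up card contents (not the repository's actual input builder):

```python
def build_input(use_kpoints: bool) -> str:
    # Initialise the card unconditionally so the concatenation below never
    # touches an unbound name, even when k-points are not used.
    kpoints_card = ''
    if use_kpoints:
        kpoints_card = 'K_POINTS automatic\n4 4 4 0 0 0\n'
    # No `if use_kpoints` guard is needed here: an empty card is a no-op.
    return 'ATOMIC_SPECIES\nSi 28.0855 Si.upf\n' + kpoints_card
```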
5 changes: 3 additions & 2 deletions src/aiida_quantumespresso/calculations/epw.py
@@ -57,7 +57,7 @@ def define(cls, spec):
spec.input('parent_folder_ph', valid_type=orm.RemoteData, help='the folder of a completed `PhCalculation`')
# yapf: enable

def prepare_for_submission(self, folder): # pylint: disable=too-many-statements,too-many-branches
def prepare_for_submission(self, folder):
"""Prepare the calculation job for submission by transforming input nodes into input files.
In addition to the input files being written to the sandbox folder, a `CalcInfo` instance will be returned that
@@ -68,6 +68,8 @@ def prepare_for_submission(self, folder): # pylint: disable=too-many-statements
:return: :class:`~aiida.common.datastructures.CalcInfo` instance.
"""

# pylint: disable=too-many-statements,too-many-branches

def test_offset(offset):
"""Check if the grid has an offset."""
if any(i != 0. for i in offset):
@@ -76,7 +78,6 @@ def test_offset(offset):
'at the level of epw.x'
)

# pylint: disable=too-many-statements,too-many-branches
local_copy_list = []
remote_copy_list = []
remote_symlink_list = []
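The hunk above moves the `# pylint: disable=...` pragma off the `def` line and into the function body. As I understand pylint's scoping rules, a standalone pragma inside a function suppresses the listed checks from that line to the end of the enclosing scope, so the effect is unchanged while the signature line stays short. A minimal illustration with a hypothetical function:

```python
def prepare_everything():
    """A deliberately long routine."""
    # pylint: disable=too-many-statements,too-many-branches
    # From here to the end of the function, both checks are suppressed.
    steps_completed = 0
    steps_completed += 1
    return steps_completed
```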
@@ -66,8 +66,8 @@ def get_spectra_by_element(elements_list, equivalent_sites_data, **kwargs):
]

spectra_by_element[element] = np.column_stack((
sum([array[:, 0] for array in corrected_spectra]) / len(corrected_spectra),
sum([array[:, 1] for array in corrected_spectra])
sum(array[:, 0] for array in corrected_spectra) / len(corrected_spectra),
sum(array[:, 1] for array in corrected_spectra)
))

all_final_spectra = {}
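This hunk, like several later ones in `resources.py`, drops the square brackets inside `sum(...)` so a generator expression is passed instead of a list comprehension; newer pylint flags the list form (via `consider-using-generator`, if I recall the check name correctly). A minimal sketch with made-up numbers:

```python
values = [1.0, 2.0, 3.0]

# Old style: the list comprehension materialises an intermediate list first.
total_list = sum([v * 2 for v in values])

# New style: the generator expression feeds sum() lazily, so no intermediate
# list is allocated. The result is identical.
total_gen = sum(v * 2 for v in values)

assert total_list == total_gen
```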
@@ -73,7 +73,7 @@ def spectra_broadening(points, label='cls_spectra'):
final_spectra_y_labels = []
final_spectra_y_units = []

total_multiplicity = sum([i[0] for i in points[element]])
total_multiplicity = sum(i[0] for i in points[element])

final_spectra = orm.XyData()
max_core_level_shift = points[element][-1][1]
2 changes: 1 addition & 1 deletion src/aiida_quantumespresso/calculations/matdyn.py
@@ -60,7 +60,7 @@ def _validate_inputs(value, _):
if parameters.get('INPUT', {}).get('flfrc', None) is not None:
return '`INPUT.flfrc` is set automatically from the `force_constants` input.'

def generate_input_file(self, parameters):
def generate_input_file(self, parameters): # pylint: disable=arguments-differ
"""Generate namelist input_file content given a dict of parameters.
:param parameters: 'dict' containing the fortran namelists and parameters to be used.
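The new `# pylint: disable=arguments-differ` pragma is needed because the override accepts different parameters than the method it replaces. A minimal illustration with hypothetical classes (not the actual AiiDA class hierarchy):

```python
class Base:
    def generate_input_file(self, folder, parameters):
        """Write ``parameters`` into ``folder``."""


class Child(Base):
    # Dropping `folder` from the signature triggers W0221 (arguments-differ)
    # unless it is explicitly suppressed.
    def generate_input_file(self, parameters):  # pylint: disable=arguments-differ
        """Generate the file content from ``parameters`` alone."""
```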
2 changes: 1 addition & 1 deletion src/aiida_quantumespresso/calculations/xspectra.py
@@ -95,7 +95,7 @@ def define(cls, spec):
message='The spectrum data file could not be read using NumPy genfromtxt'
)

def generate_input_file(self, parameters):
def generate_input_file(self, parameters): # pylint: disable=arguments-differ
"""Add kpoint handling to the inherited method.
This checks that the offset for the mesh is in a valid format, converts the offset
6 changes: 3 additions & 3 deletions src/aiida_quantumespresso/cli/utils/validate.py
@@ -60,11 +60,11 @@ def validate_hubbard_parameters(structure, parameters, hubbard_u=None, hubbard_v

try:
hubbard_file = load_node(pk=hubbard_file_pk)
except exceptions.NotExistent:
ValueError(f'{hubbard_file_pk} is not a valid pk')
except exceptions.NotExistent as exc:
raise ValueError(f'{hubbard_file_pk} is not a valid pk') from exc
else:
if not isinstance(hubbard_file, SinglefileData):
ValueError(f'Node<{hubbard_file_pk}> is not a SinglefileData but {type(hubbard_file)}')
raise ValueError(f'Node<{hubbard_file_pk}> is not a SinglefileData but {type(hubbard_file)}')

parameters['SYSTEM']['lda_plus_u'] = True
parameters['SYSTEM']['lda_plus_u_kind'] = 2
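Note that this hunk fixes two genuine bugs: both `ValueError` instances were previously constructed but never raised. The added `from exc` also chains the original exception, which newer pylint enforces through `raise-missing-from` (W0707). A minimal sketch of the pattern with a hypothetical function:

```python
def load_positive(raw: str) -> int:
    """Parse ``raw`` as a positive integer, chaining the original error."""
    try:
        value = int(raw)
    except ValueError as exc:
        # `from exc` stores the original exception as __cause__, so the full
        # traceback survives; omitting it would trigger raise-missing-from.
        raise ValueError(f'{raw!r} is not a valid integer') from exc
    if value <= 0:
        raise ValueError(f'{raw!r} must be positive')
    return value
```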
2 changes: 1 addition & 1 deletion src/aiida_quantumespresso/data/force_constants.py
@@ -178,7 +178,7 @@ def parse_q2r_force_constants_file(lines, also_force_constants=False):
parsed_data['atom_list'] = atom_list

# read lrigid (flag for dielectric constant and effective charges
has_done_electric_field = (lines[current_line].split()[0] == 'T')
has_done_electric_field = lines[current_line].split()[0] == 'T'
parsed_data['has_done_electric_field'] = has_done_electric_field
current_line += 1

2 changes: 0 additions & 2 deletions src/aiida_quantumespresso/parsers/parse_xml/exceptions.py
@@ -4,9 +4,7 @@

class XMLParseError(Exception):
"""Raised when the XML output could not be parsed."""
pass


class XMLUnsupportedFormatError(Exception):
"""Raised when the XML output has an unsupported format."""
pass
6 changes: 3 additions & 3 deletions src/aiida_quantumespresso/utils/resources.py
@@ -152,14 +152,14 @@ def get_pw_parallelization_parameters(
time_single_cpu = np.prod(fft_grid) * nspin * nkpoints * niterations * scaling_law[0] * nbands**scaling_law[1]

# The number of nodes is the maximum number we can use that is dividing nkpoints
num_machines = max([m for m in range(1, max_num_machines + 1) if nkpoints % m == 0])
num_machines = max(m for m in range(1, max_num_machines + 1) if nkpoints % m == 0)

# If possible try to make number of kpoints even by changing the number of machines
if (
num_machines == 1 and nkpoints > 6 and max_num_machines > 1 and
time_single_cpu / default_num_mpiprocs_per_machine > target_time_seconds
):
num_machines = max([m for m in range(1, max_num_machines + 1) if (nkpoints + 1) % m == 0])
num_machines = max(m for m in range(1, max_num_machines + 1) if (nkpoints + 1) % m == 0)

# Now we will try to decrease the number of processes per machine (by not more than one fourth)
# until we manage to get an efficient plane wave parallelization
@@ -185,7 +185,7 @@

# Increase the number of machines in case of memory problem during initialization
if calculation.get_scheduler_stderr() and 'OOM' in calculation.get_scheduler_stderr():
num_machines = max([i for i in range(num_machines, max_num_machines + 1) if i % num_machines == 0])
num_machines = max(i for i in range(num_machines, max_num_machines + 1) if i % num_machines == 0)

estimated_time = time_single_cpu / (num_mpiprocs_per_machine * num_machines)
max_wallclock_seconds = min(ceil(estimated_time / round_interval) * round_interval, max_wallclock_seconds)
2 changes: 1 addition & 1 deletion src/aiida_quantumespresso/utils/validation/trajectory.py
@@ -117,6 +117,6 @@ def verify_convergence_stress(
except (KeyError, IndexError) as exception:
raise ValueError('the `stress` array does not exist or the given index exceeds the length.') from exception

pressure = (numpy.trace(stress) / 3.)
pressure = numpy.trace(stress) / 3.

return abs(pressure - reference_pressure) < threshold
66 changes: 44 additions & 22 deletions tests/parsers/test_pp.py
@@ -137,11 +137,16 @@ def test_pp_default_1d(
data_array = results['output_data'].get_array('data')
coords_array = results['output_data'].get_array('x_coordinates')
units_array = results['output_data'].get_array('x_coordinates_units')
num_regression.check({
'data_array': data_array,
'coords_array': coords_array
},
default_tolerance=dict(atol=0, rtol=1e-18))
num_regression.check(
data_dict={
'data_array': data_array,
'coords_array': coords_array
},
default_tolerance={
'atol': 0,
'rtol': 1e-18
}
)
data_regression.check({'parameters': results['output_parameters'].get_dict(), 'units_array': units_array.tolist()})


@@ -170,12 +175,17 @@ def test_pp_default_1d_spherical(
data_units_array = results['output_data'].get_array('data_units')
data_int_units_array = results['output_data'].get_array('integrated_data_units')
coords_units_array = results['output_data'].get_array('x_coordinates_units')
num_regression.check({
'data_array': data_array,
'data_array_int': data_array_int,
'coords_array': coords_array
},
default_tolerance=dict(atol=0, rtol=1e-18))
num_regression.check(
data_dict={
'data_array': data_array,
'data_array_int': data_array_int,
'coords_array': coords_array
},
default_tolerance={
'atol': 0,
'rtol': 1e-18
}
)
data_regression.check({
'parameters': results['output_parameters'].get_dict(),
'data_units_array': data_units_array.tolist(),
@@ -204,11 +214,16 @@ def test_pp_default_2d(
coords_array = results['output_data'].get_array('xy_coordinates').flatten()
data_units_array = results['output_data'].get_array('data_units')
coords_units_array = results['output_data'].get_array('xy_coordinates_units')
num_regression.check({
'data_array': data_array,
'coords_array': coords_array
},
default_tolerance=dict(atol=0, rtol=1e-18))
num_regression.check(
data_dict={
'data_array': data_array,
'coords_array': coords_array
},
default_tolerance={
'atol': 0,
'rtol': 1e-18
}
)
data_regression.check({
'parameters': results['output_parameters'].get_dict(),
'data_units': data_units_array.tolist(),
@@ -236,7 +251,10 @@ def test_pp_default_polar(
data_units_array = results['output_data'].get_array('data_units')
num_regression.check({
'data_array': data_array,
}, default_tolerance=dict(atol=0, rtol=1e-18))
}, default_tolerance={
'atol': 0,
'rtol': 1e-18
})
data_regression.check({
'parameters': results['output_parameters'].get_dict(),
'data_units': data_units_array.tolist(),
@@ -263,11 +281,15 @@ def test_pp_default_3d(
voxel_array = results['output_data'].get_array('voxel').flatten()
data_units_array = results['output_data'].get_array('data_units')
coordinates_units_array = results['output_data'].get_array('coordinates_units')
num_regression.check({
'data_array': data_array,
'voxel_array': voxel_array,
},
default_tolerance=dict(atol=0, rtol=1e-18))
num_regression.check(
data_dict={
'data_array': data_array,
'voxel_array': voxel_array
}, default_tolerance={
'atol': 0,
'rtol': 1e-18
}
)
data_regression.check({
'parameters': results['output_parameters'].get_dict(),
'data_units': data_units_array.tolist(),
