Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

style: Update to Black v20.8 formatting #1048

Merged
merged 12 commits into from
Aug 31, 2020
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/psf/black
rev: stable
rev: 20.8b1
kratsg marked this conversation as resolved.
Show resolved Hide resolved
hooks:
- id: black
language_version: python3
8 changes: 6 additions & 2 deletions src/pyhf/cli/patchset.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,9 @@ def cli():
@cli.command()
@click.argument('patchset', default='-')
@click.option(
'--name', help='The name of the patch to extract.', default=None,
'--name',
help='The name of the patch to extract.',
default=None,
)
@click.option(
'--output-file',
Expand Down Expand Up @@ -65,7 +67,9 @@ def extract(patchset, name, output_file, with_metadata):
@click.argument('background-only', default='-')
@click.argument('patchset', default='-')
@click.option(
'--name', help='The name of the patch to extract.', default=None,
'--name',
help='The name of the patch to extract.',
default=None,
)
@click.option(
'--output-file',
Expand Down
6 changes: 5 additions & 1 deletion src/pyhf/cli/spec.py
Original file line number Diff line number Diff line change
Expand Up @@ -346,7 +346,11 @@ def digest(workspace, algorithm, output_json):
}

if output_json:
output = json.dumps(digests, indent=4, sort_keys=True,)
output = json.dumps(
digests,
indent=4,
sort_keys=True,
)
else:
output = '\n'.join(
f"{hash_alg}:{digest}" for hash_alg, digest in digests.items()
Expand Down
8 changes: 4 additions & 4 deletions src/pyhf/constraints.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,10 +82,10 @@ def _precompute(self):
self.access_field = tensorlib.astensor(self._access_field, dtype='int')

def has_pdf(self):
'''
"""
Returns:
flag (`bool`): Whether the model has a Gaussian Constraint
'''
"""
return bool(self.param_viewer.index_selection)

def make_pdf(self, pars):
Expand Down Expand Up @@ -213,10 +213,10 @@ def _precompute(self):
self.batched_factors = tensorlib.astensor(self._batched_factors)

def has_pdf(self):
'''
"""
Returns:
flag (`bool`): Whether the model has a Gaussian Constraint
'''
"""
return bool(self.param_viewer.index_selection)

def make_pdf(self, pars):
Expand Down
4 changes: 2 additions & 2 deletions src/pyhf/modifiers/histosys.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,10 +80,10 @@ def _precompute(self):
)

def apply(self, pars):
'''
"""
Returns:
modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin)
'''
"""
if not self.param_viewer.index_selection:
return

Expand Down
4 changes: 2 additions & 2 deletions src/pyhf/modifiers/lumi.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,10 +57,10 @@ def _precompute(self):
self.lumi_default = tensorlib.ones(self.lumi_mask.shape)

def apply(self, pars):
'''
"""
Returns:
modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin)
'''
"""
if not self.param_viewer.index_selection:
return

Expand Down
4 changes: 2 additions & 2 deletions src/pyhf/modifiers/normfactor.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,10 +58,10 @@ def _precompute(self):
self.normfactor_default = tensorlib.ones(self.normfactor_mask.shape)

def apply(self, pars):
'''
"""
Returns:
modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin)
'''
"""
if not self.param_viewer.index_selection:
return
tensorlib, _ = get_backend()
Expand Down
4 changes: 2 additions & 2 deletions src/pyhf/modifiers/normsys.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,10 +81,10 @@ def _precompute(self):
)

def apply(self, pars):
'''
"""
Returns:
modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin)
'''
"""
if not self.param_viewer.index_selection:
return

Expand Down
4 changes: 2 additions & 2 deletions src/pyhf/modifiers/shapefactor.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,10 +131,10 @@ def _precompute(self):
self.sample_ones = tensorlib.ones(tensorlib.shape(self.shapefactor_mask)[1])

def apply(self, pars):
'''
"""
Returns:
modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin)
'''
"""
if not self.param_viewer.index_selection:
return

Expand Down
4 changes: 2 additions & 2 deletions src/pyhf/modifiers/shapesys.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,10 +154,10 @@ def finalize(self, pdfconfig):
pdfconfig.param_set(pname).auxdata = default_backend.tolist(factors)

def apply(self, pars):
'''
"""
Returns:
modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin)
'''
"""
tensorlib, _ = get_backend()
if not self.param_viewer.index_selection:
return
Expand Down
8 changes: 7 additions & 1 deletion src/pyhf/optimize/mixins.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,13 @@ def __init__(self, **kwargs):
)

def _internal_minimize(
self, func, x0, do_grad=False, bounds=None, fixed_vals=None, options={},
self,
func,
x0,
do_grad=False,
bounds=None,
fixed_vals=None,
options={},
):

minimizer = self._get_minimizer(
Expand Down
13 changes: 11 additions & 2 deletions src/pyhf/parameters/paramview.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,14 @@ def _tensorviewer_from_parmap(par_map, batch_size):
names, slices, _ = list(
zip(
*sorted(
[(k, v['slice'], v['slice'].start,) for k, v in par_map.items()],
[
(
k,
v['slice'],
v['slice'].start,
)
for k, v in par_map.items()
],
key=lambda x: x[2],
)
)
Expand All @@ -18,7 +25,9 @@ def _tensorviewer_from_parmap(par_map, batch_size):


def extract_index_access(
baseviewer, subviewer, indices,
baseviewer,
subviewer,
indices,
):
tensorlib, _ = get_backend()

Expand Down
7 changes: 6 additions & 1 deletion src/pyhf/pdf.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,12 @@ def _paramset_requirements_from_modelspec(spec, channel_nbins):

def _nominal_and_modifiers_from_spec(config, spec):
default_data_makers = {
'histosys': lambda: {'hi_data': [], 'lo_data': [], 'nom_data': [], 'mask': [],},
'histosys': lambda: {
'hi_data': [],
'lo_data': [],
'nom_data': [],
'mask': [],
},
'lumi': lambda: {'mask': []},
'normsys': lambda: {'hi': [], 'lo': [], 'nom_data': [], 'mask': []},
'normfactor': lambda: {'mask': []},
Expand Down
4 changes: 2 additions & 2 deletions src/pyhf/tensor/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ def _tensorviewer_from_slices(target_slices, names, batch_size):


def _tensorviewer_from_sizes(sizes, names, batch_size):
'''
"""
Creates a _Tensorviewer based on tensor sizes.

the TV will be able to stitch together data with
Expand All @@ -83,7 +83,7 @@ def _tensorviewer_from_sizes(sizes, names, batch_size):
tv.stitch([foo[slice1],foo[slice2],foo[slice3])

and split them again accordingly.
'''
"""
target_slices = []
start = 0
for sz in sizes:
Expand Down
3 changes: 2 additions & 1 deletion src/pyhf/tensor/tensorflow_backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -460,7 +460,8 @@ def normal_cdf(self, x, mu=0.0, sigma=1):
TensorFlow Tensor: The CDF
"""
normal = tfp.distributions.Normal(
self.astensor(mu, dtype='float'), self.astensor(sigma, dtype='float'),
self.astensor(mu, dtype='float'),
self.astensor(sigma, dtype='float'),
)
return normal.cdf(x)

Expand Down
3 changes: 2 additions & 1 deletion src/pyhf/writexml.py
Original file line number Diff line number Diff line change
Expand Up @@ -245,7 +245,8 @@ def writexml(spec, specdir, data_rootdir, resultprefix):
Path(specdir).parent.joinpath('HistFactorySchema.dtd'),
)
combination = ET.Element(
"Combination", OutputFilePrefix=str(Path(specdir).joinpath(resultprefix)),
"Combination",
OutputFilePrefix=str(Path(specdir).joinpath(resultprefix)),
)

with uproot.recreate(
Expand Down
4 changes: 2 additions & 2 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,11 +144,11 @@ def interpcode(request):

@pytest.fixture(scope='function')
def datadir(tmpdir, request):
'''
"""
Fixture responsible for searching a folder with the same name of test
module and, if available, moving all contents to a temporary directory so
tests can use them freely.
'''
"""
# this gets the module name (e.g. /path/to/pyhf/tests/test_schema.py)
# and then gets the directory by removing the suffix (e.g. /path/to/pyhf/tests/test_schema)
test_dir = pathlib.Path(request.module.__file__).with_suffix('')
Expand Down
6 changes: 5 additions & 1 deletion tests/test_backend_consistency.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,11 @@ def test_hypotest_qmu_tilde(
pyhf.set_backend(backend)

qmu_tilde = pyhf.infer.test_statistics.qmu_tilde(
1.0, data, pdf, pdf.config.suggested_init(), pdf.config.suggested_bounds(),
1.0,
data,
pdf,
pdf.config.suggested_init(),
pdf.config.suggested_bounds(),
)
test_statistic.append(qmu_tilde)

Expand Down
30 changes: 24 additions & 6 deletions tests/test_combined_modifiers.py
Original file line number Diff line number Diff line change
Expand Up @@ -396,7 +396,10 @@ def test_shapesys(backend):
par_map={
'dummy1': {
'paramset': paramset(
n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False,
n_parameters=1,
inits=[0],
bounds=[[0, 10]],
fixed=False,
),
'slice': slice(0, 1),
},
Expand Down Expand Up @@ -424,7 +427,10 @@ def test_shapesys(backend):
},
'dummy2': {
'paramset': paramset(
n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False,
n_parameters=1,
inits=[0],
bounds=[[0, 10]],
fixed=False,
),
'slice': slice(4, 5),
},
Expand Down Expand Up @@ -495,13 +501,19 @@ def test_normfactor(backend):
par_map={
'mu1': {
'paramset': unconstrained(
n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False,
n_parameters=1,
inits=[0],
bounds=[[0, 10]],
fixed=False,
),
'slice': slice(0, 1),
},
'mu2': {
'paramset': unconstrained(
n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False,
n_parameters=1,
inits=[0],
bounds=[[0, 10]],
fixed=False,
),
'slice': slice(1, 2),
},
Expand Down Expand Up @@ -575,7 +587,10 @@ def test_shapesys_zero(backend):
par_map={
'SigXsecOverSM': {
'paramset': paramset(
n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False,
n_parameters=1,
inits=[0],
bounds=[[0, 10]],
fixed=False,
),
'slice': slice(0, 1),
},
Expand Down Expand Up @@ -669,7 +684,10 @@ def test_shapefactor(backend):
par_map={
'shapefac1': {
'paramset': unconstrained(
n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False,
n_parameters=1,
inits=[0],
bounds=[[0, 10]],
fixed=False,
),
'slice': slice(0, 1),
},
Expand Down
4 changes: 2 additions & 2 deletions tests/test_infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,11 +120,11 @@ def test_hypotest_return_expected_set(tmpdir, hypotest_args):


def test_inferapi_pyhf_independence():
'''
"""
pyhf.infer should eventually be factored out so it should be
independent from pyhf internals. This is testing that
a much simpler model still can run through pyhf.infer.hypotest
'''
"""
from pyhf import get_backend

class _NonPyhfConfig(object):
Expand Down
12 changes: 10 additions & 2 deletions tests/test_optim.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,9 @@ def rosen(x):

@pytest.mark.parametrize('do_stitch', [False, True], ids=['no_stitch', 'do_stitch'])
@pytest.mark.parametrize(
'precision', ['32b', '64b'], ids=['32b', '64b'],
'precision',
['32b', '64b'],
ids=['32b', '64b'],
)
@pytest.mark.parametrize(
'tensorlib',
Expand Down Expand Up @@ -141,7 +143,13 @@ def test_optimizer_mixin_extra_kwargs(optimizer):
@pytest.mark.parametrize(
'backend,backend_new',
itertools.permutations(
[('numpy', False), ('pytorch', True), ('tensorflow', True), ('jax', True),], 2
[
('numpy', False),
('pytorch', True),
('tensorflow', True),
('jax', True),
],
2,
),
ids=lambda pair: f'{pair[0]}',
)
Expand Down
5 changes: 4 additions & 1 deletion tests/test_patchset.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,10 @@ def patch():

@pytest.mark.parametrize(
'patchset_file',
['patchset_bad_empty_patches.json', 'patchset_bad_no_version.json',],
[
'patchset_bad_empty_patches.json',
'patchset_bad_no_version.json',
],
)
def test_patchset_invalid_spec(datadir, patchset_file):
patchsetspec = json.load(open(datadir.join(patchset_file)))
Expand Down
Loading