Use Black code style (#322)
* Add black as a dependency for the development environment on installations with Python 3.6+
* Add pyproject.toml to configure Black, using the --skip-string-normalization option to preserve single quotes (see the sketch after this list)
* Apply Black to the entire code base
* Add black --check --diff --verbose . to CI to ensure that the code style is Black
* Add Git pre-commit hooks to the developer environment with pre-commit and add .pre-commit-config.yaml
* Add pre-commit setup to the development environment setup docs
* Add Black code style badge to README
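The pyproject.toml added by this commit does not appear in the diffs shown below. Purely as an illustration of the bullet above, a minimal [tool.black] section with string normalization disabled might look like the following (hypothetical sketch; the actual file may use a different line length and its own include/exclude patterns):

[tool.black]
# Hypothetical sketch, not the exact configuration committed here.
line-length = 88                  # Black's default; the committed value may differ
skip-string-normalization = true  # leave existing single-quoted strings untouched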
matthewfeickert authored and lukasheinrich committed Oct 21, 2018
1 parent 700033d commit f7d27c7
Showing 63 changed files with 2,507 additions and 1,666 deletions.
6 changes: 6 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,6 @@
+repos:
+- repo: https://github.com/ambv/black
+  rev: stable
+  hooks:
+  - id: black
+    language_version: python3.6
1 change: 1 addition & 0 deletions .travis.yml
@@ -14,6 +14,7 @@ install:
 script:
 - pyflakes pyhf
 - pytest -r sx --ignore tests/benchmarks/ --ignore tests/test_notebooks.py
+- if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then black --check --diff --verbose .; fi
 after_success: coveralls
 
 # always test (on both 'push' and 'pr' builds in Travis)
1 change: 1 addition & 0 deletions README.md
@@ -5,6 +5,7 @@
 [![Build Status](https://travis-ci.org/diana-hep/pyhf.svg?branch=master)](https://travis-ci.org/diana-hep/pyhf)
 [![Docker Automated](https://img.shields.io/docker/automated/pyhf/pyhf.svg)](https://hub.docker.com/r/pyhf/pyhf/)
 [![Coverage Status](https://coveralls.io/repos/github/diana-hep/pyhf/badge.svg?branch=master)](https://coveralls.io/github/diana-hep/pyhf?branch=master) [![Code Health](https://landscape.io/github/diana-hep/pyhf/master/landscape.svg?style=flat)](https://landscape.io/github/diana-hep/pyhf/master)
+[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/ambv/black)
 
 [![Docs](https://img.shields.io/badge/docs-master-blue.svg)](https://diana-hep.github.io/pyhf)
 [![Binder](https://mybinder.org/badge.svg)](https://mybinder.org/v2/gh/diana-hep/pyhf/master?filepath=docs%2Fexamples%2Fnotebooks%2Fbinderexample%2FStatisticalAnalysis.ipynb)
29 changes: 20 additions & 9 deletions binder/trigger_binder.py
@@ -8,14 +8,15 @@
 from selenium.webdriver.support.expected_conditions import staleness_of
 
 
-class SeleniumSession():
+class SeleniumSession:
     def __init__(self, args):
         self.options = Options()
         self.options.set_headless()
         self.options.add_argument('--no-sandbox')
         if args.chromedriver_path is not None:
             self.browser = webdriver.Chrome(
-                args.chromedriver_path, chrome_options=self.options)
+                args.chromedriver_path, chrome_options=self.options
+            )
         else:
             self.browser = webdriver.Chrome(chrome_options=self.options)
 
@@ -39,13 +40,23 @@ def main(args):
 
 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
-    parser.add_argument('-v', '--verbose', dest='is_verbose',
-                        action='store_true',
-                        help='Print out more information')
-    parser.add_argument('--chromedriver-path', dest='chromedriver_path',
-                        type=str, default=None, help='System path to ChromeDriver')
-    parser.add_argument('--url', dest='url',
-                        type=str, default=None, help='URL for Selinium to open')
+    parser.add_argument(
+        '-v',
+        '--verbose',
+        dest='is_verbose',
+        action='store_true',
+        help='Print out more information',
+    )
+    parser.add_argument(
+        '--chromedriver-path',
+        dest='chromedriver_path',
+        type=str,
+        default=None,
+        help='System path to ChromeDriver',
+    )
+    parser.add_argument(
+        '--url', dest='url', type=str, default=None, help='URL for Selinium to open'
+    )
     args = parser.parse_args()
 
     main(args)
60 changes: 35 additions & 25 deletions docs/conf.py
@@ -18,10 +18,15 @@
 #
 import os
 import sys
+
 sys.path.insert(0, os.path.abspath('..'))
 
+
 def setup(app):
-    app.add_stylesheet('https://cdnjs.cloudflare.com/ajax/libs/github-fork-ribbon-css/0.2.2/gh-fork-ribbon.min.css')
+    app.add_stylesheet(
+        'https://cdnjs.cloudflare.com/ajax/libs/github-fork-ribbon-css/0.2.2/gh-fork-ribbon.min.css'
+    )
 
+
 # -- General configuration ------------------------------------------------
 
@@ -257,29 +262,31 @@ def setup(app):
 # -- Options for LaTeX output ---------------------------------------------
 
 latex_elements = {
-    # The paper size ('letterpaper' or 'a4paper').
-    #
-    # 'papersize': 'letterpaper',
-
-    # The font size ('10pt', '11pt' or '12pt').
-    #
-    # 'pointsize': '10pt',
-
-    # Additional stuff for the LaTeX preamble.
-    #
-    # 'preamble': '',
-
-    # Latex figure (float) alignment
-    #
-    # 'figure_align': 'htbp',
+    # The paper size ('letterpaper' or 'a4paper').
+    #
+    # 'papersize': 'letterpaper',
+    # The font size ('10pt', '11pt' or '12pt').
+    #
+    # 'pointsize': '10pt',
+    # Additional stuff for the LaTeX preamble.
+    #
+    # 'preamble': '',
+    # Latex figure (float) alignment
+    #
+    # 'figure_align': 'htbp',
 }
 
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title,
 # author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, 'pyhf.tex', u'pyhf Documentation',
-     u'Lukas Heinrich, Matthew Feickert', 'manual'),
+    (
+        master_doc,
+        'pyhf.tex',
+        u'pyhf Documentation',
+        u'Lukas Heinrich, Matthew Feickert',
+        'manual',
+    )
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
@@ -319,10 +326,7 @@ def setup(app):
 
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [
-    (master_doc, 'pyhf', u'pyhf Documentation',
-     [author], 1)
-]
+man_pages = [(master_doc, 'pyhf', u'pyhf Documentation', [author], 1)]
 
 # If true, show URL addresses after external links.
 #
@@ -335,9 +339,15 @@
 # (source start file, target name, title, author,
 # dir menu entry, description, category)
 texinfo_documents = [
-    (master_doc, 'pyhf', u'pyhf Documentation',
-     author, 'pyhf', 'One line description of project.',
-     'Miscellaneous'),
+    (
+        master_doc,
+        'pyhf',
+        u'pyhf Documentation',
+        author,
+        'pyhf',
+        'One line description of project.',
+        'Miscellaneous',
+    )
 ]
 
 # Documents to append as an appendix to all manuals.
4 changes: 4 additions & 0 deletions docs/development.rst
@@ -4,3 +4,7 @@ Developing
 To develop, we suggest using `virtual environments <https://virtualenvwrapper.readthedocs.io/en/latest/>`__ together with ``pip`` or using `pipenv <https://pipenv.readthedocs.io/en/latest/>`__. To get all necessary packages for development::
 
     pip install --ignore-installed -U -e .[complete]
+
+Then setup the Git pre-commit hook for `Black <https://github.com/ambv/black>`__ by running::
+
+    pre-commit install
23 changes: 18 additions & 5 deletions pyhf/__init__.py
@@ -1,6 +1,7 @@
 from . import tensor, optimize
 from .version import __version__
 from . import events
+
 tensorlib = tensor.numpy_backend()
 default_backend = tensorlib
 optimizer = optimize.scipy_optimizer()
@@ -49,24 +50,36 @@ def set_backend(backend, custom_optimizer=None):
     optimizer_changed = False
 
     if backend.name == 'tensorflow':
-        new_optimizer = custom_optimizer if custom_optimizer else optimize.tflow_optimizer(backend)
+        new_optimizer = (
+            custom_optimizer if custom_optimizer else optimize.tflow_optimizer(backend)
+        )
         if tensorlib.name == 'tensorflow':
             tensorlib_changed |= bool(backend.session != tensorlib.session)
     elif backend.name == 'pytorch':
-        new_optimizer = custom_optimizer if custom_optimizer else optimize.pytorch_optimizer(tensorlib=backend)
+        new_optimizer = (
+            custom_optimizer
+            if custom_optimizer
+            else optimize.pytorch_optimizer(tensorlib=backend)
+        )
     # TODO: Add support for mxnet_optimizer()
     # elif tensorlib.name == 'mxnet':
     # new_optimizer = custom_optimizer if custom_optimizer else mxnet_optimizer()
     else:
-        new_optimizer = custom_optimizer if custom_optimizer else optimize.scipy_optimizer()
+        new_optimizer = (
+            custom_optimizer if custom_optimizer else optimize.scipy_optimizer()
+        )
 
     optimizer_changed = bool(optimizer != new_optimizer)
     # set new backend
     tensorlib = backend
     optimizer = new_optimizer
     # trigger events
-    if tensorlib_changed: events.trigger("tensorlib_changed")()
-    if optimizer_changed: events.trigger("optimizer_changed")()
+    if tensorlib_changed:
+        events.trigger("tensorlib_changed")()
+    if optimizer_changed:
+        events.trigger("optimizer_changed")()
+
+
 from .pdf import Model
 
 __all__ = ['Model', 'utils', 'modifiers', '__version__']
60 changes: 47 additions & 13 deletions pyhf/commandline.py
@@ -1,6 +1,4 @@
 import logging
-logging.basicConfig()
-log = logging.getLogger(__name__)
 
 import click
 import json
@@ -14,16 +12,29 @@
 from .pdf import Model
 from .version import __version__
 
+logging.basicConfig()
+log = logging.getLogger(__name__)
+
+
 @click.group(context_settings=dict(help_option_names=['-h', '--help']))
 @click.version_option(version=__version__)
 def pyhf():
     pass
 
 
 @pyhf.command()
 @click.argument('entrypoint-xml', type=click.Path(exists=True))
-@click.option('--basedir', help='The base directory for the XML files to point relative to.', type=click.Path(exists=True), default=os.getcwd())
-@click.option('--output-file', help='The location of the output json file. If not specified, prints to screen.', default=None)
+@click.option(
+    '--basedir',
+    help='The base directory for the XML files to point relative to.',
+    type=click.Path(exists=True),
+    default=os.getcwd(),
+)
+@click.option(
+    '--output-file',
+    help='The location of the output json file. If not specified, prints to screen.',
+    default=None,
+)
 @click.option('--track-progress/--hide-progress', default=True)
 def xml2json(entrypoint_xml, basedir, output_file, track_progress):
     """ Entrypoint XML: The top-level XML file for the PDF definition. """
@@ -36,6 +47,7 @@ def xml2json(entrypoint_xml, basedir, output_file, track_progress):
         log.debug("Written to {0:s}".format(output_file))
     sys.exit(0)
 
+
 @pyhf.command()
 @click.argument('workspace', default='-')
 @click.argument('xmlfile', default='-')
@@ -45,12 +57,19 @@ def json2xml(workspace, xmlfile, specroot, dataroot):
     with click.open_file(workspace, 'r') as specstream:
         d = json.load(specstream)
         with click.open_file(xmlfile, 'w') as outstream:
-            outstream.write(writexml.writexml(d, specroot, dataroot,'').decode('utf-8'))
+            outstream.write(
+                writexml.writexml(d, specroot, dataroot, '').decode('utf-8')
+            )
     sys.exit(0)
 
+
 @pyhf.command()
 @click.argument('workspace', default='-')
-@click.option('--output-file', help='The location of the output json file. If not specified, prints to screen.', default=None)
+@click.option(
+    '--output-file',
+    help='The location of the output json file. If not specified, prints to screen.',
+    default=None,
+)
 @click.option('--measurement', default=None)
 @click.option('-p', '--patch', multiple=True)
 @click.option('--qualify-names/--no-qualify-names', default=False)
@@ -60,10 +79,14 @@ def cls(workspace, output_file, measurement, qualify_names, patch):
     measurements = d['toplvl']['measurements']
     measurement_names = [m['name'] for m in measurements]
     measurement_index = 0
 
     log.debug('measurements defined:\n\t{0:s}'.format('\n\t'.join(measurement_names)))
     if measurement and measurement not in measurement_names:
-        log.error('no measurement by name \'{0:s}\' exists, pick from one of the valid ones above'.format(measurement))
+        log.error(
+            'no measurement by name \'{0:s}\' exists, pick from one of the valid ones above'.format(
+                measurement
+            )
+        )
         sys.exit(1)
     else:
         if not measurement and len(measurements) > 1:
@@ -72,16 +95,27 @@
         elif measurement:
             measurement_index = measurement_names.index(measurement)
 
-    log.debug('calculating CLs for measurement {0:s}'.format(measurements[measurement_index]['name']))
-    spec = {'channels':d['channels']}
+    log.debug(
+        'calculating CLs for measurement {0:s}'.format(
+            measurements[measurement_index]['name']
+        )
+    )
+    spec = {'channels': d['channels']}
     for p in patch:
         with click.open_file(p, 'r') as read_file:
             p = jsonpatch.JsonPatch(json.loads(read_file.read()))
         spec = p.apply(spec)
-    p = Model(spec, poiname=measurements[measurement_index]['config']['poi'], qualify_names=qualify_names)
-    observed = sum((d['data'][c] for c in p.config.channels),[]) + p.config.auxdata
+    p = Model(
+        spec,
+        poiname=measurements[measurement_index]['config']['poi'],
+        qualify_names=qualify_names,
+    )
+    observed = sum((d['data'][c] for c in p.config.channels), []) + p.config.auxdata
     result = runOnePoint(1.0, observed, p)
-    result = {'CLs_obs': result[-2].tolist()[0], 'CLs_exp': result[-1].ravel().tolist()}
+    result = {
+        'CLs_obs': result[-2].tolist()[0],
+        'CLs_exp': result[-1].ravel().tolist(),
+    }
     if output_file is None:
         print(json.dumps(result, indent=4, sort_keys=True))
     else:
(Diffs for the remaining changed files are not shown.)