Apply Black to pyhf/
matthewfeickert committed Oct 21, 2018
1 parent ed40fb7 commit 067a303
Showing 11 changed files with 621 additions and 331 deletions.
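The changes below are mechanical reformatting. A sketch of the likely invocation: running Black itself is confirmed by the commit title, but the -S/--skip-string-normalization flag is only an inference from the single-quoted strings this diff leaves untouched.

import subprocess

# Hypothetical invocation (flag inferred from the preserved single quotes,
# not confirmed by the commit); equivalent to `black -S pyhf/` in a shell.
subprocess.run(["black", "--skip-string-normalization", "pyhf/"], check=True)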
23 changes: 18 additions & 5 deletions pyhf/__init__.py
@@ -1,6 +1,7 @@
from . import tensor, optimize
from .version import __version__
from . import events

tensorlib = tensor.numpy_backend()
default_backend = tensorlib
optimizer = optimize.scipy_optimizer()
@@ -49,24 +50,36 @@ def set_backend(backend, custom_optimizer=None):
optimizer_changed = False

if backend.name == 'tensorflow':
new_optimizer = custom_optimizer if custom_optimizer else optimize.tflow_optimizer(backend)
new_optimizer = (
custom_optimizer if custom_optimizer else optimize.tflow_optimizer(backend)
)
if tensorlib.name == 'tensorflow':
tensorlib_changed |= bool(backend.session != tensorlib.session)
elif backend.name == 'pytorch':
new_optimizer = custom_optimizer if custom_optimizer else optimize.pytorch_optimizer(tensorlib=backend)
new_optimizer = (
custom_optimizer
if custom_optimizer
else optimize.pytorch_optimizer(tensorlib=backend)
)
# TODO: Add support for mxnet_optimizer()
# elif tensorlib.name == 'mxnet':
# new_optimizer = custom_optimizer if custom_optimizer else mxnet_optimizer()
else:
new_optimizer = custom_optimizer if custom_optimizer else optimize.scipy_optimizer()
new_optimizer = (
custom_optimizer if custom_optimizer else optimize.scipy_optimizer()
)

optimizer_changed = bool(optimizer != new_optimizer)
# set new backend
tensorlib = backend
optimizer = new_optimizer
# trigger events
if tensorlib_changed: events.trigger("tensorlib_changed")()
if optimizer_changed: events.trigger("optimizer_changed")()
if tensorlib_changed:
events.trigger("tensorlib_changed")()
if optimizer_changed:
events.trigger("optimizer_changed")()


from .pdf import Model

__all__ = ['Model', 'utils', 'modifiers', '__version__']
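Beyond the whitespace changes, the hunk above preserves the events.trigger("tensorlib_changed")() idiom: trigger looks up an event and returns a callable that fires its subscribers. A minimal sketch of that pattern (hypothetical, illustrating the idiom only; not pyhf's actual events module):

# Sketch of a trigger-returns-callable event registry (hypothetical).
_subscribers = {}

def subscribe(event, callback):
    _subscribers.setdefault(event, []).append(callback)

def trigger(event):
    # Return a callable that fires every subscriber, hence trigger(...)()
    def fire():
        for callback in _subscribers.get(event, []):
            callback()
    return fire

subscribe("tensorlib_changed", lambda: print("backend changed"))
trigger("tensorlib_changed")()  # -> backend changed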
60 changes: 47 additions & 13 deletions pyhf/commandline.py
@@ -1,6 +1,4 @@
import logging
logging.basicConfig()
log = logging.getLogger(__name__)

import click
import json
@@ -14,16 +12,29 @@
from .pdf import Model
from .version import __version__

logging.basicConfig()
log = logging.getLogger(__name__)


@click.group(context_settings=dict(help_option_names=['-h', '--help']))
@click.version_option(version=__version__)
def pyhf():
pass


@pyhf.command()
@click.argument('entrypoint-xml', type=click.Path(exists=True))
@click.option('--basedir', help='The base directory for the XML files to point relative to.', type=click.Path(exists=True), default=os.getcwd())
@click.option('--output-file', help='The location of the output json file. If not specified, prints to screen.', default=None)
@click.option(
'--basedir',
help='The base directory for the XML files to point relative to.',
type=click.Path(exists=True),
default=os.getcwd(),
)
@click.option(
'--output-file',
help='The location of the output json file. If not specified, prints to screen.',
default=None,
)
@click.option('--track-progress/--hide-progress', default=True)
def xml2json(entrypoint_xml, basedir, output_file, track_progress):
""" Entrypoint XML: The top-level XML file for the PDF definition. """
@@ -36,6 +47,7 @@ def xml2json(entrypoint_xml, basedir, output_file, track_progress):
log.debug("Written to {0:s}".format(output_file))
sys.exit(0)


@pyhf.command()
@click.argument('workspace', default='-')
@click.argument('xmlfile', default='-')
@@ -45,12 +57,19 @@ def json2xml(workspace, xmlfile, specroot, dataroot):
with click.open_file(workspace, 'r') as specstream:
d = json.load(specstream)
with click.open_file(xmlfile, 'w') as outstream:
outstream.write(writexml.writexml(d, specroot, dataroot,'').decode('utf-8'))
outstream.write(
writexml.writexml(d, specroot, dataroot, '').decode('utf-8')
)
sys.exit(0)


@pyhf.command()
@click.argument('workspace', default='-')
@click.option('--output-file', help='The location of the output json file. If not specified, prints to screen.', default=None)
@click.option(
'--output-file',
help='The location of the output json file. If not specified, prints to screen.',
default=None,
)
@click.option('--measurement', default=None)
@click.option('-p', '--patch', multiple=True)
@click.option('--qualify-names/--no-qualify-names', default=False)
@@ -60,10 +79,14 @@ def cls(workspace, output_file, measurement, qualify_names, patch):
measurements = d['toplvl']['measurements']
measurement_names = [m['name'] for m in measurements]
measurement_index = 0

log.debug('measurements defined:\n\t{0:s}'.format('\n\t'.join(measurement_names)))
if measurement and measurement not in measurement_names:
log.error('no measurement by name \'{0:s}\' exists, pick from one of the valid ones above'.format(measurement))
log.error(
'no measurement by name \'{0:s}\' exists, pick from one of the valid ones above'.format(
measurement
)
)
sys.exit(1)
else:
if not measurement and len(measurements) > 1:
@@ -72,16 +95,27 @@
elif measurement:
measurement_index = measurement_names.index(measurement)

log.debug('calculating CLs for measurement {0:s}'.format(measurements[measurement_index]['name']))
spec = {'channels':d['channels']}
log.debug(
'calculating CLs for measurement {0:s}'.format(
measurements[measurement_index]['name']
)
)
spec = {'channels': d['channels']}
for p in patch:
with click.open_file(p, 'r') as read_file:
p = jsonpatch.JsonPatch(json.loads(read_file.read()))
spec = p.apply(spec)
p = Model(spec, poiname=measurements[measurement_index]['config']['poi'], qualify_names=qualify_names)
observed = sum((d['data'][c] for c in p.config.channels),[]) + p.config.auxdata
p = Model(
spec,
poiname=measurements[measurement_index]['config']['poi'],
qualify_names=qualify_names,
)
observed = sum((d['data'][c] for c in p.config.channels), []) + p.config.auxdata
result = runOnePoint(1.0, observed, p)
result = {'CLs_obs': result[-2].tolist()[0], 'CLs_exp': result[-1].ravel().tolist()}
result = {
'CLs_obs': result[-2].tolist()[0],
'CLs_exp': result[-1].ravel().tolist(),
}
if output_file is None:
print(json.dumps(result, indent=4, sort_keys=True))
else:
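The commandline.py changes are likewise formatting-only: the long click.option declarations are expanded onto multiple lines, but the xml2json, json2xml, and cls command signatures are unchanged. A quick smoke test through click's test runner (a sketch; assumes the package is importable under the layout shown in the diff):

# Sketch: exercising the click group without a shell.
from click.testing import CliRunner
from pyhf.commandline import pyhf

runner = CliRunner()
result = runner.invoke(pyhf, ['--help'])
assert result.exit_code == 0
print(result.output)  # lists the xml2json, json2xml, and cls subcommands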
137 changes: 100 additions & 37 deletions pyhf/constraints.py
@@ -1,14 +1,15 @@
from . import get_backend, default_backend
from . import events


class gaussian_constraint_combined(object):
def __init__(self,pdfconfig):
def __init__(self, pdfconfig):
# iterate over all constraints order doesn't matter....

self.par_indices = list(range(len(pdfconfig.suggested_init())))
self.data_indices = list(range(len(pdfconfig.auxdata)))
self.parset_and_slice = [
(pdfconfig.param_set(cname),pdfconfig.par_slice(cname))
(pdfconfig.param_set(cname), pdfconfig.par_slice(cname))
for cname in pdfconfig.auxdata_order
]
self._precompute()
@@ -20,11 +21,12 @@ def _precompute(self):
normal_constraint_data = []
normal_constraint_mean_indices = []
normal_constraint_sigmas = []
for parset,parslice in self.parset_and_slice:
for parset, parslice in self.parset_and_slice:
end_index = start_index + parset.n_parameters
thisauxdata = self.data_indices[start_index:end_index]
start_index = end_index
if not parset.pdf_type == 'normal': continue
if not parset.pdf_type == 'normal':
continue

# many constraints are defined on a unit gaussian
# but we reserved the possibility that a paramset
@@ -34,39 +36,66 @@
try:
normal_constraint_sigmas.append(parset.sigmas)
except AttributeError:
normal_constraint_sigmas.append([1.]*len(thisauxdata))
normal_constraint_sigmas.append([1.0] * len(thisauxdata))

normal_constraint_data.append(thisauxdata)
normal_constraint_mean_indices.append(self.par_indices[parslice])

if normal_constraint_mean_indices:
normal_mean_idc = default_backend.concatenate(list(map(lambda x: default_backend.astensor(x,dtype = 'int'),normal_constraint_mean_indices)))
normal_sigmas = default_backend.concatenate(list(map(default_backend.astensor,normal_constraint_sigmas)))
normal_data = default_backend.concatenate(list(map(lambda x: default_backend.astensor(x,dtype = 'int'),normal_constraint_data)))

self.normal_data = tensorlib.astensor(default_backend.tolist(normal_data),dtype = 'int')
self.normal_sigmas = tensorlib.astensor(default_backend.tolist(normal_sigmas))
self.normal_mean_idc = tensorlib.astensor(default_backend.tolist(normal_mean_idc),dtype = 'int')
normal_mean_idc = default_backend.concatenate(
list(
map(
lambda x: default_backend.astensor(x, dtype='int'),
normal_constraint_mean_indices,
)
)
)
normal_sigmas = default_backend.concatenate(
list(map(default_backend.astensor, normal_constraint_sigmas))
)
normal_data = default_backend.concatenate(
list(
map(
lambda x: default_backend.astensor(x, dtype='int'),
normal_constraint_data,
)
)
)

self.normal_data = tensorlib.astensor(
default_backend.tolist(normal_data), dtype='int'
)
self.normal_sigmas = tensorlib.astensor(
default_backend.tolist(normal_sigmas)
)
self.normal_mean_idc = tensorlib.astensor(
default_backend.tolist(normal_mean_idc), dtype='int'
)
else:
self.normal_data, self.normal_sigmas, self.normal_mean_idc = None, None, None
self.normal_data, self.normal_sigmas, self.normal_mean_idc = (
None,
None,
None,
)

def logpdf(self,auxdata,pars):
def logpdf(self, auxdata, pars):
if self.normal_data is None:
return 0
tensorlib, _ = get_backend()
normal_data = tensorlib.gather(auxdata,self.normal_data)
normal_means = tensorlib.gather(pars,self.normal_mean_idc)
normal = tensorlib.normal_logpdf(normal_data,normal_means,self.normal_sigmas)
normal_data = tensorlib.gather(auxdata, self.normal_data)
normal_means = tensorlib.gather(pars, self.normal_mean_idc)
normal = tensorlib.normal_logpdf(normal_data, normal_means, self.normal_sigmas)
return tensorlib.sum(normal)


class poisson_constraint_combined(object):
def __init__(self,pdfconfig):
def __init__(self, pdfconfig):
# iterate over all constraints order doesn't matter....

self.par_indices = list(range(len(pdfconfig.suggested_init())))
self.data_indices = list(range(len(pdfconfig.auxdata)))
self.mod_and_slice = [
(pdfconfig.param_set(cname),pdfconfig.par_slice(cname))
(pdfconfig.param_set(cname), pdfconfig.par_slice(cname))
for cname in pdfconfig.auxdata_order
]
self._precompute()
@@ -79,11 +108,12 @@ def _precompute(self):
poisson_constraint_data = []
poisson_constraint_rate_indices = []
poisson_constraint_rate_factors = []
for parset,parslice in self.mod_and_slice:
for parset, parslice in self.mod_and_slice:
end_index = start_index + parset.n_parameters
thisauxdata = self.data_indices[start_index:end_index]
start_index = end_index
if not parset.pdf_type == 'poisson': continue
if not parset.pdf_type == 'poisson':
continue

poisson_constraint_data.append(thisauxdata)
poisson_constraint_rate_indices.append(self.par_indices[parslice])
@@ -95,29 +125,62 @@ def _precompute(self):
try:
poisson_constraint_rate_factors.append(parset.factors)
except AttributeError:
poisson_constraint_rate_factors.append(default_backend.shape(self.par_indices[parslice]))

poisson_constraint_rate_factors.append(
default_backend.shape(self.par_indices[parslice])
)

if poisson_constraint_rate_indices:
poisson_rate_idc = default_backend.concatenate(list(map(lambda x: default_backend.astensor(x,dtype = 'int'), poisson_constraint_rate_indices)))
poisson_rate_fac = default_backend.concatenate(list(map(lambda x: default_backend.astensor(x,dtype = 'float'), poisson_constraint_rate_factors)))
poisson_data = default_backend.concatenate(list(map(lambda x: default_backend.astensor(x,dtype = 'int'), poisson_constraint_data)))

self.poisson_data = tensorlib.astensor(default_backend.tolist(poisson_data),dtype = 'int')
self.poisson_rate_idc = tensorlib.astensor(default_backend.tolist(poisson_rate_idc),dtype = 'int')
self.poisson_rate_fac = tensorlib.astensor(default_backend.tolist(poisson_rate_fac),dtype = 'float')
poisson_rate_idc = default_backend.concatenate(
list(
map(
lambda x: default_backend.astensor(x, dtype='int'),
poisson_constraint_rate_indices,
)
)
)
poisson_rate_fac = default_backend.concatenate(
list(
map(
lambda x: default_backend.astensor(x, dtype='float'),
poisson_constraint_rate_factors,
)
)
)
poisson_data = default_backend.concatenate(
list(
map(
lambda x: default_backend.astensor(x, dtype='int'),
poisson_constraint_data,
)
)
)

self.poisson_data = tensorlib.astensor(
default_backend.tolist(poisson_data), dtype='int'
)
self.poisson_rate_idc = tensorlib.astensor(
default_backend.tolist(poisson_rate_idc), dtype='int'
)
self.poisson_rate_fac = tensorlib.astensor(
default_backend.tolist(poisson_rate_fac), dtype='float'
)
else:
self.poisson_rate_idc, self.poisson_data, self.poisson_rate_fac = None, None, None
self.poisson_rate_idc, self.poisson_data, self.poisson_rate_fac = (
None,
None,
None,
)

def logpdf(self,auxdata,pars):
def logpdf(self, auxdata, pars):
if self.poisson_data is None:
return 0
tensorlib, _ = get_backend()
poisson_data = tensorlib.gather(auxdata,self.poisson_data)
poisson_rate_base = tensorlib.gather(pars,self.poisson_rate_idc)
poisson_factors = self.poisson_rate_fac
poisson_data = tensorlib.gather(auxdata, self.poisson_data)
poisson_rate_base = tensorlib.gather(pars, self.poisson_rate_idc)
poisson_factors = self.poisson_rate_fac

poisson_rate = tensorlib.product(
tensorlib.stack([poisson_rate_base, poisson_factors]), axis=0)
poisson = tensorlib.poisson_logpdf(poisson_data,poisson_rate)
tensorlib.stack([poisson_rate_base, poisson_factors]), axis=0
)
poisson = tensorlib.poisson_logpdf(poisson_data, poisson_rate)
return tensorlib.sum(poisson)
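The bulk of the constraints.py diff is the same multi-line expansion of the concatenate/astensor chains. The logic they wrap is a batched constraint evaluation: gather the relevant auxdata entries and parameters once, then evaluate a single vectorized logpdf. A numpy sketch of that idea (illustrative only; the index values are made up, and tensorlib.gather is plain fancy indexing here):

# Numpy sketch of the batched normal-constraint evaluation (not pyhf's API).
import numpy as np
from scipy.stats import norm

auxdata = np.array([0.1, -0.2, 0.05])     # observed auxiliary data
pars = np.array([1.0, 0.3, -0.1, 0.02])   # full parameter vector
normal_data_idx = np.array([0, 1, 2])     # auxdata entry per constraint
normal_mean_idx = np.array([1, 2, 3])     # parameter acting as each mean
normal_sigmas = np.array([1.0, 1.0, 1.0])

normal_data = auxdata[normal_data_idx]    # ~ tensorlib.gather(auxdata, ...)
normal_means = pars[normal_mean_idx]      # ~ tensorlib.gather(pars, ...)
logpdf = norm.logpdf(normal_data, loc=normal_means, scale=normal_sigmas).sum()
print(logpdf)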