improved errors, logging and compilation #15

Merged: 2 commits, merged on Jan 15, 2018
50 changes: 22 additions & 28 deletions kapitan/cli.py
@@ -21,15 +21,15 @@
import logging
import os
import sys
from functools import partial
import multiprocessing
import traceback
import yaml

from kapitan.utils import jsonnet_file, PrettyDumper, flatten_dict, searchvar
from kapitan.targets import compile_target_file
from kapitan.targets import compile_targets
from kapitan.resources import search_imports, resource_callbacks, inventory_reclass
from kapitan.version import PROJECT_NAME, DESCRIPTION, VERSION
from kapitan.secrets import secret_gpg_backend, secret_gpg_write, secret_gpg_reveal
from kapitan.errors import KapitanError

logger = logging.getLogger(__name__)

@@ -133,8 +133,7 @@ def main():
ext_vars=ext_vars)
if args.output == 'yaml':
json_obj = json.loads(json_output)
yaml_output = yaml.safe_dump(json_obj, default_flow_style=False)
print yaml_output
yaml.safe_dump(json_obj, sys.stdout, default_flow_style=False)
elif json_output:
print json_output
elif cmd == 'compile':
@@ -148,31 +147,26 @@ def main():
search_path = os.path.abspath(args.search_path)
gpg_obj = secret_gpg_backend()
if args.target_file:
pool = multiprocessing.Pool(args.parallelism)
worker = partial(compile_target_file,
search_path=search_path,
output_path=args.output_path,
prune=(not args.no_prune),
secrets_path=args.secrets_path,
secrets_reveal=args.reveal,
gpg_obj=gpg_obj)
try:
pool.map(worker, args.target_file)
except RuntimeError:
# if compile worker fails, terminate immediately
pool.terminate()
raise
compile_targets(args.target_file, search_path, args.output_path, args.parallelism,
prune=(not args.no_prune), secrets_path=args.secrets_path,
secrets_reveal=args.reveal, gpg_obj=gpg_obj)
else:
logger.error("Nothing to compile")
logger.error("Error: Nothing to compile")
elif cmd == 'inventory':
inv = inventory_reclass(args.inventory_path)
if args.target_name != '':
inv = inv['nodes'][args.target_name]
if args.flat:
inv = flatten_dict(inv)
print yaml.dump(inv, width=10000)
else:
print yaml.dump(inv, Dumper=PrettyDumper, default_flow_style=False)
try:
logging.basicConfig(level=logging.INFO, format="%(message)s")
inv = inventory_reclass(args.inventory_path)
if args.target_name != '':
inv = inv['nodes'][args.target_name]
if args.flat:
inv = flatten_dict(inv)
yaml.dump(inv, sys.stdout, width=10000)
else:
yaml.dump(inv, sys.stdout, Dumper=PrettyDumper, default_flow_style=False)
except Exception as e:
if not isinstance(e, KapitanError):
logger.error("\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
traceback.print_exc()
elif cmd == 'searchvar':
searchvar(args.searchvar, args.inventory_path)
elif cmd == 'secrets':
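A small but easy-to-miss change in cli.py above: instead of building a YAML string and printing it, the new code passes sys.stdout as the stream argument to yaml.safe_dump / yaml.dump. A minimal sketch of the two styles, using made-up data:

```python
import sys
import yaml

json_obj = {"kind": "Deployment", "replicas": 3}  # hypothetical evaluated output

# before: build the whole YAML string in memory, then print it
print(yaml.safe_dump(json_obj, default_flow_style=False))

# after (as in this PR): stream the dump straight to stdout
yaml.safe_dump(json_obj, sys.stdout, default_flow_style=False)
```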
27 changes: 27 additions & 0 deletions kapitan/errors.py
@@ -0,0 +1,27 @@
# Copyright 2017 The Kapitan Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"kapitan error classes"

class KapitanError(Exception):
"generic kapitan error"
pass

class CompileError(KapitanError):
"compile error"
pass

class InventoryError(KapitanError):
"inventory error"
pass
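The point of the new KapitanError base class is that command-level code can treat known failures differently from unexpected ones: lower-level helpers log a readable message and re-raise a KapitanError subclass, and the top level only prints a traceback for exceptions outside the hierarchy. A minimal sketch of that pattern, assuming kapitan is installed (the worker function and target path are illustrative, not part of the PR):

```python
import logging
import traceback

from kapitan.errors import CompileError, KapitanError

logging.basicConfig(format="%(message)s")
logger = logging.getLogger(__name__)

def compile_one(path):
    "illustrative worker: log a readable message, then raise a known error type"
    logger.error("Jsonnet error: failed to compile %s", path)
    raise CompileError(path)

try:
    compile_one("targets/example.jsonnet")  # hypothetical target path
except Exception as e:
    if not isinstance(e, KapitanError):
        # unexpected failure: show the full traceback for debugging
        traceback.print_exc()
    # KapitanError subclasses were already logged with a friendly message,
    # so nothing more is printed for them
```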
39 changes: 24 additions & 15 deletions kapitan/resources.py
@@ -23,10 +23,12 @@
import os
import reclass
import reclass.core
from reclass.errors import ReclassException, NotFoundError
import yaml

from kapitan.utils import render_jinja2_file, memoize
from kapitan import __file__ as kapitan_install_path
from kapitan.errors import CompileError, InventoryError

logger = logging.getLogger(__name__)

@@ -62,12 +64,15 @@ def jinja2_render_file(search_path, name, ctx):
ctx = json.loads(ctx)
_full_path = os.path.join(search_path, name)
logger.debug("jinja2_render_file trying file %s", _full_path)
if os.path.exists(_full_path):
logger.debug("jinja2_render_file found file at %s", _full_path)
return render_jinja2_file(_full_path, ctx)
# default IOError if we reach here
raise IOError("Could not find file %s" % name)

try:
if os.path.exists(_full_path):
logger.debug("jinja2_render_file found file at %s", _full_path)
return render_jinja2_file(_full_path, ctx)
else:
raise IOError("Could not find file %s" % name)
except Exception as e:
logger.error("Jsonnet jinja2 failed to render %s: %s", _full_path, str(e))
raise CompileError(e)

def read_file(search_path, name):
"return content of file in name"
@@ -166,12 +171,16 @@ def inventory_reclass(inventory_path):
if ex.errno == errno.ENOENT:
logger.debug("Using reclass inventory config defaults")

storage = reclass.get_storage(reclass_config['storage_type'], reclass_config['nodes_uri'],
reclass_config['classes_uri'], default_environment='base')
class_mappings = reclass_config.get('class_mappings') # this defaults to None (disabled)
_reclass = reclass.core.Core(storage, class_mappings)

inv = _reclass.inventory()

logger.debug("reclass inventory: %s", inv)
return inv
try:
storage = reclass.get_storage(reclass_config['storage_type'], reclass_config['nodes_uri'],
reclass_config['classes_uri'], default_environment='base')
class_mappings = reclass_config.get('class_mappings') # this defaults to None (disabled)
_reclass = reclass.core.Core(storage, class_mappings)

return _reclass.inventory()
except ReclassException as e:
if isinstance(e, NotFoundError):
logger.error("Inventory reclass error: inventory not found")
else:
logger.error("Inventory reclass error: %s", e.message)
raise InventoryError(e.message)
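The inventory_reclass change above applies the same wrap-and-reraise idea: reclass exceptions are logged as a one-line message and converted into InventoryError, so callers never see a raw reclass traceback. A hedged sketch of how a caller might rely on that (the inventory path is made up):

```python
import sys

from kapitan.errors import InventoryError
from kapitan.resources import inventory_reclass

try:
    inv = inventory_reclass("inventory/")  # hypothetical inventory path
except InventoryError:
    # inventory_reclass already logged a readable "Inventory reclass error: ..." message,
    # so the caller can abort without dumping a traceback
    sys.exit(1)

print(inv["nodes"].keys())
```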
43 changes: 39 additions & 4 deletions kapitan/targets.py
@@ -23,16 +23,50 @@
import json
import re
import shutil
from functools import partial
import multiprocessing
import traceback
import tempfile
import jsonschema
import yaml

from kapitan.resources import search_imports, resource_callbacks, inventory
from kapitan.utils import jsonnet_file, jsonnet_prune, render_jinja2_dir, PrettyDumper
from kapitan.secrets import secret_gpg_raw_read, secret_token_from_tag, secret_token_attributes
from kapitan.secrets import SECRET_TOKEN_TAG_PATTERN, secret_gpg_read
from kapitan.errors import KapitanError

logger = logging.getLogger(__name__)

def compile_targets(target_files, search_path, output_path, parallel, **kwargs):
"""
Loads files in target_files and runs compile_target_file() on a
multiprocessing pool with parallel number of processes.
kwargs are passed to compile_target_file()
"""
# temp_path will hold compiled items
temp_path = tempfile.mkdtemp(suffix='.kapitan')
pool = multiprocessing.Pool(parallel)
worker = partial(compile_target_file, search_path=search_path, output_path=temp_path, **kwargs)
try:
pool.map(worker, target_files)
if os.path.exists(output_path):
shutil.rmtree(output_path)
# on success, copy temp_path into output_path
shutil.copytree(temp_path, output_path)
logger.debug("Copied %s into %s", temp_path, output_path)
except Exception as e:
# if compile worker fails, terminate immediately
pool.terminate()
pool.join()
logger.debug("Compile pool terminated")
# only print traceback for errors we don't know about
if not isinstance(e, KapitanError):
logger.error("\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
traceback.print_exc()
finally:
shutil.rmtree(temp_path)
logger.debug("Removed %s", temp_path)

def compile_target_file(target_file, search_path, output_path, **kwargs):
"""
@@ -68,6 +102,7 @@ def compile_target_file(target_file, search_path, output_path, **kwargs):
compile_jinja2(compile_path_sp, ctx, _output_path, **kwargs)
else:
raise IOError("Path not found in search_path: %s" % obj["path"])
logger.info("Compiled %s", target_file)


def compile_jinja2(path, context, output_path, **kwargs):
@@ -95,7 +130,7 @@ def compile_jinja2(path, context, output_path, **kwargs):
fp.write(item_value["content"])
mode = item_value["mode"]
os.chmod(full_item_path, mode)
logger.info("Wrote %s with mode %.4o", full_item_path, mode)
logger.debug("Wrote %s with mode %.4o", full_item_path, mode)


def compile_jsonnet(file_path, output_path, search_path, ext_vars, **kwargs):
@@ -123,21 +158,21 @@ def compile_jsonnet(file_path, output_path, search_path, ext_vars, **kwargs):

if prune:
json_output = jsonnet_prune(json_output)
logger.debug("Pruned output")
logger.debug("Pruned output for: %s", file_path)
for item_key, item_value in json.loads(json_output).iteritems():
# write each item to disk
if output == 'json':
file_path = os.path.join(output_path, '%s.%s' % (item_key, output))
with CompiledFile(file_path, mode="w", secrets_path=secrets_path,
secrets_reveal=secrets_reveal, gpg_obj=gpg_obj) as fp:
json.dump(item_value, fp, indent=4, sort_keys=True)
logger.info("Wrote %s", file_path)
logger.debug("Wrote %s", file_path)
elif output == 'yaml':
file_path = os.path.join(output_path, '%s.%s' % (item_key, "yml"))
with CompiledFile(file_path, mode="w", secrets_path=secrets_path,
secrets_reveal=secrets_reveal, gpg_obj=gpg_obj) as fp:
yaml.dump(item_value, stream=fp, Dumper=PrettyDumper, default_flow_style=False)
logger.info("Wrote %s", file_path)
logger.debug("Wrote %s", file_path)
else:
raise ValueError('output is neither "json" or "yaml"')

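For reference, a hedged sketch of how the new compile_targets entry point is driven (the target files, paths and parallelism below are illustrative; cli.py above is the real caller). Everything is compiled into a temporary directory first, and output_path is only replaced once all workers succeed:

```python
from kapitan.secrets import secret_gpg_backend
from kapitan.targets import compile_targets

if __name__ == "__main__":  # guard needed because compile_targets spawns a multiprocessing pool
    # hypothetical target files and settings; in this PR, cli.py derives them from argparse
    target_files = ["targets/app1.jsonnet", "targets/app2.jsonnet"]

    compile_targets(target_files,
                    search_path=".",
                    output_path="compiled",
                    parallel=4,
                    prune=True,
                    secrets_path="secrets/",
                    secrets_reveal=False,
                    gpg_obj=secret_gpg_backend())
```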
28 changes: 20 additions & 8 deletions kapitan/utils.py
@@ -20,11 +20,13 @@
import logging
import os
import stat
import collections
import jinja2
import _jsonnet as jsonnet
import collections
import yaml

from kapitan.errors import CompileError


logger = logging.getLogger(__name__)

@@ -64,18 +66,25 @@ def render_jinja2_dir(path, context):
Returns a dict where the key is the filename (with subpath)
and value is a dict with content and mode
Empty paths will not be rendered
Ignores hidden files (.filename)
"""
rendered = {}
for root, _, files in os.walk(path):
for f in files:
if f.startswith('.'):
logger.debug('render_jinja2_dir: ignoring file %s', f)
continue
render_path = os.path.join(root, f)
logger.debug("render_jinja2_dir rendering %s with context %s",
render_path, context)
logger.debug("render_jinja2_dir rendering %s", render_path)
# get subpath and filename, strip any leading/trailing /
name = render_path[len(os.path.commonprefix([root, path])):].strip('/')
rendered[name] = {"content": render_jinja2_file(render_path, context),
"mode": file_mode(render_path)
}
try:
rendered[name] = {"content": render_jinja2_file(render_path, context),
"mode": file_mode(render_path)
}
except Exception as e:
logger.error("Jinja2 error: failed to render %s: %s", render_path, str(e))
raise CompileError(e)
return rendered


@@ -90,8 +99,11 @@ def jsonnet_file(file_path, **kwargs):
Evaluate file_path jsonnet file.
kwargs are documented in http://jsonnet.org/implementation/bindings.html
"""
return jsonnet.evaluate_file(file_path, **kwargs)

try:
return jsonnet.evaluate_file(file_path, **kwargs)
except Exception as e:
logger.error("Jsonnet error: failed to compile %s:\n %s", file_path, str(e))
raise CompileError(e)

def jsonnet_prune(jsonnet_str):
"Returns a pruned jsonnet_str"
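Lastly, a hedged sketch of the jsonnet_file wrapper after this change: a bad jsonnet file now surfaces as a one-line "Jsonnet error" log plus a CompileError rather than a raw traceback from the bindings. The file name and ext_vars are illustrative:

```python
from kapitan.errors import CompileError
from kapitan.utils import jsonnet_file

try:
    # kwargs are passed through to jsonnet.evaluate_file (see the jsonnet Python bindings docs)
    json_output = jsonnet_file("targets/example.jsonnet",  # hypothetical jsonnet file
                               ext_vars={"target": "example"})
    print(json_output)
except CompileError:
    # the wrapper already logged "Jsonnet error: failed to compile ...",
    # so the failure can be handled here without re-printing it
    pass
```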