Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Support for additional cache paths. Made cache an optional flag. Cleanup #138

Merged
merged 4 commits into the base branch from the source branch
Aug 8, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 9 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -286,8 +286,9 @@ usage: kapitan compile [-h] [--search-paths JPATH [JPATH ...]] [--verbose]
[--prune] [--quiet] [--output-path PATH]
[--targets TARGET [TARGET ...]] [--parallelism INT]
[--indent INT] [--secrets-path SECRETS_PATH] [--reveal]
[--inventory-path INVENTORY_PATH]
[--ignore-version-check] [--force-recompile]
[--inventory-path INVENTORY_PATH] [--cache]
[--cache-paths PATH [PATH ...]]
[--ignore-version-check]

optional arguments:
-h, --help show this help message and exit
Expand All @@ -297,7 +298,7 @@ optional arguments:
--prune prune jsonnet output
--quiet set quiet mode, only critical output
--output-path PATH set output path, default is "."
--targets TARGETS [TARGETS ...], -t TARGETS [TARGETS ...]
--targets TARGET [TARGET ...], -t TARGET [TARGET ...]
targets to compile, default is all
--parallelism INT, -p INT
Number of concurrent compile processes, default is 4
Expand All @@ -308,10 +309,13 @@ optional arguments:
data)
--inventory-path INVENTORY_PATH
set inventory path, default is "./inventory"
--cache, -c enable compilation caching to .kapitan_cache, default
is False
--cache-paths PATH [PATH ...]
cache additional paths to .kapitan_cache, default is
[]
--ignore-version-check
ignore the version from .kapitan
--force-recompile, -f
force recompilation of all targets, ignores .kapitan_cache
```

These parameters can also be defined in a local `.kapitan` file, for example:
Expand Down
13 changes: 0 additions & 13 deletions examples/kubernetes/compiled/.kapitan_cache

This file was deleted.

17 changes: 10 additions & 7 deletions kapitan/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,15 +80,10 @@ def main():
default=from_dot_kapitan('compile', 'output-path', '.'),
metavar='PATH',
help='set output path, default is "."')
compile_parser_subgroup = compile_parser.add_mutually_exclusive_group()
compile_parser_subgroup.add_argument('--targets', '-t', help='targets to compile, default is all',
compile_parser.add_argument('--targets', '-t', help='targets to compile, default is all',
type=str, nargs='+',
default=from_dot_kapitan('compile', 'targets', []),
metavar='TARGET')
compile_parser_subgroup.add_argument('--force-recompile', '-f',
help='force recompilation of all targets, ignores .kapitan_cache',
action='store_true',
default=from_dot_kapitan('compile', 'force-recompile', False))
compile_parser.add_argument('--parallelism', '-p', type=int,
default=from_dot_kapitan('compile', 'parallelism', 4),
metavar='INT',
Expand All @@ -106,6 +101,14 @@ def main():
compile_parser.add_argument('--inventory-path',
default=from_dot_kapitan('compile', 'inventory-path', './inventory'),
help='set inventory path, default is "./inventory"')
compile_parser.add_argument('--cache', '-c',
help='enable compilation caching to .kapitan_cache, default is False',
action='store_true',
default=from_dot_kapitan('compile', 'cache', False))
compile_parser.add_argument('--cache-paths', type=str, nargs='+',
default=from_dot_kapitan('compile', 'cache-paths', []),
metavar='PATH',
help='cache additional paths to .kapitan_cache, default is []')
compile_parser.add_argument('--ignore-version-check',
help='ignore the version from .kapitan',
action='store_true',
Expand Down Expand Up @@ -228,7 +231,7 @@ def main():
args.parallelism, args.targets,
prune=(args.prune), secrets_path=args.secrets_path,
secrets_reveal=args.reveal, indent=args.indent,
force_recompile=args.force_recompile)
cache=args.cache, cache_paths=args.cache_paths)

elif cmd == 'inventory':
if args.verbose:
Expand Down
49 changes: 31 additions & 18 deletions kapitan/targets.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,17 +55,20 @@ def compile_targets(inventory_path, search_paths, output_path, parallel, targets
# temp_path will hold compiled items
temp_path = tempfile.mkdtemp(suffix='.kapitan')

generate_inv_cache_hashes(inventory_path, targets)
updated_targets = targets
# If --cache is set
if kwargs.get('cache'):
additional_cache_paths = kwargs.get('cache_paths')
generate_inv_cache_hashes(inventory_path, targets, additional_cache_paths)

changed_targets = targets
if not kwargs.get('force_recompile') and not targets:
changed_targets = get_changed_targets(inventory_path, output_path)
logger.debug("Changed targets since last compilation: %s", changed_targets)
if len(changed_targets) == 0:
logger.info("No changes since last compilation.")
return
if not targets:
updated_targets = changed_targets(inventory_path, output_path)
logger.debug("Changed targets since last compilation: %s", updated_targets)
if len(updated_targets) == 0:
logger.info("No changes since last compilation.")
return

target_objs = load_target_inventory(inventory_path, changed_targets)
target_objs = load_target_inventory(inventory_path, updated_targets)

pool = multiprocessing.Pool(parallel)
# append "compiled" to output_path so we can safely overwrite it
Expand All @@ -84,8 +87,8 @@ def compile_targets(inventory_path, search_paths, output_path, parallel, targets
os.makedirs(compile_path)

# if '-t' is set on compile or only a few changed, only override selected targets
if changed_targets:
for target in changed_targets:
if updated_targets:
for target in updated_targets:
compile_path_target = os.path.join(compile_path, target)
temp_path_target = os.path.join(temp_path, target)

Expand Down Expand Up @@ -119,7 +122,7 @@ def compile_targets(inventory_path, search_paths, output_path, parallel, targets
logger.debug("Removed %s", temp_path)


def generate_inv_cache_hashes(inventory_path, targets):
def generate_inv_cache_hashes(inventory_path, targets, cache_paths):
"""
generates the hashes for the inventory per target and jsonnet/jinja2 folders for caching purposes
struct: {
Expand All @@ -142,9 +145,13 @@ def generate_inv_cache_hashes(inventory_path, targets):

if targets:
for target in targets:
cached.inv_cache['inventory'][target] = {}
cached.inv_cache['inventory'][target]['classes'] = dictionary_hash(inv['nodes'][target]['classes'])
cached.inv_cache['inventory'][target]['parameters'] = dictionary_hash(inv['nodes'][target]['parameters'])
try:
cached.inv_cache['inventory'][target] = {}
cached.inv_cache['inventory'][target]['classes'] = dictionary_hash(inv['nodes'][target]['classes'])
cached.inv_cache['inventory'][target]['parameters'] = dictionary_hash(inv['nodes'][target]['parameters'])
except KeyError as e:
logger.error("'%s' target not found", target)
raise
else:
for target in inv['nodes']:
cached.inv_cache['inventory'][target] = {}
Expand All @@ -162,14 +169,20 @@ def generate_inv_cache_hashes(inventory_path, targets):
if os.path.exists(base_folder) and os.path.isdir(base_folder):
cached.inv_cache['folder'][base_folder] = directory_hash(base_folder)

# Cache additional folders set by --cache-paths
for path in cache_paths:
if path not in cached.inv_cache['folder'].keys():
if os.path.exists(path) and os.path.isdir(path):
cached.inv_cache['folder'][path] = directory_hash(path)

# Most commonly changed but not referenced in input_paths
for common in ('lib', 'vendor', 'secrets'):
if common not in cached.inv_cache['folder'].keys():
if os.path.exists(common) and os.path.isdir(common):
cached.inv_cache['folder'][common] = directory_hash(common)


def get_changed_targets(inventory_path, output_path):
def changed_targets(inventory_path, output_path):
"""returns a list of targets that have changed since last compilation"""
targets = []
inv = inventory_reclass(inventory_path)
Expand All @@ -194,7 +207,7 @@ def get_changed_targets(inventory_path, output_path):
if hash != saved_inv_cache['folder'][key]:
logger.debug("%s folder hash changed, recompiling all targets", key)
return targets_list
except Exception as e:
except KeyError as e:
# Errors usually occur when saved_inv_cache doesn't contain a new folder
# Recompile anyway to be safe
return targets_list
Expand All @@ -207,7 +220,7 @@ def get_changed_targets(inventory_path, output_path):
elif cached.inv_cache['inventory'][target]['parameters'] != saved_inv_cache['inventory'][target]['parameters']:
logger.debug("parameters hash changed in %s, recompiling", target)
targets.append(target)
except Exception as e:
except KeyError as e:
# Errors usually occur when saved_inv_cache doesn't contain a new target
# Recompile anyway to be safe
targets.append(target)
Expand Down
6 changes: 4 additions & 2 deletions kapitan/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -309,11 +309,13 @@ def directory_hash(directory):
file_path = os.path.join(root, names)
try:
with open(file_path, 'r') as f:
hash.update(sha256(f.read().encode("UTF-8")).hexdigest().encode("UTF-8"))
file_hash = sha256(f.read().encode("UTF-8"))
hash.update(file_hash.hexdigest().encode("UTF-8"))
except Exception as e:
if isinstance(e, UnicodeDecodeError):
with open(file_path, 'rb') as f:
hash.update(sha256(f.read()).hexdigest().encode("UTF-8"))
binary_file_hash = sha256(f.read())
hash.update(binary_file_hash.hexdigest().encode("UTF-8"))
else:
logger.error("utils.directory_hash failed to open %s: %s", file_path, str(e))
raise
Expand Down
2 changes: 1 addition & 1 deletion tests/test_compile.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ def setUp(self):
os.chdir(os.getcwd() + '/examples/kubernetes/')

def test_compile(self):
sys.argv = ["kapitan", "compile", "-f"]
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think we should have a test for the cache functionality as well.

Maybe 2 separate tests:

  • kapitan compile
  • kapitan compile --cache

Copy link
Member Author

@adrianchifor adrianchifor Aug 7, 2018

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Will write a test in another PR

sys.argv = ["kapitan", "compile"]
main()
compiled_dir_hash = directory_hash(os.getcwd() + '/compiled')
test_compiled_dir_hash = directory_hash(os.getcwd() + '/../../tests/test_kubernetes_compiled')
Expand Down
13 changes: 0 additions & 13 deletions tests/test_kubernetes_compiled/.kapitan_cache

This file was deleted.