Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Support for additional cache paths. Made cache an optional flag. Cleanup #138

Merged
merged 4 commits into from
Aug 8, 2018
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion kapitan/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,10 @@ def main():
help='ignore the version from .kapitan',
action='store_true',
default=from_dot_kapitan('compile', 'ignore-version-check', False))
compile_parser.add_argument('--cache-paths', '-c', type=str, nargs='+',
default=from_dot_kapitan('compile', 'cache-paths', []),
metavar='PATH',
help='cache additional paths to .kapitan_cache, default is []')
compile_parser.add_argument('--force-recompile', '-f',
help='force recompilation of all targets, ignores .kapitan_cache',
action='store_true',
Expand Down Expand Up @@ -227,7 +231,7 @@ def main():
args.parallelism, args.targets,
prune=(args.prune), secrets_path=args.secrets_path,
secrets_reveal=args.reveal, indent=args.indent,
force_recompile=args.force_recompile)
cache_paths=args.cache_paths, force_recompile=args.force_recompile)

elif cmd == 'inventory':
if args.verbose:
Expand Down
31 changes: 19 additions & 12 deletions kapitan/targets.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,17 +55,18 @@ def compile_targets(inventory_path, search_paths, output_path, parallel, targets
# temp_path will hold compiled items
temp_path = tempfile.mkdtemp(suffix='.kapitan')

generate_inv_cache_hashes(inventory_path, targets)
additional_cache_paths = kwargs.get('cache_paths')
generate_inv_cache_hashes(inventory_path, targets, additional_cache_paths)

changed_targets = targets
updated_targets = targets
if not kwargs.get('force_recompile') and not targets:
changed_targets = get_changed_targets(inventory_path, output_path)
logger.debug("Changed targets since last compilation: %s", changed_targets)
if len(changed_targets) == 0:
updated_targets = changed_targets(inventory_path, output_path)
logger.debug("Changed targets since last compilation: %s", updated_targets)
if len(updated_targets) == 0:
logger.info("No changes since last compilation.")
return

target_objs = load_target_inventory(inventory_path, changed_targets)
target_objs = load_target_inventory(inventory_path, updated_targets)

pool = multiprocessing.Pool(parallel)
# append "compiled" to output_path so we can safely overwrite it
Expand All @@ -84,8 +85,8 @@ def compile_targets(inventory_path, search_paths, output_path, parallel, targets
os.makedirs(compile_path)

# if '-t' is set on compile or only a few changed, only override selected targets
if changed_targets:
for target in changed_targets:
if updated_targets:
for target in updated_targets:
compile_path_target = os.path.join(compile_path, target)
temp_path_target = os.path.join(temp_path, target)

Expand Down Expand Up @@ -119,7 +120,7 @@ def compile_targets(inventory_path, search_paths, output_path, parallel, targets
logger.debug("Removed %s", temp_path)


def generate_inv_cache_hashes(inventory_path, targets):
def generate_inv_cache_hashes(inventory_path, targets, cache_paths):
"""
generates the hashes for the inventory per target and jsonnet/jinja2 folders for caching purposes
struct: {
Expand Down Expand Up @@ -162,14 +163,20 @@ def generate_inv_cache_hashes(inventory_path, targets):
if os.path.exists(base_folder) and os.path.isdir(base_folder):
cached.inv_cache['folder'][base_folder] = directory_hash(base_folder)

# Cache additional folders set by --cache-paths
for path in cache_paths:
if path not in cached.inv_cache['folder'].keys():
if os.path.exists(path) and os.path.isdir(path):
cached.inv_cache['folder'][path] = directory_hash(path)

# Most commonly changed but not referenced in input_paths
for common in ('lib', 'vendor', 'secrets'):
if common not in cached.inv_cache['folder'].keys():
if os.path.exists(common) and os.path.isdir(common):
cached.inv_cache['folder'][common] = directory_hash(common)


def get_changed_targets(inventory_path, output_path):
def changed_targets(inventory_path, output_path):
"""returns a list of targets that have changed since last compilation"""
targets = []
inv = inventory_reclass(inventory_path)
Expand All @@ -194,7 +201,7 @@ def get_changed_targets(inventory_path, output_path):
if hash != saved_inv_cache['folder'][key]:
logger.debug("%s folder hash changed, recompiling all targets", key)
return targets_list
except Exception as e:
except KeyError as e:
# Errors usually occur when saved_inv_cache doesn't contain a new folder
# Recompile anyway to be safe
return targets_list
Expand All @@ -207,7 +214,7 @@ def get_changed_targets(inventory_path, output_path):
elif cached.inv_cache['inventory'][target]['parameters'] != saved_inv_cache['inventory'][target]['parameters']:
logger.debug("parameters hash changed in %s, recompiling", target)
targets.append(target)
except Exception as e:
except KeyError as e:
# Errors usually occur when saved_inv_cache doesn't contain a new target
# Recompile anyway to be safe
targets.append(target)
Expand Down
6 changes: 4 additions & 2 deletions kapitan/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -309,11 +309,13 @@ def directory_hash(directory):
file_path = os.path.join(root, names)
try:
with open(file_path, 'r') as f:
hash.update(sha256(f.read().encode("UTF-8")).hexdigest().encode("UTF-8"))
file_hash = sha256(f.read().encode("UTF-8"))
hash.update(file_hash.hexdigest().encode("UTF-8"))
except Exception as e:
if isinstance(e, UnicodeDecodeError):
with open(file_path, 'rb') as f:
hash.update(sha256(f.read()).hexdigest().encode("UTF-8"))
binary_file_hash = sha256(f.read())
hash.update(binary_file_hash.hexdigest().encode("UTF-8"))
else:
logger.error("utils.directory_hash failed to open %s: %s", file_path, str(e))
raise
Expand Down