add --assume-yes to launch command
nchammas committed Dec 14, 2015
1 parent a90edf9 commit b00fd12
Showing 4 changed files with 18 additions and 9 deletions.
4 changes: 3 additions & 1 deletion CHANGES.md
@@ -3,7 +3,9 @@
 
 ## Unreleased
 
-* Nothing yet.
+### Added
+
+* Added `--assume-yes` option to the `launch` command. Use `--assume-yes` to tell Flintrock to automatically destroy the cluster if there are problems during launch.
 
 
 ## 0.1.0 - 2015-12-11
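For context on the diff below: the new flag uses click's paired boolean syntax, so passing `--assume-yes` sets the value to `True`, while `--no-assume-yes` or omitting the flag leaves it at the `False` default. A minimal standalone sketch of that behavior (the `demo` command here is hypothetical, not Flintrock's actual CLI wiring):

```python
import click


@click.command()
@click.option('--assume-yes/--no-assume-yes', default=False)
def demo(assume_yes):
    # assume_yes arrives as a plain bool; Flintrock uses it to decide
    # whether to prompt before cleaning up a failed launch.
    click.echo("assume_yes={}".format(assume_yes))


if __name__ == '__main__':
    demo()
```

Invoked as `python demo.py --assume-yes` this prints `assume_yes=True`; with no flag it prints `assume_yes=False`.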
19 changes: 12 additions & 7 deletions flintrock/flintrock.py
@@ -467,6 +467,7 @@ def cli(cli_context, config, provider):
               help="Git repository to clone Spark from.",
               default='https://github.com/apache/spark.git',
               show_default=True)
+@click.option('--assume-yes/--no-assume-yes', default=False)
 @click.option('--ec2-key-name')
 @click.option('--ec2-identity-file',
               type=click.Path(exists=True, dir_okay=False),
@@ -495,6 +496,7 @@ def launch(
         spark_version,
         spark_git_commit,
         spark_git_repository,
+        assume_yes,
         ec2_key_name,
         ec2_identity_file,
         ec2_instance_type,
@@ -548,6 +550,7 @@ def launch(
     if cli_context.obj['provider'] == 'ec2':
         return launch_ec2(
             cluster_name=cluster_name, num_slaves=num_slaves, modules=modules,
+            assume_yes=assume_yes,
             key_name=ec2_key_name,
             identity_file=ec2_identity_file,
             instance_type=ec2_instance_type,
@@ -732,6 +735,7 @@ def get_ec2_block_device_map(
 def launch_ec2(
         *,
         cluster_name, num_slaves, modules,
+        assume_yes,
         key_name, identity_file,
         instance_type,
         region,
@@ -952,13 +956,14 @@ def launch_ec2(
             instance_ids=instance_ids)
 
         if cluster_instances:
-            yes = click.confirm(
-                text="Do you want to terminate the {c} instances created by this operation?"
-                    .format(c=len(cluster_instances)),
-                err=True,
-                default=True)
-
-            if yes:
+            if not assume_yes:
+                yes = click.confirm(
+                    text="Do you want to terminate the {c} instances created by this operation?"
+                        .format(c=len(cluster_instances)),
+                    err=True,
+                    default=True)
+
+            if assume_yes or yes:
                 print("Terminating instances...", file=sys.stderr)
                 connection.terminate_instances(
                     instance_ids=[instance.id for instance in cluster_instances])
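The heart of the change is the cleanup path in `launch_ec2`: with `assume_yes` set, Flintrock skips the `click.confirm` prompt and goes straight to terminating the partially launched instances. A condensed sketch of that control flow, pulled out of context (the `terminate_all` callback is a hypothetical stand-in for the real EC2 termination call):

```python
import sys

import click


def cleanup_failed_launch(cluster_instances, assume_yes, terminate_all):
    """Offer to terminate leftover instances, or do it automatically with --assume-yes."""
    if not cluster_instances:
        return

    yes = False
    if not assume_yes:
        yes = click.confirm(
            text="Do you want to terminate the {c} instances created by this operation?"
                 .format(c=len(cluster_instances)),
            err=True,
            default=True)

    if assume_yes or yes:
        print("Terminating instances...", file=sys.stderr)
        terminate_all(cluster_instances)
```

In the committed code `yes` is only assigned when the prompt actually runs; `assume_yes or yes` short-circuits, so the unassigned name is never evaluated. The sketch initializes it explicitly just to keep the example self-contained.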
2 changes: 1 addition & 1 deletion tests/README.md
@@ -53,4 +53,4 @@ py.test tests/test_acceptance.py
 
 Acceptance tests are the most valuable type of test for an orchestration tool like Flintrock, but they also **cost money** (less than $1 for the full test run) and take many minutes to run. Use them judiciously.
 
-Note that in some cases **a failed test run may leave behind running clusters**. You'll need to delete these manually.
+Note that, depending on your changes, **a failed test run may leave behind running clusters**. You'll need to delete these manually.
2 changes: 2 additions & 0 deletions tests/conftest.py
@@ -25,6 +25,7 @@ def launch_cluster(cluster_name, instance_type):
         '--install-spark',
         '--spark-version', SPARK_VERSION,
         '--spark-git-commit', '',
+        '--assume-yes',
         '--ec2-instance-type', instance_type])
     assert p.returncode == 0
 
@@ -82,6 +83,7 @@ def stopped_cluster(request):
         '--num-slaves', '1',
         '--no-install-hdfs',
         '--no-install-spark',
+        '--assume-yes',
         '--ec2-instance-type', 't2.small'])
     assert p.returncode == 0
 
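Both test fixtures now pass `--assume-yes` so the acceptance tests cannot hang on the termination prompt if a launch fails midway. The fixtures appear to invoke the `flintrock` CLI with the flags shown above; a rough sketch of that pattern, assuming `subprocess.run` and a `flintrock` executable on the `PATH` (the helper name is illustrative, not the fixture's actual code):

```python
import subprocess


def launch_test_cluster(cluster_name, instance_type):
    # --assume-yes keeps the run non-interactive: a failed launch
    # terminates its own instances instead of waiting on a prompt.
    p = subprocess.run([
        'flintrock', 'launch', cluster_name,
        '--num-slaves', '1',
        '--no-install-hdfs',
        '--no-install-spark',
        '--assume-yes',
        '--ec2-instance-type', instance_type])
    assert p.returncode == 0
```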
