Skip to content

Commit a799749

Browse files
Author: Mahmood Ali (committed)
Merge pull request #5653 from hashicorp/b-s3-virtualbucket-style
docs: update s3 urls to use virtual bucket style
1 parent: b7eed99 · commit: a799749

File tree

10 files changed

+25
-25
lines changed

10 files changed

+25
-25
lines changed

e2e/metrics/input/helloworld.nomad

+2-2
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ job "hello" {
1919
}
2020

2121
artifact {
22-
source = "https://s3.amazonaws.com/nomad-community-demo/hellov1"
22+
source = "https://nomad-community-demo.s3.amazonaws.com/hellov1"
2323
destination = "local/hello"
2424
mode = "file"
2525
}
@@ -47,4 +47,4 @@ job "hello" {
4747
}
4848
}
4949
}
50-
}
50+
}

e2e/terraform/shared/scripts/setup.sh

+1-1
Original file line numberDiff line numberDiff line change
@@ -128,7 +128,7 @@ sudo apt-get install -y openjdk-8-jdk
128128
JAVA_HOME=$(readlink -f /usr/bin/java | sed "s:bin/java::")
129129

130130
# Spark
131-
sudo wget -P /ops/examples/spark https://s3.amazonaws.com/nomad-spark/spark-2.2.0-bin-nomad-0.7.0.tgz
131+
sudo wget -P /ops/examples/spark https://nomad-spark.s3.amazonaws.com/spark-2.2.0-bin-nomad-0.7.0.tgz
132132
sudo tar -xvf /ops/examples/spark/spark-2.2.0-bin-nomad-0.7.0.tgz --directory /ops/examples/spark
133133
sudo mv /ops/examples/spark/spark-2.2.0-bin-nomad-0.7.0 /usr/local/bin/spark
134134
sudo chown -R root:root /usr/local/bin/spark

terraform/examples/spark/README.md

+9-9
Original file line numberDiff line numberDiff line change
@@ -90,8 +90,8 @@ spark-submit \
9090
--conf spark.nomad.cluster.monitorUntil=complete \
9191
--conf spark.eventLog.enabled=true \
9292
--conf spark.eventLog.dir=hdfs://hdfs.service.consul/spark-events \
93-
--conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
94-
https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
93+
--conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
94+
https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
9595
```
9696

9797
### Word count (Java)
@@ -105,8 +105,8 @@ spark-submit \
105105
--conf spark.nomad.cluster.monitorUntil=complete \
106106
--conf spark.eventLog.enabled=true \
107107
--conf spark.eventLog.dir=hdfs://hdfs.service.consul/spark-events \
108-
--conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
109-
https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar \
108+
--conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
109+
https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar \
110110
hdfs://hdfs.service.consul/foo/history.log
111111
```
112112

@@ -121,8 +121,8 @@ spark-submit \
121121
--conf spark.nomad.cluster.monitorUntil=complete \
122122
--conf spark.eventLog.enabled=true \
123123
--conf spark.eventLog.dir=hdfs://hdfs.service.consul/spark-events \
124-
--conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
125-
https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar \
124+
--conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
125+
https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar \
126126
/etc/sudoers hdfs://hdfs.service.consul/foo
127127
```
128128

@@ -134,7 +134,7 @@ Start the shell:
134134
spark-shell \
135135
--master nomad \
136136
--conf spark.executor.instances=4 \
137-
--conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz
137+
--conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz
138138
```
139139

140140
Run a few commands:
@@ -155,7 +155,7 @@ Start the shell:
155155
spark-sql \
156156
--master nomad \
157157
--conf spark.executor.instances=4 \
158-
--conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz jars/spark-sql_2.11-2.1.0-SNAPSHOT.jar
158+
--conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz jars/spark-sql_2.11-2.1.0-SNAPSHOT.jar
159159
```
160160
161161
Run a few commands:
@@ -178,7 +178,7 @@ Start the shell:
178178
pyspark \
179179
--master nomad \
180180
--conf spark.executor.instances=4 \
181-
--conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz
181+
--conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz
182182
```
183183
184184
Run a few commands:

terraform/shared/scripts/setup.sh

+1-1
Original file line numberDiff line numberDiff line change
@@ -154,7 +154,7 @@ sudo apt-get install -y openjdk-8-jdk
154154
JAVA_HOME=$(readlink -f /usr/bin/java | sed "s:bin/java::")
155155

156156
# Spark
157-
sudo wget -P /ops/examples/spark https://s3.amazonaws.com/nomad-spark/spark-2.2.0-bin-nomad-0.7.0.tgz
157+
sudo wget -P /ops/examples/spark https://nomad-spark.s3.amazonaws.com/spark-2.2.0-bin-nomad-0.7.0.tgz
158158
sudo tar -xvf /ops/examples/spark/spark-2.2.0-bin-nomad-0.7.0.tgz --directory /ops/examples/spark
159159
sudo mv /ops/examples/spark/spark-2.2.0-bin-nomad-0.7.0 /usr/local/bin/spark
160160
sudo chown -R root:root /usr/local/bin/spark

website/source/api/json-jobs.html.md

+2-2
Original file line numberDiff line numberDiff line change
@@ -883,7 +883,7 @@ Path based style:
883883
{
884884
"Artifacts": [
885885
{
886-
"GetterSource": "https://s3-us-west-2.amazonaws.com/my-bucket-example/my_app.tar.gz",
886+
"GetterSource": "https://my-bucket-example.s3-us-west-2.amazonaws.com/my_app.tar.gz",
887887
}
888888
]
889889
}
@@ -895,7 +895,7 @@ or to override automatic detection in the URL, use the S3-specific syntax
895895
{
896896
"Artifacts": [
897897
{
898-
"GetterSource": "s3::https://s3-eu-west-1.amazonaws.com/my-bucket-example/my_app.tar.gz",
898+
"GetterSource": "s3::https://my-bucket-example.s3-eu-west-1.amazonaws.com/my_app.tar.gz",
899899
}
900900
]
901901
}

website/source/docs/commands/job/dispatch.html.md.erb

+2-2
Original file line numberDiff line numberDiff line change
@@ -66,8 +66,8 @@ passing in a configuration payload via stdin:
6666
```
6767
$ cat << EOF | nomad job dispatch video-encode -
6868
{
69-
"s3-input": "https://s3-us-west-1.amazonaws.com/video-bucket/cb31dabb1",
70-
"s3-output": "https://s3-us-west-1.amazonaws.com/video-bucket/a149adbe3",
69+
"s3-input": "https://video-bucket.s3-us-west-1.amazonaws.com/cb31dabb1",
70+
"s3-output": "https://video-bucket.s3-us-west-1.amazonaws.com/a149adbe3",
7171
"input-codec": "mp4",
7272
"output-codec": "webm",
7373
"quality": "1080p"

website/source/docs/job-specification/artifact.html.md

+2-2
Original file line numberDiff line numberDiff line change
@@ -155,7 +155,7 @@ This example uses path-based notation on a publicly-accessible bucket:
155155

156156
```hcl
157157
artifact {
158-
source = "https://s3-us-west-2.amazonaws.com/my-bucket-example/my_app.tar.gz"
158+
source = "https://my-bucket-example.s3-us-west-2.amazonaws.com/my_app.tar.gz"
159159
}
160160
```
161161

@@ -176,7 +176,7 @@ To force the S3-specific syntax, use the `s3::` prefix:
176176

177177
```hcl
178178
artifact {
179-
source = "s3::https://s3-eu-west-1.amazonaws.com/my-bucket-example/my_app.tar.gz"
179+
source = "s3::https://my-bucket-example.s3-eu-west-1.amazonaws.com/my_app.tar.gz"
180180
}
181181
```
182182

website/source/guides/analytical-workloads/spark/monitoring.html.md

+2-2
Original file line numberDiff line numberDiff line change
@@ -119,8 +119,8 @@ $ spark-submit \
119119
--conf spark.nomad.cluster.monitorUntil=complete \
120120
--conf spark.eventLog.enabled=true \
121121
--conf spark.eventLog.dir=hdfs://hdfs.service.consul/spark-events \
122-
--conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
123-
https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
122+
--conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
123+
https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
124124
```
125125

126126
## Logs

website/source/guides/analytical-workloads/spark/pre.html.md

+3-3
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ Install Spark:
5555

5656

5757
```shell
58-
$ wget -O - https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
58+
$ wget -O - https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
5959
| sudo tar xz -C /usr/local
6060
$ export PATH=$PATH:/usr/local/spark-2.1.0-bin-nomad/bin
6161
```
@@ -83,8 +83,8 @@ $ spark-submit \
8383
--master nomad \
8484
--deploy-mode cluster \
8585
--conf spark.executor.instances=4 \
86-
--conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
87-
https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
86+
--conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
87+
https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
8888
```
8989

9090
### Using a Docker Image

website/source/guides/analytical-workloads/spark/submit.html.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ application:
4141
```shell
4242
$ spark-submit --class org.apache.spark.examples.SparkPi \
4343
--master nomad \
44-
--conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
44+
--conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
4545
lib/spark-examples*.jar \
4646
10
4747
```

0 commit comments

Comments (0)