Skip to content

Commit

Permalink
Merge pull request #54 from CALeDNA/develop
Browse files Browse the repository at this point in the history
reduce aws egress
  • Loading branch information
hector-baez authored Feb 18, 2024
2 parents 01904d3 + cdab718 commit ba608d7
Show file tree
Hide file tree
Showing 2 changed files with 134 additions and 46 deletions.
131 changes: 95 additions & 36 deletions tronko/assign/assign.sh
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ PAIRED=""
UNPAIRED_F=""
UNPAIRED_R=""
LCA=5
VARS="/vars/crux_vars.sh"
while getopts "i:p:r:123" opt; do
case $opt in
i) PROJECTID="$OPTARG"
Expand All @@ -24,7 +25,7 @@ while getopts "i:p:r:123" opt; do
esac
done

source /vars/crux_vars.sh # gets $RUNID and $IPADDRESS
source $VARS # gets $RUNID and $IPADDRESS

# source /vars/crux_vars.sh # get tronko db $RUNID
mkdir $PROJECTID-$PRIMER $PROJECTID-$PRIMER-rc
Expand Down Expand Up @@ -174,30 +175,53 @@ removeProcessedFiles() {
fi
}

# Switch the active AWS CLI identity by exporting the standard credential
# environment variables. Used to flip between the AWS S3 account and the
# JS2 (Jetstream2) object store before each group of `aws s3` calls.
# Arguments:
#   $1 - access key ID
#   $2 - secret access key
#   $3 - default region
# Exits with an error (via ${n:?}) if any argument is empty/unset, instead
# of silently exporting blank credentials (which previously also caused the
# remaining args to shift when callers passed unset, unquoted variables).
switchAWSCreds() {
    : "${1:?switchAWSCreds: missing access key ID}"
    : "${2:?switchAWSCreds: missing secret access key}"
    : "${3:?switchAWSCreds: missing default region}"
    export AWS_ACCESS_KEY_ID="$1"
    export AWS_SECRET_ACCESS_KEY="$2"
    export AWS_DEFAULT_REGION="$3"
}

# Fetch the JS2 object-store credentials from AWS Secrets Manager and export
# every key/value pair in the secret into this shell (JS2_ACCESS_KEY_ID,
# JS2_SECRET_ACCESS_KEY, JS2_BUCKET, ...). Secrets Manager lives on AWS, so
# the S3 credentials must be active first.
switchAWSCreds "$S3_ACCESS_KEY_ID" "$S3_SECRET_ACCESS_KEY" "$S3_DEFAULT_REGION"

# get js2 credentials; the secret id depends on the deploy branch
if [[ $BRANCH == "master" ]]; then
    secret_id="prod/trex/qcassign"
else
    secret_id="staging/trex/qcassign"
fi
secret_json=$(aws secretsmanager get-secret-value --secret-id "$secret_id" --query SecretString --output text) \
    || { echo "failed to fetch secret $secret_id" >&2; exit 1; }
# jq's @sh shell-quotes each value so secrets containing spaces or shell
# metacharacters survive the export; sourcing from process substitution
# avoids writing credentials to a file on disk.
source <(printf '%s' "$secret_json" | jq -r 'to_entries|map("export \(.key)=\(.value|tostring|@sh)")|.[]')


if [ "${PAIRED}" = "TRUE" ]
then
# Set creds to js2
switchAWSCreds $JS2_ACCESS_KEY_ID $JS2_SECRET_ACCESS_KEY $JS2_DEFAULT_REGION

# download tronko database
aws s3 sync s3://$AWS_BUCKET/CruxV2/$RUNID/$PRIMER/tronko/ $PROJECTID-$PRIMER/tronkodb/ --exclude "*" --include "$PRIMER*" --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 cp s3://$AWS_BUCKET/CruxV2/$RUNID/$PRIMER/tronko/reference_tree.txt.gz $PROJECTID-$PRIMER/tronkodb/reference_tree.txt.gz --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 sync s3://$JS2_BUCKET/CruxV2/$RUNID/$PRIMER/tronko/ $PROJECTID-$PRIMER/tronkodb/ --exclude "*" --include "$PRIMER*" --no-progress --endpoint-url $JS2_ENDPOINT
aws s3 cp s3://$JS2_BUCKET/CruxV2/$RUNID/$PRIMER/tronko/reference_tree.txt.gz $PROJECTID-$PRIMER/tronkodb/reference_tree.txt.gz --no-progress --endpoint-url $JS2_ENDPOINT

# download old assign files
aws s3 sync s3://$AWS_BUCKET/projects/$PROJECTID/assign/$PRIMER/paired $PROJECTID-$PRIMER/old --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 sync s3://$JS2_BUCKET/projects/$PROJECTID/assign/$PRIMER/paired $PROJECTID-$PRIMER/old --no-progress --endpoint-url $JS2_ENDPOINT
# copy to rc
cp -r "$PROJECTID-$PRIMER/old" "$PROJECTID-$PRIMER-rc/old"


# download QC sample paired files
aws s3 sync s3://$AWS_BUCKET/projects/$PROJECTID/QC/$PRIMER/paired/ $PROJECTID-$PRIMER/paired/ --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 sync s3://$JS2_BUCKET/projects/$PROJECTID/QC/$PRIMER/paired/ $PROJECTID-$PRIMER/paired/ --no-progress --endpoint-url $JS2_ENDPOINT

removeProcessedFiles "$PROJECTID" "$PRIMER" "paired_F" "F" "paired"

# upload new checksum_F
aws s3 cp $PROJECTID-$PRIMER/old/checksums_F.txt s3://$AWS_BUCKET/projects/$PROJECTID/assign/$PRIMER/paired/checksums_F.txt --endpoint-url $AWS_ENDPOINT
aws s3 cp $PROJECTID-$PRIMER/old/checksums_F.txt s3://$JS2_BUCKET/projects/$PROJECTID/assign/$PRIMER/paired/checksums_F.txt --endpoint-url $JS2_ENDPOINT

removeProcessedFiles "$PROJECTID" "$PRIMER" "paired_R" "R" "paired"

# upload new checksum_R
aws s3 cp $PROJECTID-$PRIMER/old/checksums_R.txt s3://$AWS_BUCKET/projects/$PROJECTID/assign/$PRIMER/paired/checksums_R.txt --endpoint-url $AWS_ENDPOINT
aws s3 cp $PROJECTID-$PRIMER/old/checksums_R.txt s3://$JS2_BUCKET/projects/$PROJECTID/assign/$PRIMER/paired/checksums_R.txt --endpoint-url $JS2_ENDPOINT

# create ASV files
python3 /mnt/asv.py --dir $PROJECTID-$PRIMER/paired --out $PROJECTID-$PRIMER/$PROJECTID-$PRIMER-paired_F.asv --primer $PRIMER --paired
Expand Down Expand Up @@ -276,7 +300,10 @@ then
fi

# upload output
aws s3 sync $PROJECTID-$PRIMER/ s3://$AWS_BUCKET/projects/$PROJECTID/assign/$PRIMER/paired/ --exclude "*" --include "$PROJECTID-$PRIMER-paired*" --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 sync $PROJECTID-$PRIMER/ s3://$JS2_BUCKET/projects/$PROJECTID/assign/$PRIMER/paired/ --exclude "*" --include "$PROJECTID-$PRIMER-paired*" --no-progress --endpoint-url $JS2_ENDPOINT
# upload to aws
switchAWSCreds $S3_ACCESS_KEY_ID $S3_SECRET_ACCESS_KEY $S3_DEFAULT_REGION
aws s3 sync $PROJECTID-$PRIMER/ s3://$S3_BUCKET/projects/$PROJECTID/assign/$PRIMER/paired/ --exclude "*" --include "$PROJECTID-$PRIMER-paired*" --no-progress --endpoint-url $S3_ENDPOINT
else
echo "v2 (rc) has the highest count: $count_2"
# rename filtered files
Expand Down Expand Up @@ -311,7 +338,10 @@ then
fi

# upload output
aws s3 sync $PROJECTID-$PRIMER-rc/ s3://$AWS_BUCKET/projects/$PROJECTID/assign/$PRIMER/paired/ --exclude "*" --include "$PROJECTID-$PRIMER-paired*" --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 sync $PROJECTID-$PRIMER-rc/ s3://$JS2_BUCKET/projects/$PROJECTID/assign/$PRIMER/paired/ --exclude "*" --include "$PROJECTID-$PRIMER-paired*" --no-progress --endpoint-url $JS2_ENDPOINT
# upload to aws
switchAWSCreds $S3_ACCESS_KEY_ID $S3_SECRET_ACCESS_KEY $S3_DEFAULT_REGION
aws s3 sync $PROJECTID-$PRIMER-rc/ s3://$S3_BUCKET/projects/$PROJECTID/assign/$PRIMER/paired/ --exclude "*" --include "$PROJECTID-$PRIMER-paired*" --no-progress --endpoint-url $S3_ENDPOINT
fi

# cleanup
Expand All @@ -320,20 +350,23 @@ fi

if [ "${UNPAIRED_F}" = "TRUE" ]
then
# Set creds to js2
switchAWSCreds $JS2_ACCESS_KEY_ID $JS2_SECRET_ACCESS_KEY $JS2_DEFAULT_REGION

# download tronko database
aws s3 sync s3://$AWS_BUCKET/CruxV2/$RUNID/$PRIMER/tronko/ $PROJECTID-$PRIMER/tronkodb/ --exclude "*" --include "$PRIMER*" --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 cp s3://$AWS_BUCKET/CruxV2/$RUNID/$PRIMER/tronko/reference_tree.txt.gz $PROJECTID-$PRIMER/tronkodb/reference_tree.txt.gz --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 sync s3://$JS2_BUCKET/CruxV2/$RUNID/$PRIMER/tronko/ $PROJECTID-$PRIMER/tronkodb/ --exclude "*" --include "$PRIMER*" --no-progress --endpoint-url $JS2_ENDPOINT
aws s3 cp s3://$JS2_BUCKET/CruxV2/$RUNID/$PRIMER/tronko/reference_tree.txt.gz $PROJECTID-$PRIMER/tronkodb/reference_tree.txt.gz --no-progress --endpoint-url $JS2_ENDPOINT

# download old assign files
aws s3 sync s3://$AWS_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_F $PROJECTID-$PRIMER/old --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 sync s3://$JS2_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_F $PROJECTID-$PRIMER/old --no-progress --endpoint-url $JS2_ENDPOINT

# download QC sample unpaired_F files
aws s3 sync s3://$AWS_BUCKET/projects/$PROJECTID/QC/$PRIMER/unpaired_F/ $PROJECTID-$PRIMER/unpaired_F/ --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 sync s3://$JS2_BUCKET/projects/$PROJECTID/QC/$PRIMER/unpaired_F/ $PROJECTID-$PRIMER/unpaired_F/ --no-progress --endpoint-url $JS2_ENDPOINT

removeProcessedFiles "$PROJECTID" "$PRIMER" "unpaired_F" "F" "unpaired_F"

# upload new checksum_F
aws s3 cp $PROJECTID-$PRIMER/old/checksums.txt s3://$AWS_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_F/checksums.txt --endpoint-url $AWS_ENDPOINT
aws s3 cp $PROJECTID-$PRIMER/old/checksums.txt s3://$JS2_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_F/checksums.txt --endpoint-url $JS2_ENDPOINT

# create ASV files
python3 /mnt/asv.py --dir $PROJECTID-$PRIMER/unpaired_F/ --out $PROJECTID-$PRIMER/$PROJECTID-$PRIMER-unpaired_F.asv --primer $PRIMER --unpairedf
Expand Down Expand Up @@ -382,7 +415,10 @@ then
fi

# upload output
aws s3 sync $PROJECTID-$PRIMER/ s3://$AWS_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_F --exclude "*" --include "$PROJECTID-$PRIMER-unpaired_F*" --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 sync $PROJECTID-$PRIMER/ s3://$JS2_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_F --exclude "*" --include "$PROJECTID-$PRIMER-unpaired_F*" --no-progress --endpoint-url $JS2_ENDPOINT
# upload to aws
switchAWSCreds $S3_ACCESS_KEY_ID $S3_SECRET_ACCESS_KEY $S3_DEFAULT_REGION
aws s3 sync $PROJECTID-$PRIMER/ s3://$S3_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_F --exclude "*" --include "$PROJECTID-$PRIMER-unpaired_F*" --no-progress --endpoint-url $S3_ENDPOINT
else
echo "v2 (rc) has the highest count: $count_2"

Expand All @@ -405,9 +441,13 @@ then
fi

# upload output
aws s3 sync $PROJECTID-$PRIMER/ s3://$AWS_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_F --exclude "*" --include "$PROJECTID-$PRIMER-unpaired_F*" --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 sync $PROJECTID-$PRIMER/ s3://$JS2_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_F --exclude "*" --include "$PROJECTID-$PRIMER-unpaired_F*" --no-progress --endpoint-url $JS2_ENDPOINT
aws s3 cp $PROJECTID-$PRIMER-rc/$PROJECTID-$PRIMER-unpaired_F.txt s3://$JS2_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_F/$PROJECTID-$PRIMER-unpaired_F.txt --no-progress --endpoint-url $JS2_ENDPOINT

aws s3 cp $PROJECTID-$PRIMER-rc/$PROJECTID-$PRIMER-unpaired_F.txt s3://$AWS_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_F/$PROJECTID-$PRIMER-unpaired_F.txt --no-progress --endpoint-url $AWS_ENDPOINT
# upload to aws
switchAWSCreds $S3_ACCESS_KEY_ID $S3_SECRET_ACCESS_KEY $S3_DEFAULT_REGION
aws s3 sync $PROJECTID-$PRIMER/ s3://$S3_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_F --exclude "*" --include "$PROJECTID-$PRIMER-unpaired_F*" --no-progress --endpoint-url $S3_ENDPOINT
aws s3 cp $PROJECTID-$PRIMER-rc/$PROJECTID-$PRIMER-unpaired_F.txt s3://$S3_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_F/$PROJECTID-$PRIMER-unpaired_F.txt --no-progress --endpoint-url $S3_ENDPOINT
fi

# cleanup
Expand All @@ -416,20 +456,23 @@ fi

if [ "${UNPAIRED_R}" = "TRUE" ]
then
# Set creds to js2
switchAWSCreds $JS2_ACCESS_KEY_ID $JS2_SECRET_ACCESS_KEY $JS2_DEFAULT_REGION

# download tronko database
aws s3 sync s3://$AWS_BUCKET/CruxV2/$RUNID/$PRIMER/tronko/ $PROJECTID-$PRIMER/tronkodb/ --exclude "*" --include "$PRIMER*" --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 cp s3://$AWS_BUCKET/CruxV2/$RUNID/$PRIMER/tronko/reference_tree.txt.gz $PROJECTID-$PRIMER/tronkodb/reference_tree.txt.gz --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 sync s3://$JS2_BUCKET/CruxV2/$RUNID/$PRIMER/tronko/ $PROJECTID-$PRIMER/tronkodb/ --exclude "*" --include "$PRIMER*" --no-progress --endpoint-url $JS2_ENDPOINT
aws s3 cp s3://$JS2_BUCKET/CruxV2/$RUNID/$PRIMER/tronko/reference_tree.txt.gz $PROJECTID-$PRIMER/tronkodb/reference_tree.txt.gz --no-progress --endpoint-url $JS2_ENDPOINT

# download old assign files
aws s3 sync s3://$AWS_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_R $PROJECTID-$PRIMER/old --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 sync s3://$JS2_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_R $PROJECTID-$PRIMER/old --no-progress --endpoint-url $JS2_ENDPOINT

# download QC sample unpaired_R
aws s3 sync s3://$AWS_BUCKET/projects/$PROJECTID/QC/$PRIMER/unpaired_R/ $PROJECTID-$PRIMER/unpaired_R/ --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 sync s3://$JS2_BUCKET/projects/$PROJECTID/QC/$PRIMER/unpaired_R/ $PROJECTID-$PRIMER/unpaired_R/ --no-progress --endpoint-url $JS2_ENDPOINT

removeProcessedFiles "$PROJECTID" "$PRIMER" "unpaired_R" "R" "unpaired_R"

# upload new checksum_R
aws s3 cp $PROJECTID-$PRIMER/old/checksums.txt s3://$AWS_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_R/checksums.txt --endpoint-url $AWS_ENDPOINT
aws s3 cp $PROJECTID-$PRIMER/old/checksums.txt s3://$JS2_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_R/checksums.txt --endpoint-url $JS2_ENDPOINT

# create ASV files
python3 /mnt/asv.py --dir $PROJECTID-$PRIMER/unpaired_R --out $PROJECTID-$PRIMER/$PROJECTID-$PRIMER-unpaired_R.asv --primer $PRIMER --unpairedr
Expand Down Expand Up @@ -478,7 +521,11 @@ then
fi

# upload assign output
aws s3 sync $PROJECTID-$PRIMER/ s3://$AWS_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_R --exclude "*" --include "$PROJECTID-$PRIMER-unpaired_R*" --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 sync $PROJECTID-$PRIMER/ s3://$JS2_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_R --exclude "*" --include "$PROJECTID-$PRIMER-unpaired_R*" --no-progress --endpoint-url $JS2_ENDPOINT

# upload to aws
switchAWSCreds $S3_ACCESS_KEY_ID $S3_SECRET_ACCESS_KEY $S3_DEFAULT_REGION
aws s3 sync $PROJECTID-$PRIMER/ s3://$S3_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_R --exclude "*" --include "$PROJECTID-$PRIMER-unpaired_R*" --no-progress --endpoint-url $S3_ENDPOINT
else
echo "v2 (rc) has the highest count: $count_2"

Expand All @@ -501,18 +548,26 @@ then
fi

# upload assign output
aws s3 sync $PROJECTID-$PRIMER/ s3://$AWS_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_R --exclude "*" --include "$PROJECTID-$PRIMER-unpaired_R*" --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 cp $PROJECTID-$PRIMER-rc/$PROJECTID-$PRIMER-unpaired_R.txt s3://$AWS_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_R/$PROJECTID-$PRIMER-unpaired_R.txt --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 sync $PROJECTID-$PRIMER/ s3://$JS2_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_R --exclude "*" --include "$PROJECTID-$PRIMER-unpaired_R*" --no-progress --endpoint-url $JS2_ENDPOINT
aws s3 cp $PROJECTID-$PRIMER-rc/$PROJECTID-$PRIMER-unpaired_R.txt s3://$JS2_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_R/$PROJECTID-$PRIMER-unpaired_R.txt --no-progress --endpoint-url $JS2_ENDPOINT

# upload to aws
switchAWSCreds $S3_ACCESS_KEY_ID $S3_SECRET_ACCESS_KEY $S3_DEFAULT_REGION
aws s3 sync $PROJECTID-$PRIMER/ s3://$S3_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_R --exclude "*" --include "$PROJECTID-$PRIMER-unpaired_R*" --no-progress --endpoint-url $S3_ENDPOINT
aws s3 cp $PROJECTID-$PRIMER-rc/$PROJECTID-$PRIMER-unpaired_R.txt s3://$S3_BUCKET/projects/$PROJECTID/assign/$PRIMER/unpaired_R/$PROJECTID-$PRIMER-unpaired_R.txt --no-progress --endpoint-url $S3_ENDPOINT
fi

# cleanup
rm -r $PROJECTID-$PRIMER/* $PROJECTID-$PRIMER-rc/*
# fi
fi

# Set creds to js2
switchAWSCreds $JS2_ACCESS_KEY_ID $JS2_SECRET_ACCESS_KEY $JS2_DEFAULT_REGION

mkdir -p ednaexplorer-project-$PROJECTID/{tronko,terradactyl}
# dl all assign folders for $PROJECTID
aws s3 sync s3://$AWS_BUCKET/projects/$PROJECTID/assign ./$PROJECTID --no-progress --endpoint-url $AWS_ENDPOINT
aws s3 sync s3://$JS2_BUCKET/projects/$PROJECTID/assign ./$PROJECTID --no-progress --endpoint-url $JS2_ENDPOINT
# run process_tronko.py for each primer with 1, 5, 10, 25, 50, and 100 mismatches
mismatches=(1 5 10 25 50 100)
for dir in "$PROJECTID"/*; do
Expand All @@ -525,22 +580,23 @@ for dir in "$PROJECTID"/*; do
fi
done

# Package the per-project results (tronko + terradactyl metadata) into a
# tarball and upload it to both the AWS S3 bucket and the JS2 bucket, then
# run the JWT notification step and clean up local project files.

# Set creds to aws
switchAWSCreds "$S3_ACCESS_KEY_ID" "$S3_SECRET_ACCESS_KEY" "$S3_DEFAULT_REGION"

# download terradactyl files
aws s3 cp "s3://$S3_BUCKET/projects/$PROJECTID/METABARCODING.csv" "ednaexplorer-project-$PROJECTID/terradactyl/metabarcoding_metadata_original.csv" --no-progress --endpoint-url "$S3_ENDPOINT"
aws s3 cp "s3://$S3_BUCKET/projects/$PROJECTID/MetadataOutput_Metabarcoding.csv" "ednaexplorer-project-$PROJECTID/terradactyl/metabarcoding_metadata_terradactyl.csv" --no-progress --endpoint-url "$S3_ENDPOINT"
# copy README
# cp /mnt/README.md ednaexplorer-project-$PROJECTID/
# zip
tar -czvf "ednaexplorer-project-$PROJECTID.tar.gz" "ednaexplorer-project-$PROJECTID"
# upload
aws s3 cp "ednaexplorer-project-$PROJECTID.tar.gz" "s3://$S3_BUCKET/projects/$PROJECTID/ednaexplorer-project-$PROJECTID.tar.gz" --no-progress --endpoint-url "$S3_ENDPOINT"

# call processing_notif.sh — run inside a subshell so the cd does not leak:
# previously the cd persisted, so the JS2 tar.gz upload and the cleanup glob
# below executed from /mnt/jwt instead of the working directory.
(
    cd /mnt/jwt || exit 1
    # download primer list for jwt step.
    # NOTE(review): this still reads $AWS_BUCKET/$AWS_ENDPOINT while the rest
    # of this change renamed buckets to $S3_*/$JS2_* — confirm it is intended.
    aws s3 cp "s3://$AWS_BUCKET/projects/$PROJECTID/QC/metabarcode_loci_min_merge_length.txt" . --no-progress --endpoint-url "$AWS_ENDPOINT"
    ./processing_notif.sh -i "$PROJECTID"
)
# upload to js2
switchAWSCreds "$JS2_ACCESS_KEY_ID" "$JS2_SECRET_ACCESS_KEY" "$JS2_DEFAULT_REGION"
aws s3 cp "ednaexplorer-project-$PROJECTID.tar.gz" "s3://$JS2_BUCKET/projects/$PROJECTID/ednaexplorer-project-$PROJECTID.tar.gz" --no-progress --endpoint-url "$JS2_ENDPOINT"

# cleanup — ${PROJECTID:?} aborts instead of expanding to a bare '*' glob
# if PROJECTID is ever empty/unset
rm -r "${PROJECTID:?}"*
Expand All @@ -550,4 +606,7 @@ curl -X POST http://$IPADDRESS:8004/initializer \
-H "Content-Type: application/json" \
-d "{
\"ProjectID\": \"$PROJECTID\"
}"
}"

# add notif job
ben add -s /tmp/ben-notif -c "/home/ubuntu/crux/tronko/assign/jwt/processing_notif.sh -i $PROJECTID" $PROJECTID-$PRIMER -o /etc/ben/output
Loading

0 comments on commit ba608d7

Please sign in to comment.