-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathmain.sh
110 lines (98 loc) · 2.54 KB
/
main.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
#!/bin/bash
# shellcheck shell=bash
#######################################
# Dump a single database with pg_dump into $OUTPUTDIR/<name>.sql,
# then optionally push the file to Azure and/or S3.
# Globals:   HOST, USER, OUTPUTDIR (read); DEBUG, AZCOPY, S3COPY (feature flags)
# Arguments: $1 - base name (without .sql extension) for the dump file
# NOTE(review): the .pgpass entry built in the main script includes $PORT and
# $DATABASE, but neither -p nor -d is passed here, so pg_dump falls back to
# its defaults -- confirm this is intentional.
#######################################
dump() {
  echo "Beginning pg_dump"
  # -v (verbose) only in debug mode; pg_dump writes progress to STDERR
  if [[ -n "$DEBUG" ]]
  then
    pg_dump -h "$HOST" -U "$USER" -v -f "$OUTPUTDIR/$1.sql"
  else
    pg_dump -h "$HOST" -U "$USER" -f "$OUTPUTDIR/$1.sql"
  fi
  if [[ -n "$AZCOPY" ]]
  then
    azupload
  fi
  if [[ -n "$S3COPY" ]]
  then
    s3upload
  fi
}
#######################################
# Dump the whole cluster with pg_dumpall into $OUTPUTDIR/<name>.sql,
# then optionally push the file to Azure and/or S3.
# Globals:   HOST, USER, OUTPUTDIR (read); DEBUG, AZCOPY, S3COPY (feature flags)
# Arguments: $1 - base name (without .sql extension) for the dump file
#######################################
dumpall() {
  echo "Beginning pg_dumpall"
  # pg_dumpall writes detailed object comments, start/stop times to the dump
  # file, and progress messages to STDERR; the same applies to pg_dump above.
  # BUGFIX: the original passed -v on BOTH branches, making the DEBUG check a
  # no-op; the non-debug branch now omits -v, matching dump().
  if [[ -n "$DEBUG" ]]
  then
    pg_dumpall -h "$HOST" -U "$USER" -v -c -f "$OUTPUTDIR/$1.sql"
  else
    pg_dumpall -h "$HOST" -U "$USER" -c -f "$OUTPUTDIR/$1.sql"
  fi
  if [[ -n "$AZCOPY" ]]
  then
    azupload
  fi
  if [[ -n "$S3COPY" ]]
  then
    s3upload
  fi
}
#######################################
# Upload the current dump file to an Azure storage container.
# Globals:   AZ_CONTAINER_NAME, AZ_ACCOUNT_NAME, OUTPUTDIR, date (read)
# Outputs:   progress/result messages to stdout
#######################################
azupload() {
  echo "Uploading to Azure enabled. Starting blob upload"
  # BUGFIX: test the command directly instead of inspecting $? afterwards --
  # under 'set -e' a failing upload aborted the script before the $? check,
  # so the error message below was unreachable.
  if az storage blob upload -c "$AZ_CONTAINER_NAME" --account-name "$AZ_ACCOUNT_NAME" -s "$OUTPUTDIR/$date.sql" --auth-mode key
  then
    echo "Upload completed"
  else
    echo "Unable to upload to Azure, please check the az command output above for more details"
  fi
}
#######################################
# Upload the current dump file to an S3 bucket, creating the bucket first
# when it does not exist yet.
# Globals:   S3_BUCKET_NAME, OUTPUTDIR, date (read)
# Outputs:   progress/result messages to stdout
#######################################
s3upload() {
  # head-bucket exits 0 iff the bucket exists and is accessible, so create
  # the bucket only when the probe fails. (The check is inlined here; the
  # original routed it through a helper whose branches ran inverted: it
  # tried a plain cp into a missing bucket and mb on an existing one.)
  if ! aws s3api head-bucket --bucket "$S3_BUCKET_NAME" >/dev/null 2>&1
  then
    aws s3 mb "s3://$S3_BUCKET_NAME"
  fi
  # BUGFIX: test cp directly -- under 'set -e' a failing upload aborted the
  # script before a separate $? check could run.
  if aws s3 cp "$OUTPUTDIR/$date.sql" "s3://$S3_BUCKET_NAME"
  then
    echo "Upload to S3 completed"
  else
    echo "Unable to upload to S3, please check the aws command output above for more details"
  fi
}
#######################################
# Return 0 (true) when the named S3 bucket exists and is accessible,
# 1 (false) otherwise.
# Arguments: $1 - bucket name
#######################################
bucket_exists() {
  local bucketname=$1
  # BUGFIX: head-bucket exits 0 when the bucket exists; the original negated
  # this and returned "true" for a MISSING bucket, inverting every caller.
  if aws s3api head-bucket --bucket "$bucketname" >/dev/null 2>&1; then
    return 0
  else
    return 1
  fi
}
#######################################
# Abort the script with exit code 2 when $OUTPUTDIR is not readable.
# Globals:   OUTPUTDIR (read)
#######################################
outputfolder_readable() {
  if [[ ! -r "$OUTPUTDIR" ]]
  then
    # Diagnostics go to STDERR so they are not mixed into captured output.
    echo "$OUTPUTDIR is not readable" >&2
    # Exit code != 0 so callers / schedulers notice the failure
    exit 2
  fi
}
set -e
echo "Script starting @ $(date)"
outputfolder_readable
# Build a .pgpass file so pg_dump/pg_dumpall can authenticate without a prompt.
if [[ -n "$PASSWORD" && -n "$DATABASE" ]]
then
  echo "$HOST:$PORT:$DATABASE:$USER:$PASSWORD" > /.pgpass
  chmod 600 /.pgpass
  export PGPASSFILE=/.pgpass
fi
# Dump files are named after the current date, e.g. 24012024.sql
date="$(date +"%d%m%Y")"
# FULLDUMP selects pg_dumpall (whole cluster) over pg_dump (single database)
if [[ -z "$FULLDUMP" ]]
then
  dump "$date"
else
  dumpall "$date"
fi
echo "Removing older backup files"
# Retention: delete anything older than 14 days.
# BUGFIX: 'find "$OUTPUTDIR"/*' left the glob literal when the directory was
# empty, making find fail and 'set -e' kill the script; recurse from the
# directory itself instead.
find "$OUTPUTDIR" -mindepth 1 -mtime +14 -exec rm -rf -- {} +
echo "Script ended @ $(date)"