s3-backup.sh
· 10 KiB · Bash
Неформатований
#!/bin/bash
#####################################################################
# #
# Stupidly simple backup script for own projects #
# #
# Author: Anthony Axenov (Антон Аксенов) #
# Version: 1.2 #
# License: WTFPLv2 More info (RU): https://axenov.dev/?p=1272 #
# #
#####################################################################
# use remote storages ===============================================
# Flags: 1 = enabled, anything else = disabled (compared to literal 1)
USE_SSH=1
USE_S3=1
# database credentials ==============================================
# NOTE(review): must be filled in before first run. DBPASS is passed on
# the mysqldump command line and is therefore visible in `ps` output.
DBUSER=
DBPASS=
DBNAME=
DBCHARSET="utf8"
# dates for file structure ==========================================
TODAY_DIR="$(date +%Y.%m.%d)"
TODAY_FILE="$(date +%H.%M)"
# local storage =====================================================
LOCAL_BAK_DIR="/backup"
LOCAL_BAK_PATH="$LOCAL_BAK_DIR/$TODAY_DIR"
# database backup file (gzipped SQL dump produced by backup_db)
LOCAL_SQL_FILE="$TODAY_FILE-db.sql.gz"
LOCAL_SQL_PATH="$LOCAL_BAK_PATH/$LOCAL_SQL_FILE"
# project path and backup file (tar.gz produced by backup_src)
LOCAL_SRC_DIR="/var/www/html"
LOCAL_SRC_FILE="$TODAY_FILE-src.tar.gz"
LOCAL_SRC_PATH="$LOCAL_BAK_PATH/$LOCAL_SRC_FILE"
# log file (written by log(), uploaded at the end by send_log())
LOG_FILE="$TODAY_FILE.log"
LOG_PATH="$LOCAL_BAK_PATH/$LOG_FILE"
# remote storages ===================================================
SSH_HOST="user@example.com"
SSH_BAK_DIR="/backup"
SSH_BAK_PATH="$SSH_BAK_DIR/$TODAY_DIR"
SSH_SQL_FILE="$SSH_BAK_PATH/$LOCAL_SQL_FILE"
SSH_SRC_FILE="$SSH_BAK_PATH/$LOCAL_SRC_FILE"
SSH_LOG_FILE="$SSH_BAK_PATH/$LOG_FILE"
S3_BUCKET="s3://my.bucket"
S3_DIR="$S3_BUCKET/$TODAY_DIR"
S3_SQL_FILE="$S3_DIR/$LOCAL_SQL_FILE"
S3_SRC_FILE="$S3_DIR/$LOCAL_SRC_FILE"
S3_LOG_FILE="$S3_DIR/$LOG_FILE"
# autoremove ========================================================
# time to live on different storages, in days (used with find -mtime
# and GNU `date -d "... days ago"`)
TTL_LOCAL=3
TTL_SSH=7
TTL_S3=60
# autoremove flags: 1 = purge expired backups on that remote storage
CLEAR_SSH=1
CLEAR_S3=1
# notifications =====================================================
# Requires the `ntfy` CLI; NTFY_CHANNEL must be set when USE_NTFY=1
USE_NTFY=1
NTFY_TITLE="Backup script"
NTFY_CHANNEL=
#====================================================================
#
# Functions used for the whole backup flow
#
#====================================================================
# Writes a timestamped message to stdout and appends it to $LOG_PATH.
# `echo -e` is deliberate: callers embed \t and \n escapes in messages.
log() {
    local stamp
    stamp=$(date +%H:%M:%S)
    echo -e "[$stamp] $*" | tee -a "$LOG_PATH"
}
# Sends an informational (low-priority) ntfy notification.
# Returns 0 when notifications are disabled: this function is the last
# command of the script, so its status must not make a successful run
# exit non-zero (the original returned 1 when USE_NTFY != 1).
ntfy_info() {
    [ "$USE_NTFY" == 1 ] || return 0
    ntfy send \
        --title "$NTFY_TITLE" \
        --message "$1" \
        --priority 1 \
        "$NTFY_CHANNEL"
}
# Sends a warning (max-priority) ntfy notification.
# Returns 0 when notifications are disabled so callers' flow and the
# script's exit status are not affected (the original returned 1).
ntfy_warn() {
    [ "$USE_NTFY" == 1 ] || return 0
    ntfy send \
        --title "$NTFY_TITLE" \
        --tags "warning" \
        --message "$1" \
        --priority 5 \
        "$NTFY_CHANNEL"
}
# Prints every effective setting into the log as a tree, so each run's
# log records the configuration it ran with.
# All flag tests quote the variable: the original `[ $USE_SSH == 1 ]`
# raised "unary operator expected" whenever a flag was empty/unset.
show_params() {
    log "Initialized parameters:"
    log "├ [ Remotes ]"
    log "│\t├ USE_SSH = $USE_SSH"
    [ "$USE_SSH" == 1 ] && log "│\t├ SSH_HOST = $SSH_HOST"
    log "│\t├ USE_S3 = $USE_S3"
    [ "$USE_S3" == 1 ] && log "│\t├ S3_BUCKET = $S3_BUCKET"
    log "├ [ Database ]"
    log "│\t├ DBUSER = $DBUSER"
    log "│\t├ DBNAME = $DBNAME"
    log "│\t├ DBCHARSET = $DBCHARSET"
    log "│\t├ LOCAL_SQL_PATH = $LOCAL_SQL_PATH"
    [ "$USE_SSH" == 1 ] && log "│\t├ SSH_SQL_FILE = $SSH_SQL_FILE"
    [ "$USE_S3" == 1 ] && log "│\t├ S3_SQL_FILE = $S3_SQL_FILE"
    log "├ [ Sources ]"
    log "│\t├ LOCAL_SRC_DIR = $LOCAL_SRC_DIR"
    log "│\t├ LOCAL_SRC_PATH = $LOCAL_SRC_PATH"
    [ "$USE_SSH" == 1 ] && log "│\t├ SSH_SRC_FILE = $SSH_SRC_FILE"
    [ "$USE_S3" == 1 ] && log "│\t├ S3_SRC_FILE = $S3_SRC_FILE"
    log "├ [ Log ]"
    log "│\t├ LOG_PATH = $LOG_PATH"
    [ "$USE_SSH" == 1 ] && log "│\t├ SSH_LOG_FILE = $SSH_LOG_FILE"
    [ "$USE_S3" == 1 ] && log "│\t├ S3_LOG_FILE = $S3_LOG_FILE"
    log "├ [ Autoclear ]"
    log "│\t├ TTL_LOCAL = $TTL_LOCAL"
    [ "$USE_SSH" == 1 ] && {
        log "│\t├ CLEAR_SSH = $CLEAR_SSH"
        log "│\t├ TTL_SSH = $TTL_SSH"
    }
    [ "$USE_S3" == 1 ] && {
        log "│\t├ CLEAR_S3 = $CLEAR_S3"
        log "│\t├ TTL_S3 = $TTL_S3"
    }
    log "└ [ ntfy ]"
    log "\t├ USE_NTFY = $USE_NTFY"
    [ "$USE_NTFY" == 1 ] && log "\t├ NTFY_TITLE = $NTFY_TITLE"
    [ "$USE_NTFY" == 1 ] && log "\t└ NTFY_CHANNEL = $NTFY_CHANNEL"
}
# Creates today's backup directory locally and (if SSH is enabled) on the
# remote host. mkdir -p is a no-op for existing directories, so the
# original `[ ! -d ... ]` pre-check was redundant; the path is quoted so
# directories with spaces work.
init_dirs() {
    mkdir -p "$LOCAL_BAK_PATH"
    if [ "$USE_SSH" == 1 ]; then
        ssh "$SSH_HOST" "mkdir -p $SSH_BAK_PATH"
    fi
}
# Removes local backup directories older than TTL_LOCAL days.
# -mindepth 1 keeps $LOCAL_BAK_DIR itself out of the candidate set (the
# original could wipe the whole backup root once it aged past the TTL),
# and -print0 | xargs -0 keeps paths containing whitespace intact.
clear_local_backups() {
    log "\tLocal:"
    log "$(find "$LOCAL_BAK_DIR" -mindepth 1 -type d -mtime +"$TTL_LOCAL" | sort)"
    # -r (GNU xargs) skips rm entirely when nothing matched
    find "$LOCAL_BAK_DIR" -mindepth 1 -type d -mtime +"$TTL_LOCAL" -print0 | xargs -0 -r rm -rf --
}
# Removes backups older than TTL_SSH days on the remote SSH storage.
# Flag tests are quoted: the original `[ $USE_SSH == 1 ]` was a syntax
# error when the flag was empty/unset.
# NOTE(review): the remote find has no -mindepth 1, so $SSH_BAK_DIR itself
# is a deletion candidate once it ages past the TTL — confirm intent.
clear_ssh_backups() {
    if [ "$USE_SSH" == 1 ] && [ "$CLEAR_SSH" == 1 ]; then
        log "\tSSH:"
        log "$(ssh "$SSH_HOST" "find $SSH_BAK_DIR -type d -mtime +$TTL_SSH" | sort)"
        ssh "$SSH_HOST" "find $SSH_BAK_DIR -type d -mtime +$TTL_SSH | xargs rm -rf"
    else
        log "\tSSH: disabled (\$USE_SSH, \$CLEAR_SSH)"
    fi
}
# Removes S3 objects older than TTL_S3 days by parsing `s3cmd ls -r`
# output (columns: date, time, size, path).
# Fixes: `[ $FILEPATH != "" ]` was a syntax error exactly when FILEPATH
# was empty; flag tests and s3 paths are now quoted.
# https://gist.github.com/JProffitt71/9044744?permalink_comment_id=3539681#gistcomment-3539681
clear_s3_backups() {
    if [ "$USE_S3" == 1 ] && [ "$CLEAR_S3" == 1 ]; then
        log "\tS3:"
        OLDER_THAN=$(date -d "$TTL_S3 days ago" "+%s")
        s3cmd ls -r "$S3_DIR" | while read -r line; do
            FILETIME=$(echo "$line" | awk '{print $1" "$2}')
            FILETIME=$(date -d "$FILETIME" "+%s")
            if [[ $FILETIME -le $OLDER_THAN ]]; then
                FILEPATH=$(echo "$line" | awk '{print $4}')
                if [ -n "$FILEPATH" ]; then
                    log "$line"
                    s3cmd del "$FILEPATH"
                fi
            fi
        done
    else
        log "\tS3: disabled (\$USE_S3 + \$CLEAR_S3)"
    fi
}
# Step 1/7: run every autoremove routine (local, SSH, S3) in order.
clear_backups() {
    local cleaner
    echo
    log "1/7 Removing old backups..."
    for cleaner in clear_local_backups clear_ssh_backups clear_s3_backups; do
        "$cleaner"
    done
}
# Step 2/7: dump the database, gzip it to LOCAL_SQL_PATH, then trigger the
# remote uploads (steps 3-4/7) on success.
# Fixes: without pipefail, `$?` after `mysqldump | gzip` was gzip's status,
# so a failed dump was reported as OK — PIPESTATUS[0] checks mysqldump
# itself. The logged exit code is captured before any test clobbers `$?`.
backup_db() {
    local rc
    echo
    log "2/7 Dumping DB: $DBNAME..."
    mysqldump \
        --user="$DBUSER" \
        --password="$DBPASS" \
        --opt \
        --default-character-set="$DBCHARSET" \
        --quick \
        "$DBNAME" | gzip > "$LOCAL_SQL_PATH"
    rc=${PIPESTATUS[0]}
    if [ "$rc" == 0 ]; then
        log "\t- OK"
        send_db_ssh
        send_db_s3
    else
        log "\t- ERROR: failed to create dump. Exit-code: $rc"
        ntfy_warn "ERROR: failed to create dump"
        log "3/7 Sending database backup to $SSH_HOST... skipped"
        log "4/7 Sending database backup to $S3_DIR... skipped"
    fi
}
# Step 3/7: upload the database archive to the SSH storage via rsync.
# Fix: rsync's status is captured into rc first — in the original, the
# logged `$?` was the status of the `[ $? == 0 ]` test (always 1), not
# rsync's actual exit code.
send_db_ssh() {
    local rc
    echo
    log "3/7 Sending database backup to $SSH_HOST..."
    if [ "$USE_SSH" == 1 ]; then
        rsync --progress "$LOCAL_SQL_PATH" "$SSH_HOST:$SSH_SQL_FILE"
        rc=$?
        if [ "$rc" == 0 ]; then
            log "\t- OK"
        else
            log "\t- ERROR: failed to send DB backup to $SSH_HOST. Exit-code: $rc"
            ntfy_warn "ERROR: failed to send DB backup to $SSH_HOST"
        fi
    else
        log "\t- disabled (\$USE_SSH)"
    fi
}
# Step 4/7: upload the database archive to S3 via s3cmd.
# Fixes: the disabled message named $USE_SSH although the guard is
# $USE_S3; the logged exit code is captured before `[` clobbers `$?`.
send_db_s3() {
    local rc
    echo
    log "4/7 Sending database backup to $S3_DIR..."
    if [ "$USE_S3" == 1 ]; then
        s3cmd put "$LOCAL_SQL_PATH" "$S3_SQL_FILE"
        rc=$?
        if [ "$rc" == 0 ]; then
            log "\t- OK"
        else
            log "\t- ERROR: failed to send DB backup to $S3_DIR. Exit-code: $rc"
            ntfy_warn "ERROR: failed to send DB backup to $S3_DIR"
        fi
    else
        log "\t- disabled (\$USE_S3)"
    fi
}
# Step 5/7: archive the project directory, then trigger the remote
# uploads (steps 6-7/7) on success.
# Fix: tar's status is captured into rc — in the original error branch,
# the logged `$?` was the status of the `[ $? == 0 ]` test (always 1).
backup_src() {
    local rc
    echo
    log "5/7 Compressing project dir: $LOCAL_SRC_DIR..."
    tar -zcf "$LOCAL_SRC_PATH" "$LOCAL_SRC_DIR"
    rc=$?
    if [ "$rc" == 0 ]; then
        log "\t- OK"
        send_src_ssh
        send_src_s3
    else
        log "\t- ERROR: failed to compress project. Exit-code: $rc"
        ntfy_warn "ERROR: failed to compress project"
        log "6/7 Sending project backup to $SSH_HOST... skipped"
        log "7/7 Sending project backup to $S3_DIR... skipped"
    fi
}
# Step 6/7: upload the sources archive to the SSH storage via rsync.
# Fixes: rsync's exit code is captured into rc (the original logged the
# `[` test's status, always 1); the disabled message now names the flag,
# consistent with the other send_* functions.
send_src_ssh() {
    local rc
    echo
    log "6/7 Sending project backup to $SSH_HOST..."
    if [ "$USE_SSH" == 1 ]; then
        rsync --progress "$LOCAL_SRC_PATH" "$SSH_HOST:$SSH_SRC_FILE"
        rc=$?
        if [ "$rc" == 0 ]; then
            log "\t- OK"
        else
            log "\t- ERROR: failed to send project backup to $SSH_HOST. Exit-code: $rc"
            ntfy_warn "ERROR: failed to send project backup to $SSH_HOST"
        fi
    else
        log "\t- disabled (\$USE_SSH)"
    fi
}
# Step 7/7: upload the sources archive to S3 via s3cmd.
# Fixes: adds the missing USE_S3 guard (every other S3 call site checks
# it; the original uploaded even with S3 disabled); the error message
# said "database backup" for the project archive; the exit code is
# captured before `[` clobbers `$?`.
send_src_s3() {
    local rc
    echo
    log "7/7 Sending project backup to $S3_DIR..."
    if [ "$USE_S3" == 1 ]; then
        s3cmd put "$LOCAL_SRC_PATH" "$S3_SRC_FILE"
        rc=$?
        if [ "$rc" == 0 ]; then
            log "\t- OK"
        else
            log "\t- ERROR: failed to send project backup to $S3_DIR. Exit-code: $rc"
            ntfy_warn "ERROR: failed to send project backup to $S3_DIR"
        fi
    else
        log "\t- disabled (\$USE_S3)"
    fi
}
# Logs a finishing banner plus disk usage of the local backup storage.
show_finish() {
    local used free
    echo
    log "Finish!"
    used=$(du -h "$LOCAL_BAK_PATH" | tail -n1)                      # size of today's backup dir
    free=$(df -h "$LOCAL_BAK_PATH" | tail -n1 | awk '{print $4}')   # free space on the local disk
    log "Used space: $used"
    log "Free space: $free"
    echo
}
# Uploads this run's log file to every enabled remote storage.
# Fix: the original's `[ $USE_S3 == 1 ] && s3cmd ...` as the last command
# made the function return 1 whenever S3 was disabled, misreporting a
# successful run as a failure. Now only a real upload failure is returned.
send_log() {
    local rc=0
    if [ "$USE_SSH" == 1 ]; then
        rsync --progress "$LOG_PATH" "$SSH_HOST:$SSH_LOG_FILE" || rc=$?
    fi
    if [ "$USE_S3" == 1 ]; then
        s3cmd put "$LOG_PATH" "$S3_LOG_FILE" || rc=$?
    fi
    return "$rc"
}
# main flow =========================================================
# NOTE(review): no `set -euo pipefail` here — each step logs its own
# errors and the flow deliberately continues even if a step fails.
log "Start ----------------------------------------------------------"
show_params    # record all effective settings into the log
init_dirs      # ensure local (and remote, if enabled) dated dirs exist
clear_backups  # step 1/7: purge expired backups on all storages
backup_db      # steps 2-4/7: dump DB, gzip, upload to SSH/S3
backup_src     # steps 5-7/7: archive sources, upload to SSH/S3
show_finish    # report used/free disk space
send_log       # copy this run's log file to the remotes
ntfy_info "Finish!"
| 1 | #!/bin/bash |
| 2 | ##################################################################### |
| 3 | # # |
| 4 | # Stupidly simple backup script for own projects # |
| 5 | # # |
| 6 | # Author: Anthony Axenov (Антон Аксенов) # |
| 7 | # Version: 1.2 # |
| 8 | # License: WTFPLv2 More info (RU): https://axenov.dev/?p=1272 # |
| 9 | # # |
| 10 | ##################################################################### |
| 11 | |
| 12 | # use remote storages =============================================== |
| 13 | |
| 14 | USE_SSH=1 |
| 15 | USE_S3=1 |
| 16 | |
| 17 | # database credentials ============================================== |
| 18 | |
| 19 | DBUSER= |
| 20 | DBPASS= |
| 21 | DBNAME= |
| 22 | DBCHARSET="utf8" |
| 23 | |
| 24 | # dates for file structure ========================================== |
| 25 | |
| 26 | TODAY_DIR="$(date +%Y.%m.%d)" |
| 27 | TODAY_FILE="$(date +%H.%M)" |
| 28 | |
| 29 | # local storage ===================================================== |
| 30 | |
| 31 | LOCAL_BAK_DIR="/backup" |
| 32 | LOCAL_BAK_PATH="$LOCAL_BAK_DIR/$TODAY_DIR" |
| 33 | |
| 34 | # database backup file |
| 35 | LOCAL_SQL_FILE="$TODAY_FILE-db.sql.gz" |
| 36 | LOCAL_SQL_PATH="$LOCAL_BAK_PATH/$LOCAL_SQL_FILE" |
| 37 | |
| 38 | # project path and backup file |
| 39 | LOCAL_SRC_DIR="/var/www/html" |
| 40 | LOCAL_SRC_FILE="$TODAY_FILE-src.tar.gz" |
| 41 | LOCAL_SRC_PATH="$LOCAL_BAK_PATH/$LOCAL_SRC_FILE" |
| 42 | |
| 43 | # log file |
| 44 | LOG_FILE="$TODAY_FILE.log" |
| 45 | LOG_PATH="$LOCAL_BAK_PATH/$LOG_FILE" |
| 46 | |
| 47 | # remote storages =================================================== |
| 48 | |
| 49 | SSH_HOST="user@example.com" |
| 50 | SSH_BAK_DIR="/backup" |
| 51 | SSH_BAK_PATH="$SSH_BAK_DIR/$TODAY_DIR" |
| 52 | SSH_SQL_FILE="$SSH_BAK_PATH/$LOCAL_SQL_FILE" |
| 53 | SSH_SRC_FILE="$SSH_BAK_PATH/$LOCAL_SRC_FILE" |
| 54 | SSH_LOG_FILE="$SSH_BAK_PATH/$LOG_FILE" |
| 55 | |
| 56 | S3_BUCKET="s3://my.bucket" |
| 57 | S3_DIR="$S3_BUCKET/$TODAY_DIR" |
| 58 | S3_SQL_FILE="$S3_DIR/$LOCAL_SQL_FILE" |
| 59 | S3_SRC_FILE="$S3_DIR/$LOCAL_SRC_FILE" |
| 60 | S3_LOG_FILE="$S3_DIR/$LOG_FILE" |
| 61 | |
| 62 | # autoremove ======================================================== |
| 63 | |
| 64 | # time to live on different storages |
| 65 | TTL_LOCAL=3 |
| 66 | TTL_SSH=7 |
| 67 | TTL_S3=60 |
| 68 | |
| 69 | # autoremove flags |
| 70 | CLEAR_SSH=1 |
| 71 | CLEAR_S3=1 |
| 72 | |
| 73 | # notifications ===================================================== |
| 74 | |
| 75 | USE_NTFY=1 |
| 76 | NTFY_TITLE="Backup script" |
| 77 | NTFY_CHANNEL= |
| 78 | |
| 79 | #==================================================================== |
| 80 | # |
| 81 | # Functions used for the whole backup flow |
| 82 | # |
| 83 | #==================================================================== |
| 84 | |
| 85 | # prints arguments to stdout and into log file |
| 86 | log() { |
| 87 | echo -e "[$(date +%H:%M:%S)] $*" | tee -a "$LOG_PATH" |
| 88 | } |
| 89 | |
| 90 | # sends notification with information |
| 91 | ntfy_info() { |
| 92 | [ $USE_NTFY == 1 ] && ntfy send \ |
| 93 | --title "$NTFY_TITLE" \ |
| 94 | --message "$1" \ |
| 95 | --priority 1 \ |
| 96 | "$NTFY_CHANNEL" |
| 97 | } |
| 98 | |
| 99 | # sends notification with warning |
| 100 | ntfy_warn() { |
| 101 | [ $USE_NTFY == 1 ] && ntfy send \ |
| 102 | --title "$NTFY_TITLE" \ |
| 103 | --tags "warning" \ |
| 104 | --message "$1" \ |
| 105 | --priority 5 \ |
| 106 | "$NTFY_CHANNEL" |
| 107 | } |
| 108 | |
| 109 | # prints initialized parameters |
| 110 | show_params() { |
| 111 | log "Initialized parameters:" |
| 112 | |
| 113 | log "├ [ Remotes ]" |
| 114 | log "│\t├ USE_SSH = $USE_SSH" |
| 115 | [ $USE_SSH == 1 ] && log "│\t├ SSH_HOST = $SSH_HOST" |
| 116 | log "│\t├ USE_S3 = $USE_S3" |
| 117 | [ $USE_S3 == 1 ] && log "│\t├ S3_BUCKET = $S3_BUCKET" |
| 118 | |
| 119 | log "├ [ Database ]" |
| 120 | log "│\t├ DBUSER = $DBUSER" |
| 121 | log "│\t├ DBNAME = $DBNAME" |
| 122 | log "│\t├ DBCHARSET = $DBCHARSET" |
| 123 | log "│\t├ LOCAL_SQL_PATH = $LOCAL_SQL_PATH" |
| 124 | [ $USE_SSH == 1 ] && log "│\t├ SSH_SQL_FILE = $SSH_SQL_FILE" |
| 125 | [ $USE_S3 == 1 ] && log "│\t├ S3_SQL_FILE = $S3_SQL_FILE" |
| 126 | |
| 127 | log "├ [ Sources ]" |
| 128 | log "│\t├ LOCAL_SRC_DIR = $LOCAL_SRC_DIR" |
| 129 | log "│\t├ LOCAL_SRC_PATH = $LOCAL_SRC_PATH" |
| 130 | [ $USE_SSH == 1 ] && log "│\t├ SSH_SRC_FILE = $SSH_SRC_FILE" |
| 131 | [ $USE_S3 == 1 ] && log "│\t├ S3_SRC_FILE = $S3_SRC_FILE" |
| 132 | |
| 133 | log "├ [ Log ]" |
| 134 | log "│\t├ LOG_PATH = $LOG_PATH" |
| 135 | [ $USE_SSH == 1 ] && log "│\t├ SSH_LOG_FILE = $SSH_LOG_FILE" |
| 136 | [ $USE_S3 == 1 ] && log "│\t├ S3_LOG_FILE = $S3_LOG_FILE" |
| 137 | |
| 138 | log "├ [ Autoclear ]" |
| 139 | log "│\t├ TTL_LOCAL = $TTL_LOCAL" |
| 140 | [ $USE_SSH == 1 ] && { |
| 141 | log "│\t├ CLEAR_SSH = $CLEAR_SSH" |
| 142 | log "│\t├ TTL_SSH = $TTL_SSH" |
| 143 | } |
| 144 | [ $USE_S3 == 1 ] && { |
| 145 | log "│\t├ CLEAR_S3 = $CLEAR_S3" |
| 146 | log "│\t├ TTL_S3 = $TTL_S3" |
| 147 | } |
| 148 | |
| 149 | log "└ [ ntfy ]" |
| 150 | log "\t├ USE_NTFY = $USE_NTFY" |
| 151 | [ $USE_NTFY == 1 ] && log "\t├ NTFY_TITLE = $NTFY_TITLE" |
| 152 | [ $USE_NTFY == 1 ] && log "\t└ NTFY_CHANNEL = $NTFY_CHANNEL" |
| 153 | } |
| 154 | |
| 155 | # initializes directories for backup |
| 156 | init_dirs() { |
| 157 | if [ ! -d "$LOCAL_BAK_PATH" ]; then |
| 158 | mkdir -p $LOCAL_BAK_PATH |
| 159 | fi |
| 160 | [ $USE_SSH == 1 ] && ssh $SSH_HOST "mkdir -p $SSH_BAK_PATH" |
| 161 | } |
| 162 | |
| 163 | # clears old local backups |
| 164 | clear_local_backups() { |
| 165 | log "\tLocal:" |
| 166 | log $(find "$LOCAL_BAK_DIR" -type d -mtime +"$TTL_LOCAL" | sort) |
| 167 | find "$LOCAL_BAK_DIR" -type d -mtime +"$TTL_LOCAL" | xargs rm -rf |
| 168 | } |
| 169 | |
| 170 | # clears old backups on remote ssh storage |
| 171 | clear_ssh_backups() { |
| 172 | if [ $USE_SSH == 1 ] && [ $CLEAR_SSH == 1 ]; then |
| 173 | log "\tSSH:" |
| 174 | log $(ssh "$SSH_HOST" "find $SSH_BAK_DIR -type d -mtime +$TTL_SSH" | sort) |
| 175 | ssh "$SSH_HOST" "find $SSH_BAK_DIR -type d -mtime +$TTL_SSH | xargs rm -rf" |
| 176 | else |
| 177 | log "\tSSH: disabled (\$USE_SSH, \$CLEAR_SSH)" |
| 178 | fi |
| 179 | } |
| 180 | |
| 181 | # clears backups on remote s3 storage |
| 182 | clear_s3_backups() { |
| 183 | # https://gist.github.com/JProffitt71/9044744?permalink_comment_id=3539681#gistcomment-3539681 |
| 184 | if [ $USE_S3 == 1 ] && [ $CLEAR_S3 == 1 ]; then |
| 185 | log "\tS3:" |
| 186 | OLDER_THAN=$(date -d "$TTL_S3 days ago" "+%s") |
| 187 | s3cmd ls -r $S3_DIR | while read -r line; do |
| 188 | FILETIME=$(echo "$line" | awk {'print $1" "$2'}) |
| 189 | FILETIME=$(date -d "$FILETIME" "+%s") |
| 190 | if [[ $FILETIME -le $OLDER_THAN ]]; then |
| 191 | FILEPATH=$(echo "$line" | awk {'print $4'}) |
| 192 | if [ $FILEPATH != "" ]; then |
| 193 | log "$line" |
| 194 | s3cmd del $FILEPATH |
| 195 | fi |
| 196 | fi |
| 197 | done |
| 198 | else |
| 199 | log "\tS3: disabled (\$USE_S3 + \$CLEAR_S3)" |
| 200 | fi |
| 201 | } |
| 202 | |
| 203 | # clears old backups |
| 204 | clear_backups() { |
| 205 | echo |
| 206 | log "1/7 Removing old backups..." |
| 207 | clear_local_backups |
| 208 | clear_ssh_backups |
| 209 | clear_s3_backups |
| 210 | } |
| 211 | |
| 212 | # makes archive with database dump |
| 213 | backup_db() { |
| 214 | echo |
| 215 | log "2/7 Dumping DB: $DBNAME..." |
| 216 | mysqldump \ |
| 217 | --user=$DBUSER \ |
| 218 | --password=$DBPASS \ |
| 219 | --opt \ |
| 220 | --default-character-set=$DBCHARSET \ |
| 221 | --quick \ |
| 222 | $DBNAME | gzip > $LOCAL_SQL_PATH |
| 223 | if [ $? == 0 ]; then |
| 224 | log "\t- OK" |
| 225 | send_db_ssh |
| 226 | send_db_s3 |
| 227 | else |
| 228 | log "\t- ERROR: failed to create dump. Exit-code: $?" |
| 229 | ntfy_warn "ERROR: failed to create dump" |
| 230 | log "3/7 Sending database backup to $SSH_HOST... skipped" |
| 231 | log "4/7 Sending database backup to $S3_DIR... skipped" |
| 232 | fi |
| 233 | } |
| 234 | |
| 235 | # sends database archive into ssh remote storage |
| 236 | send_db_ssh() { |
| 237 | echo |
| 238 | log "3/7 Sending database backup to $SSH_HOST..." |
| 239 | if [ $USE_SSH == 1 ]; then |
| 240 | rsync --progress "$LOCAL_SQL_PATH" "$SSH_HOST:$SSH_SQL_FILE" |
| 241 | if [ $? == 0 ]; then |
| 242 | log "\t- OK" |
| 243 | else |
| 244 | log "\t- ERROR: failed to send DB backup to $SSH_HOST. Exit-code: $?" |
| 245 | ntfy_warn "ERROR: failed to send DB backup to $SSH_HOST" |
| 246 | fi |
| 247 | else |
| 248 | log "\t- disabled (\$USE_SSH)" |
| 249 | fi |
| 250 | } |
| 251 | |
| 252 | # sends database archive into s3 remote storage |
| 253 | send_db_s3() { |
| 254 | echo |
| 255 | log "4/7 Sending database backup to $S3_DIR..." |
| 256 | if [ $USE_S3 == 1 ]; then |
| 257 | s3cmd put "$LOCAL_SQL_PATH" "$S3_SQL_FILE" |
| 258 | if [ $? == 0 ]; then |
| 259 | log "\t- OK" |
| 260 | else |
| 261 | log "\t- ERROR: failed to send DB backup to $S3_DIR. Exit-code: $?" |
| 262 | ntfy_warn "ERROR: failed to send DB backup to $S3_DIR" |
| 263 | fi |
| 264 | else |
| 265 | log "\t- disabled (\$USE_SSH)" |
| 266 | fi |
| 267 | } |
| 268 | |
| 269 | # makes archive with project sources |
| 270 | backup_src() { |
| 271 | echo |
| 272 | log "5/7 Compressing project dir: $LOCAL_SRC_DIR..." |
| 273 | tar -zcf "$LOCAL_SRC_PATH" "$LOCAL_SRC_DIR" |
| 274 | if [ $? == 0 ]; then |
| 275 | log "\t- OK" |
| 276 | send_src_ssh |
| 277 | send_src_s3 |
| 278 | else |
| 279 | log "\t- ERROR: failed to compress project. Exit-code: $?" |
| 280 | ntfy_warn "ERROR: failed to compress project" |
| 281 | log "6/7 Sending project backup to $SSH_HOST... skipped" |
| 282 | log "7/7 Sending project backup to $S3_DIR... skipped" |
| 283 | fi |
| 284 | } |
| 285 | |
| 286 | # sends sources archive into ssh remote storage |
| 287 | send_src_ssh() { |
| 288 | echo |
| 289 | log "6/7 Sending project backup to $SSH_HOST..." |
| 290 | if [ $USE_SSH == 1 ]; then |
| 291 | rsync --progress "$LOCAL_SRC_PATH" "$SSH_HOST:$SSH_SRC_FILE" |
| 292 | if [ $? == 0 ]; then |
| 293 | log "\t- OK" |
| 294 | else |
| 295 | log "\t- ERROR: failed to send project backup to $SSH_HOST. Exit-code: $?" |
| 296 | ntfy_warn "ERROR: failed to send project backup to $SSH_HOST" |
| 297 | fi |
| 298 | else |
| 299 | log "\t- disabled" |
| 300 | fi |
| 301 | } |
| 302 | |
| 303 | # sends sources archive into s3 remote storage |
| 304 | send_src_s3() { |
| 305 | echo |
| 306 | log "7/7 Sending project backup to $S3_DIR..." |
| 307 | s3cmd put "$LOCAL_SRC_PATH" "$S3_SRC_FILE" |
| 308 | if [ $? == 0 ]; then |
| 309 | log "\t- OK" |
| 310 | else |
| 311 | log "\t- ERROR: failed to send database backup to $S3_DIR. Exit-code: $?" |
| 312 | ntfy_warn "ERROR: failed to send project backup to $S3_DIR" |
| 313 | fi |
| 314 | } |
| 315 | |
| 316 | # prints used/free space on local storage |
| 317 | show_finish() { |
| 318 | echo |
| 319 | log "Finish!" |
| 320 | log "Used space: $(du -h "$LOCAL_BAK_PATH" | tail -n1)" # вывод размера папки с бэкапами за текущий день |
| 321 | log "Free space: $(df -h "$LOCAL_BAK_PATH" | tail -n1 | awk '{print $4}')" # вывод свободного места на локальном диске |
| 322 | echo |
| 323 | } |
| 324 | |
| 325 | # sends log file into both remote storage |
| 326 | send_log() { |
| 327 | [ $USE_SSH == 1 ] && rsync --progress "$LOG_PATH" "$SSH_HOST:$SSH_LOG_FILE" |
| 328 | [ $USE_S3 == 1 ] && s3cmd put "$LOG_PATH" "$S3_LOG_FILE" |
| 329 | } |
| 330 | |
| 331 | # main flow ========================================================= |
| 332 | |
| 333 | log "Start ----------------------------------------------------------" |
| 334 | show_params |
| 335 | init_dirs |
| 336 | clear_backups |
| 337 | backup_db |
| 338 | backup_src |
| 339 | show_finish |
| 340 | send_log |
| 341 | ntfy_info "Finish!" |
| 342 |