diff --git a/archive-tar.sh b/archive-tar.sh
index 477ff06..d90465e 100644
--- a/archive-tar.sh
+++ b/archive-tar.sh
@@ -17,6 +17,11 @@ function archive_backup {
 
 function prepare_restore_archive {
     CONFLUENCE_BACKUP_ARCHIVE_NAME=$1
+    CONFLUENCE_RESTORE_HOME=${CONFLUENCE_BACKUP_ROOT}/${CONFLUENCE_BACKUP_ARCHIVE_NAME}/CONFLUENCE-home
+    CONFLUENCE_RESTORE_DB=${CONFLUENCE_BACKUP_ROOT}/${CONFLUENCE_BACKUP_ARCHIVE_NAME}/CONFLUENCE-db
+
+
+
     if [ -z "${CONFLUENCE_BACKUP_ARCHIVE_NAME}" ]; then
         print "Usage: $0 <backup-snapshot>"
         if [ ! -d "${CONFLUENCE_BACKUP_ARCHIVE_ROOT}" ]; then
@@ -27,24 +32,27 @@ function prepare_restore_archive {
         exit 99
     fi
 
-    if [ ! -f "${CONFLUENCE_BACKUP_ARCHIVE_ROOT}/${CONFLUENCE_BACKUP_ARCHIVE_NAME}.tar.gz" ]; then
-        error "'${CONFLUENCE_BACKUP_ARCHIVE_ROOT}/${CONFLUENCE_BACKUP_ARCHIVE_NAME}.tar.gz' does not exist!"
+    if [ ! -f "${CONFLUENCE_RESTORE_HOME}/${CONFLUENCE_BACKUP_ARCHIVE_NAME}.tgz" ]; then
+        error "'${CONFLUENCE_RESTORE_HOME}/${CONFLUENCE_BACKUP_ARCHIVE_NAME}.tgz' does not exist!"
         available_backups
         exit 99
     fi
 
     # Setup restore paths
     CONFLUENCE_RESTORE_ROOT=$(mktemp -d /tmp/confluence.diy-restore.XXXXXX)
-    CONFLUENCE_RESTORE_DB="${CONFLUENCE_RESTORE_ROOT}/confluence-db"
-    CONFLUENCE_RESTORE_HOME="${CONFLUENCE_RESTORE_ROOT}/confluence-home"
-    CONFLUENCE_RESTORE_DATA_STORES="${CONFLUENCE_RESTORE_ROOT}/confluence-data-stores"
+    CONFLUENCE_RESTORE_DB="${CONFLUENCE_RESTORE_ROOT}/archive/CONFLUENCE-db"
+    CONFLUENCE_RESTORE_HOME="${CONFLUENCE_RESTORE_ROOT}/confluence"
+}
+
+function copy_archive_to_tmp {
+    rsync -ah "${CONFLUENCE_BACKUP_ROOT}/${CONFLUENCE_BACKUP_ARCHIVE_NAME}/" "${CONFLUENCE_RESTORE_ROOT}/archive/"
 }
 
 function restore_archive {
     check_config_var "CONFLUENCE_BACKUP_ARCHIVE_ROOT"
     check_var "CONFLUENCE_BACKUP_ARCHIVE_NAME"
     check_var "CONFLUENCE_RESTORE_ROOT"
-    run tar -xzf "${CONFLUENCE_BACKUP_ARCHIVE_ROOT}/${CONFLUENCE_BACKUP_ARCHIVE_NAME}.tar.gz" -C "${CONFLUENCE_RESTORE_ROOT}"
+    run tar --strip-components=2 -xzf "${CONFLUENCE_RESTORE_ROOT}/archive/CONFLUENCE-home/${CONFLUENCE_BACKUP_ARCHIVE_NAME}.tgz" -C "${CONFLUENCE_RESTORE_ROOT}"
 }
 
 function cleanup_old_archives {
@@ -56,5 +64,5 @@ function available_backups {
     check_config_var "CONFLUENCE_BACKUP_ARCHIVE_ROOT"
     print "Available backups:"
     # Drop the .tar.gz extension, to make it a backup identifier
-    ls "${CONFLUENCE_BACKUP_ARCHIVE_ROOT}" | sed -e 's/\.tar\.gz$//g'
+    ls "${CONFLUENCE_BACKUP_ARCHIVE_ROOT}" | sed -e 's/\.tgz$//g'
 }
diff --git a/backup.sh b/backup.sh
index 45fdaa1..2bf3f77 100755
--- a/backup.sh
+++ b/backup.sh
@@ -4,6 +4,7 @@ set -e
 SCRIPT_DIR=$(dirname "$0")
 source "${SCRIPT_DIR}/utils.sh"
 # source "${SCRIPT_DIR}/common.sh"
+source "${SCRIPT_DIR}/func.sh"
 source "${SCRIPT_DIR}/vars.sh"
 source "${SCRIPT_DIR}/lvm.sh"
 source "${SCRIPT_DIR}/dbase.sh"
@@ -18,8 +19,14 @@ declare -a COMPLETED_BG_JOBS
 
 # Failed background jobs
 declare -A FAILED_BG_JOBS
 
+backup_start
 info "Preparing for backup"
 prepare_backup_db
 prepare_backup_disk
-backup_start
\ No newline at end of file
+
+info "Backing up the database and filesystem in parallel"
+run_in_bg backup_db "$DB_BACKUP_JOB_NAME"
+run_in_bg backup_disk "$DISK_BACKUP_JOB_NAME"
+
+# perform_cleanup_tmp
\ No newline at end of file
diff --git a/clean.sh b/clean.sh
new file mode 100755
index 0000000..40f1a2b
--- /dev/null
+++ b/clean.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+# docker stop confluence_wiki-server_1
+# docker stop confluence_wiki-db_1
+sudo su - mali -c "cd /home/mali/confluence/;docker-compose stop"
+rm -rf /data1/*
+source "./vars.sh"
+# exit 1
+docker start confluence_wiki-db_1
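+# wait briefly so the freshly started database container can accept connections before the DROP DATABASE below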
+sleep 3
+psql -U "${POSTGRES_USERNAME}" -h "${POSTGRES_HOST}" --port=${POSTGRES_PORT} -d "template1" -tqc 'DROP DATABASE IF EXISTS confluence'
+./confluence.diy-restore.sh 2022-10-28
+sudo su - mali -c "cd /home/mali/confluence/;docker-compose start"
\ No newline at end of file
diff --git a/common-functions.sh b/common-functions.sh
new file mode 100644
index 0000000..e69de29
diff --git a/confluence.diy-restore.sh b/confluence.diy-restore.sh
index 68d326f..42b0d22 100755
--- a/confluence.diy-restore.sh
+++ b/confluence.diy-restore.sh
@@ -15,24 +15,21 @@ set -e
 SCRIPT_DIR=$(dirname "$0")
 source "${SCRIPT_DIR}/utils.sh"
 source "${SCRIPT_DIR}/common.sh"
-
-if [ "${INSTANCE_TYPE}" = "bitbucket-mesh" ]; then
-    # Mesh nodes don't run with an external database, so it doesn't need to be restored
-    BACKUP_DATABASE_TYPE="none"
-    # Mesh nodes don't run with an external Elasticsearch instance configured, so it doesn't need to be restored
-    BACKUP_ELASTICSEARCH_TYPE="none"
-fi
+source "${SCRIPT_DIR}/func.sh"
+source "${SCRIPT_DIR}/vars.sh"
+source "${SCRIPT_DIR}/lvm.sh"
+source "${SCRIPT_DIR}/dbase.sh"
 
 source_archive_strategy
 source_database_strategy
 source_disk_strategy
-source_elasticsearch_strategy
 
-# Ensure we know which user:group things should be owned as
-if [ -z "${BITBUCKET_UID}" -o -z "${BITBUCKET_GID}" ]; then
-    error "Both BITBUCKET_UID and BITBUCKET_GID must be set in '${BACKUP_VARS_FILE}'"
-    bail "See 'bitbucket.diy-backup.vars.sh.example' for the defaults."
-fi
+
+# # Ensure we know which user:group things should be owned as
+# if [ -z "${BITBUCKET_UID}" -o -z "${BITBUCKET_GID}" ]; then
+#     error "Both BITBUCKET_UID and BITBUCKET_GID must be set in '${BACKUP_VARS_FILE}'"
+#     bail "See 'bitbucket.diy-backup.vars.sh.example' for the defaults."
+# fi
 
 check_command "jq"
 
@@ -47,13 +44,15 @@
 info "Preparing for restore"
 prepare_restore_disk "${1}"
 prepare_restore_db "${1}"
-prepare_restore_elasticsearch "${1}"
+
+copy_archive_to_tmp
 
 if [ -n "${BACKUP_ARCHIVE_TYPE}" ]; then
     restore_archive
 fi
 
-info "Restoring disk (home directory and data stores) and database"
+
+info "Restoring disk and database"
 
 # Restore the filesystem
 restore_disk "${1}"
@@ -61,9 +60,6 @@ restore_disk "${1}"
 
 # Restore the database
 restore_db
-# Restore Elasticsearch data
-restore_elasticsearch
-
 success "Successfully completed the restore of your ${PRODUCT} instance"
 
 if [ -n "${FINAL_MESSAGE}" ]; then
diff --git a/dbase.sh b/dbase.sh
index bcc7bbc..258a36b 100644
--- a/dbase.sh
+++ b/dbase.sh
@@ -13,7 +13,7 @@ if [[ ${psql_majorminor} -ge 9003 ]]; then
 fi
 
 function prepare_backup_db {
-    check_config_var "CONFLUENCE_BACKUP_DB"
+    check_config_var "CONFLUENCE_BACKUP_DB_TMP"
     check_config_var "POSTGRES_USERNAME"
     check_config_var "POSTGRES_HOST"
     check_config_var "POSTGRES_PORT"
@@ -21,10 +21,10 @@ function prepare_backup_db {
 }
 
 function backup_db {
-    [ -d "${CONFLUENCE_BACKUP_DB}" ] && rm -r "${CONFLUENCE_BACKUP_DB}"
-    mkdir -p "${CONFLUENCE_BACKUP_DB}"
-    run pg_dump -U "${POSTGRES_USERNAME}" -h "${POSTGRES_HOST}" --port=${POSTGRES_PORT} ${PG_PARALLEL} -Fd \
-        -d "${CONFLUENCE_DB}" ${PG_SNAPSHOT_OPT} -f "${CONFLUENCE_BACKUP_DB}"
+    [ -d "${CONFLUENCE_BACKUP_DB_TMP}" ] && rm -r "${CONFLUENCE_BACKUP_DB_TMP}"
+    mkdir -p "${CONFLUENCE_BACKUP_DB_TMP}"
+    run pg_dump -U "${POSTGRES_USERNAME}" -h "${POSTGRES_HOST}" --port=${POSTGRES_PORT} ${PG_PARALLEL} -Fd -d "${CONFLUENCE_DB}" ${PG_SNAPSHOT_OPT} -f "${CONFLUENCE_BACKUP_DB_TMP}"
+    perform_rsync_compress_db
 }
 
 function prepare_restore_db {
@@ -46,15 +46,21 @@ function prepare_restore_db {
 
 function restore_db {
     run pg_restore -U "${POSTGRES_USERNAME}" -h "${POSTGRES_HOST}" --port=${POSTGRES_PORT} ${PG_PARALLEL} \
-        -d postgres -C -Fd "${CONFLUENCE_RESTORE_DB}"
+        -d postgres -C -Ft "${CONFLUENCE_RESTORE_DB}"
 }
 
 function cleanup_incomplete_db_backup {
     info "Cleaning up DB backup created as part of failed/incomplete backup"
-    rm -r "${CONFLUENCE_BACKUP_DB}"
+    rm -r "${CONFLUENCE_BACKUP_DB_TMP}"
 }
 
 function cleanup_old_db_backups {
     # Not required as old backups with this strategy are typically cleaned up in the archiving strategy.
     no_op
-}
\ No newline at end of file
+}
+
+function perform_rsync_compress_db {
+    [ -d "${CONFLUENCE_BACKUP_DB}" ] && rm -r "${CONFLUENCE_BACKUP_DB}"
+    mkdir -p "${CONFLUENCE_BACKUP_DB}"
+    rsync -ah --remove-source-files "${CONFLUENCE_BACKUP_DB_TMP}/" "${CONFLUENCE_BACKUP_DB}/"
+}
diff --git a/func.sh b/func.sh
index 340e603..e3e5f9a 100644
--- a/func.sh
+++ b/func.sh
@@ -47,4 +47,22 @@ function cleanup_incomplete_backup {
             ;;
         esac
     done
+}
+
+function perform_cleanup_tmp {
+    rm -rf "${CONFLUENCE_TMP}"
+    :
+}
+
+function run_in_bg {
+    ($1) &
+    local PID=$!
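+    # record the new job's PID under the supplied name so the caller can track and report on it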
+    BG_JOBS["$2"]=${PID}
+    debug "Started $2 (PID=${PID})"
+}
+
+function backup_start {
+    mkdir -p "${CONFLUENCE_TMP}"
+    :
+}
\ No newline at end of file
diff --git a/lvm.sh b/lvm.sh
index f7ac170..f75c98c 100644
--- a/lvm.sh
+++ b/lvm.sh
@@ -31,10 +31,10 @@ function backup_disk {
     vg=$(lvs | grep $snapshot_name | cut -d" " -f4)
     snap_volume=/dev/$vg/$snapshot_name
 
-    mount -onouuid,ro $snap_volume /data1/snapshot
+    mount -onouuid,ro $snap_volume ${CONFLUENCE_HOME_SNAP}
 
     # Create new variable to define source of backup as snapshot
-    CONFLUENCE_HOME_SNAP=/data1/snapshot/CONFLUENCE-home/
+    # CONFLUENCE_HOME_SNAP=/data2/snapshot/CONFLUENCE-home/
 
     # rsync home from snapshot
     # perform_rsync_home_directory
@@ -43,23 +43,16 @@
     perform_rsync_compress_data
 
     # unmount and remove lvm snapshot
-    umount /data1/snapshot
+    umount ${CONFLUENCE_HOME_SNAP}
     lvremove -f $snap_volume
-
 }
 
 function perform_compress_data {
-
-    # Globals
-    backupDir="/backup/confluence"
-    pgdump="pg_dump"
-
-    # Backup target directories
-    backupDirDaily="$backupDir/$day_new_format"
     day_new_format=$(date +%Y-%m-%d)
-    tar -czPf $backupDirDaily/$day_new_format.tgz /data1/snapshot
+    tar -czPf "${CONFLUENCE_TMP}/${day_new_format}.tgz" "${CONFLUENCE_HOME_SNAP}"
 }
 
 function perform_rsync_compress_data {
-    rsync -avh --progress $backupDirDaily/$day_new_format.tgz /backup/confluence
-}
\ No newline at end of file
+    rsync -h --remove-source-files "${CONFLUENCE_TMP}/${day_new_format}.tgz" "${CONFLUENCE_BACKUP_HOME}/"
+}
+
diff --git a/utils.sh b/utils.sh
index 8052a24..a03ddb7 100644
--- a/utils.sh
+++ b/utils.sh
@@ -193,3 +193,4 @@ function hc_announce {
     ! curl ${CURL_OPTIONS} -X POST -H "Content-Type: application/json" -d "${hipchat_payload}" "${hipchat_url}"
     true
 }
+
diff --git a/vars.sh b/vars.sh
index 912ae5e..870f85c 100644
--- a/vars.sh
+++ b/vars.sh
@@ -1,18 +1,28 @@
 #vars
 INSTANCE_NAME=confluence
-CONFLUENCE_UID=confluence
-CONFLUENCE_GID=confluence
+CONFLUENCE_UID=root
+CONFLUENCE_GID=root
 BACKUP_DISK_TYPE=lvm
 BACKUP_DATABASE_TYPE=postgresql
 BACKUP_ARCHIVE_TYPE=tar
+DATE_TIMESTAMP=$(date --iso-8601)
 
-CONFLUENCE_HOME=/data2/confluence
-CONFLUENCE_BACKUP_HOME=/backup/confluence
+CONFLUENCE_HOME=/data1/confluence
+CONFLUENCE_TMP=/tmp/confluence-backup
+CONFLUENCE_BACKUP_DB_TMP=/tmp/confluence-backup/db
+CONFLUENCE_BACKUP_ROOT=/backup/confluence
+CONFLUENCE_BACKUP_ARCHIVE_ROOT=/backup/confluence
+CONFLUENCE_RESTORE_ROOT=/tmp/confluence-restore
+
+CONFLUENCE_HOME_SNAP=/data1/snapshot
+CONFLUENCE_HOME_SNAP_DATA=${CONFLUENCE_HOME_SNAP}/confluence
+
+BITBUCKET_VERBOSE_BACKUP="false"
 
 #db
-CONFLUENCE_BACKUP_DB=${CONFLUENCE_BACKUP_ROOT}/CONFLUENCE-db/
-CONFLUENCE_BACKUP_HOME=${CONFLUENCE_BACKUP_ROOT}/CONFLUENCE-home/
-CONFLUENCE_BACKUP_DATA_STORES=${CONFLUENCE_BACKUP_ROOT}/CONFLUENCE-data-stores/
+CONFLUENCE_BACKUP_DB=${CONFLUENCE_BACKUP_ROOT}/${DATE_TIMESTAMP}/CONFLUENCE-db
+CONFLUENCE_BACKUP_HOME=${CONFLUENCE_BACKUP_ROOT}/${DATE_TIMESTAMP}/CONFLUENCE-home
+
 CONFLUENCE_DB=confluence
 POSTGRES_HOST=localhost
 POSTGRES_USERNAME=database1user