Sync repo.
parent e8258e87fb
commit d10d71fbef
@@ -17,6 +17,11 @@ function archive_backup {
function prepare_restore_archive {
    CONFLUENCE_BACKUP_ARCHIVE_NAME=$1

    CONFLUENCE_RESTORE_HOME=${CONFLUENCE_BACKUP_ROOT}/${CONFLUENCE_BACKUP_ARCHIVE_NAME}/CONFLUENCE-home
    CONFLUENCE_RESTORE_DB=${CONFLUENCE_BACKUP_ROOT}/${CONFLUENCE_BACKUP_ARCHIVE_NAME}/CONFLUENCE-home


    if [ -z "${CONFLUENCE_BACKUP_ARCHIVE_NAME}" ]; then
        print "Usage: $0 <backup-snapshot>"
        if [ ! -d "${CONFLUENCE_BACKUP_ARCHIVE_ROOT}" ]; then
@@ -27,24 +32,27 @@ function prepare_restore_archive {
        exit 99
    fi

    if [ ! -f "${CONFLUENCE_BACKUP_ARCHIVE_ROOT}/${CONFLUENCE_BACKUP_ARCHIVE_NAME}.tar.gz" ]; then
        error "'${CONFLUENCE_BACKUP_ARCHIVE_ROOT}/${CONFLUENCE_BACKUP_ARCHIVE_NAME}.tar.gz' does not exist!"
    if [ ! -f "${CONFLUENCE_RESTORE_HOME}/${CONFLUENCE_BACKUP_ARCHIVE_NAME}.tgz" ]; then
        error "'${CONFLUENCE_RESTORE_HOME}/${CONFLUENCE_BACKUP_ARCHIVE_NAME}.tgz' does not exist!"
        available_backups
        exit 99
    fi

    # Setup restore paths
    CONFLUENCE_RESTORE_ROOT=$(mktemp -d /tmp/confluence.diy-restore.XXXXXX)
    CONFLUENCE_RESTORE_DB="${CONFLUENCE_RESTORE_ROOT}/confluence-db"
    CONFLUENCE_RESTORE_HOME="${CONFLUENCE_RESTORE_ROOT}/confluence-home"
    CONFLUENCE_RESTORE_DATA_STORES="${CONFLUENCE_RESTORE_ROOT}/confluence-data-stores"
    CONFLUENCE_RESTORE_DB="${CONFLUENCE_RESTORE_ROOT}/archive/CONFLUENCE-db"
    CONFLUENCE_RESTORE_HOME="${CONFLUENCE_RESTORE_ROOT}/confluence"
}

function copy_archive_to_tmp {
    rsync -ah ${CONFLUENCE_BACKUP_ROOT}/${CONFLUENCE_BACKUP_ARCHIVE_NAME}/ ${CONFLUENCE_RESTORE_ROOT}/archive/
}

function restore_archive {
    check_config_var "CONFLUENCE_BACKUP_ARCHIVE_ROOT"
    check_var "CONFLUENCE_BACKUP_ARCHIVE_NAME"
    check_var "CONFLUENCE_RESTORE_ROOT"
    run tar -xzf "${CONFLUENCE_BACKUP_ARCHIVE_ROOT}/${CONFLUENCE_BACKUP_ARCHIVE_NAME}.tar.gz" -C "${CONFLUENCE_RESTORE_ROOT}"
    run tar --strip-components=2 -xzf "${CONFLUENCE_RESTORE_ROOT}/archive/CONFLUENCE-home/${CONFLUENCE_BACKUP_ARCHIVE_NAME}.tgz" -C "${CONFLUENCE_RESTORE_ROOT}"
}
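# Sketch of the two-stage extraction above for a hypothetical snapshot named
# 2022-10-28, assuming the inner .tgz sits under
# ${CONFLUENCE_RESTORE_ROOT}/archive/CONFLUENCE-home/ (placed there by the outer
# archive or by copy_archive_to_tmp, defined above):
#   tar -xzf /backup/confluence/2022-10-28.tar.gz -C "${CONFLUENCE_RESTORE_ROOT}"
#   tar --strip-components=2 -xzf "${CONFLUENCE_RESTORE_ROOT}/archive/CONFLUENCE-home/2022-10-28.tgz" -C "${CONFLUENCE_RESTORE_ROOT}"
# --strip-components=2 drops the two leading path components stored in the inner
# tarball (presumably data1/snapshot/), leaving the home directory contents directly
# under ${CONFLUENCE_RESTORE_ROOT}.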

function cleanup_old_archives {
@@ -56,5 +64,5 @@ function available_backups {
    check_config_var "CONFLUENCE_BACKUP_ARCHIVE_ROOT"
    print "Available backups:"
    # Drop the .tar.gz extension, to make it a backup identifier
    ls "${CONFLUENCE_BACKUP_ARCHIVE_ROOT}" | sed -e 's/\.tar\.gz$//g'
    ls "${CONFLUENCE_BACKUP_ARCHIVE_ROOT}" | sed -e 's/\.tgz$//g'
}
@@ -4,6 +4,7 @@ set -e
SCRIPT_DIR=$(dirname "$0")
source "${SCRIPT_DIR}/utils.sh"
# source "${SCRIPT_DIR}/common.sh"
source "${SCRIPT_DIR}/func.sh"
source "${SCRIPT_DIR}/vars.sh"
source "${SCRIPT_DIR}/lvm.sh"
source "${SCRIPT_DIR}/dbase.sh"
@@ -18,8 +19,14 @@ declare -a COMPLETED_BG_JOBS
# Failed background jobs
declare -A FAILED_BG_JOBS

backup_start
info "Preparing for backup"
prepare_backup_db
prepare_backup_disk

backup_start

info "Backing up the database and filesystem in parallel"
run_in_bg backup_db "$DB_BACKUP_JOB_NAME"
run_in_bg backup_disk "$DISK_BACKUP_JOB_NAME"

# perform_cleanup_tmp
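# A minimal sketch of how the two background jobs could be collected afterwards,
# assuming run_in_bg (func.sh) keys the PIDs in BG_JOBS by job name; the actual
# wait/accounting code lives outside this hunk:
#   for job in "${!BG_JOBS[@]}"; do
#       if wait "${BG_JOBS[$job]}"; then
#           COMPLETED_BG_JOBS+=("$job")
#       else
#           FAILED_BG_JOBS["$job"]=$?
#       fi
#   done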
clean.sh (new executable file, +13 lines)
@@ -0,0 +1,13 @@
#!/bin/bash

# docker stop confluence_wiki-server_1
# docker stop confluence_wiki-db_1
sudo su - mali -c "cd /home/mali/confluence/;docker-compose stop"
rm -rf /data1/*
source "./vars.sh"
# exit 1
docker start confluence_wiki-db_1
sleep 3
psql -U "${POSTGRES_USERNAME}" -h "${POSTGRES_HOST}" --port=${POSTGRES_PORT} -d "template1" -tqc 'DROP DATABASE IF EXISTS confluence'
./confluence.diy-restore.sh 2022-10-28
sudo su - mali -c "cd /home/mali/confluence/;docker-compose start"
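# Usage sketch, assuming the compose project under /home/mali/confluence and the
# 2022-10-28 snapshot hard-coded above: run as root on the Confluence host, e.g.
#   sudo ./clean.sh
# which stops the stack, wipes /data1, drops the confluence database and replays the
# snapshot through confluence.diy-restore.sh before starting the containers again.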
common-functions.sh (new file, +0 lines)
@@ -15,24 +15,21 @@ set -e
SCRIPT_DIR=$(dirname "$0")
source "${SCRIPT_DIR}/utils.sh"
source "${SCRIPT_DIR}/common.sh"

if [ "${INSTANCE_TYPE}" = "bitbucket-mesh" ]; then
    # Mesh nodes don't run with an external database, so it doesn't need to be restored
    BACKUP_DATABASE_TYPE="none"
    # Mesh nodes don't run with an external Elasticsearch instance configured, so it doesn't need to be restored
    BACKUP_ELASTICSEARCH_TYPE="none"
fi
source "${SCRIPT_DIR}/func.sh"
source "${SCRIPT_DIR}/vars.sh"
source "${SCRIPT_DIR}/lvm.sh"
source "${SCRIPT_DIR}/dbase.sh"

source_archive_strategy
source_database_strategy
source_disk_strategy
source_elasticsearch_strategy

# Ensure we know which user:group things should be owned as
if [ -z "${BITBUCKET_UID}" -o -z "${BITBUCKET_GID}" ]; then
    error "Both BITBUCKET_UID and BITBUCKET_GID must be set in '${BACKUP_VARS_FILE}'"
    bail "See 'bitbucket.diy-backup.vars.sh.example' for the defaults."
fi

# # Ensure we know which user:group things should be owned as
# if [ -z "${BITBUCKET_UID}" -o -z "${BITBUCKET_GID}" ]; then
# error "Both BITBUCKET_UID and BITBUCKET_GID must be set in '${BACKUP_VARS_FILE}'"
# bail "See 'bitbucket.diy-backup.vars.sh.example' for the defaults."
# fi

check_command "jq"

@@ -47,13 +44,15 @@ info "Preparing for restore"

prepare_restore_disk "${1}"
prepare_restore_db "${1}"
prepare_restore_elasticsearch "${1}"

copy_archive_to_tmp

if [ -n "${BACKUP_ARCHIVE_TYPE}" ]; then
    restore_archive
fi

info "Restoring disk (home directory and data stores) and database"

info "Restoring disk and database"

# Restore the filesystem
restore_disk "${1}"
@@ -61,9 +60,6 @@ restore_disk "${1}"
# Restore the database
restore_db

# Restore Elasticsearch data
restore_elasticsearch

success "Successfully completed the restore of your ${PRODUCT} instance"

if [ -n "${FINAL_MESSAGE}" ]; then
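# Restore flow laid out above (presumably confluence.diy-restore.sh, the script
# clean.sh invokes): prepare_restore_disk/db/elasticsearch validate the snapshot id
# passed as "$1", copy_archive_to_tmp stages it under the restore root,
# restore_archive unpacks it when BACKUP_ARCHIVE_TYPE is set, and restore_disk,
# restore_db and restore_elasticsearch put the data back. A hypothetical invocation:
#   ./confluence.diy-restore.sh 2022-10-28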
dbase.sh (22 lines changed)
@@ -13,7 +13,7 @@ if [[ ${psql_majorminor} -ge 9003 ]]; then
fi

function prepare_backup_db {
    check_config_var "CONFLUENCE_BACKUP_DB"
    check_config_var "CONFLUENCE_BACKUP_DB_TMP"
    check_config_var "POSTGRES_USERNAME"
    check_config_var "POSTGRES_HOST"
    check_config_var "POSTGRES_PORT"
@@ -21,10 +21,10 @@ function prepare_backup_db {
}

function backup_db {
    [ -d "${CONFLUENCE_BACKUP_DB}" ] && rm -r "${CONFLUENCE_BACKUP_DB}"
    mkdir -p "${CONFLUENCE_BACKUP_DB}"
    run pg_dump -U "${POSTGRES_USERNAME}" -h "${POSTGRES_HOST}" --port=${POSTGRES_PORT} ${PG_PARALLEL} -Fd \
        -d "${CONFLUENCE_DB}" ${PG_SNAPSHOT_OPT} -f "${CONFLUENCE_BACKUP_DB}"
    [ -d "${CONFLUENCE_BACKUP_DB_TMP}" ] && rm -r "${CONFLUENCE_BACKUP_DB_TMP}"
    mkdir -p "${CONFLUENCE_BACKUP_DB_TMP}"
    run pg_dump -U "${POSTGRES_USERNAME}" -h "${POSTGRES_HOST}" --port=${POSTGRES_PORT} ${PG_PARALLEL} -Fd -d "${CONFLUENCE_DB}" ${PG_SNAPSHOT_OPT} -f "${CONFLUENCE_BACKUP_DB_TMP}"
    perform_rsync_compress_db
}
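# For illustration only: with the vars.sh values further below (database1user,
# localhost, confluence, /tmp/confluence-backup/db), the default port 5432 and
# PG_PARALLEL assumed to expand to something like "-j 4", the dump command above
# comes out roughly as:
#   pg_dump -U database1user -h localhost --port=5432 -j 4 -Fd -d confluence -f /tmp/confluence-backup/db
# -Fd (directory format) is what makes the parallel -j workers possible.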

function prepare_restore_db {
@@ -46,15 +46,21 @@ function prepare_restore_db {

function restore_db {
    run pg_restore -U "${POSTGRES_USERNAME}" -h "${POSTGRES_HOST}" --port=${POSTGRES_PORT} ${PG_PARALLEL} \
        -d postgres -C -Fd "${CONFLUENCE_RESTORE_DB}"
        -d postgres -C -Ft "${CONFLUENCE_RESTORE_DB}"
}
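# pg_restore note: -Fd expects the directory-format dump written by pg_dump -Fd in
# backup_db, while -Ft expects a single tar-format file, so CONFLUENCE_RESTORE_DB
# must point at whichever form the restored archive actually contains; -C makes
# pg_restore recreate the database (connecting via -d postgres) before loading it.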

function cleanup_incomplete_db_backup {
    info "Cleaning up DB backup created as part of failed/incomplete backup"
    rm -r "${CONFLUENCE_BACKUP_DB}"
    rm -r "${CONFLUENCE_BACKUP_DB_TMP}"
}

function cleanup_old_db_backups {
    # Not required as old backups with this strategy are typically cleaned up in the archiving strategy.
    no_op
}
}

function perform_rsync_compress_db {
    [ -d "${CONFLUENCE_BACKUP_DB}" ] && rm -r "${CONFLUENCE_BACKUP_DB}"
    mkdir -p "${CONFLUENCE_BACKUP_DB}"
    rsync --remove-source-files -h $CONFLUENCE_BACKUP_DB_TMP/* $CONFLUENCE_BACKUP_DB/
}
func.sh (17 lines changed)
@@ -47,4 +47,21 @@ function cleanup_incomplete_backup {
            ;;
        esac
    done
}

function perform_cleanup_tmp {
    rm -rf $CONFLUENCE_TMP
    :
}

function run_in_bg {
    ($1) &
    local PID=$!
    BG_JOBS["$2"]=${PID}
    debug "Started $2 (PID=${PID})"
}
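# run_in_bg runs "$1" in a background subshell and records its PID in BG_JOBS under
# the job name passed as "$2"; the backup driver uses it as:
#   run_in_bg backup_db "$DB_BACKUP_JOB_NAME"
#   run_in_bg backup_disk "$DISK_BACKUP_JOB_NAME"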

function backup_start {
    mkdir -p $CONFLUENCE_TMP
    :
}
lvm.sh (21 lines changed)
@@ -31,10 +31,10 @@ function backup_disk {
    vg=$(lvs | grep $snapshot_name | cut -d" " -f4)
    snap_volume=/dev/$vg/$snapshot_name

    mount -onouuid,ro $snap_volume /data1/snapshot
    mount -onouuid,ro $snap_volume ${CONFLUENCE_HOME_SNAP}

    # Create new variable to define source of backup as snapshot
    CONFLUENCE_HOME_SNAP=/data1/snapshot/CONFLUENCE-home/
    # CONFLUENCE_HOME_SNAP=/data2/snapshot/CONFLUENCE-home/

    # rsync home from snapshot
    # perform_rsync_home_directory
@@ -43,23 +43,16 @@ function backup_disk {
    perform_rsync_compress_data

    # unmount and remove lvm snapshot
    umount /data1/snapshot
    umount ${CONFLUENCE_HOME_SNAP}
    lvremove -f $snap_volume

}
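# Snapshot lifecycle in backup_disk, with the creation step assumed to sit earlier in
# the function outside this hunk (presumably an "lvcreate --snapshot" of the volume
# holding the Confluence home):
#   1. create the LVM snapshot ($snapshot_name on volume group $vg)
#   2. mount it read-only at ${CONFLUENCE_HOME_SNAP} (-onouuid,ro)
#   3. tar/rsync the data out of it (perform_compress_data, perform_rsync_compress_data)
#   4. umount ${CONFLUENCE_HOME_SNAP} and "lvremove -f" the snapshot volume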

function perform_compress_data {

    # Globals
    backupDir="/backup/confluence"
    pgdump="pg_dump"

    # Backup target directories
    backupDirDaily="$backupDir/$day_new_format"
    day_new_format=$(date +%Y-%m-%d)
    tar -czPf $backupDirDaily/$day_new_format.tgz /data1/snapshot
    tar -czPf $CONFLUENCE_TMP/$day_new_format.tgz $CONFLUENCE_HOME_SNAP
}
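# tar's -P (--absolute-names) keeps the full source path of ${CONFLUENCE_HOME_SNAP}
# inside the archive instead of stripping it; the restore side appears to compensate
# for this with --strip-components when it unpacks the .tgz.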

function perform_rsync_compress_data {
    rsync -avh --progress $backupDirDaily/$day_new_format.tgz /backup/confluence
}
    rsync --remove-source-files -h $CONFLUENCE_TMP/$day_new_format.tgz $CONFLUENCE_BACKUP_HOME/
}
utils.sh (1 line changed)
@@ -193,3 +193,4 @@ function hc_announce {
    ! curl ${CURL_OPTIONS} -X POST -H "Content-Type: application/json" -d "${hipchat_payload}" "${hipchat_url}"
    true
}
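# The extra "true" (presumably the line added in this hunk) makes hc_announce always
# return 0: "! curl ..." negates curl's exit status and is exempt from "set -e", so
# without it the function would report failure whenever the notification succeeded.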
vars.sh (24 lines changed)
@@ -1,18 +1,28 @@
#vars
INSTANCE_NAME=confluence
CONFLUENCE_UID=confluence
CONFLUENCE_GID=confluence
CONFLUENCE_UID=root
CONFLUENCE_GID=root
BACKUP_DISK_TYPE=lvm
BACKUP_DATABASE_TYPE=postgresql
BACKUP_ARCHIVE_TYPE=tar
DATE_TIMESTAMP=$(date --iso-8601)

CONFLUENCE_HOME=/data2/confluence
CONFLUENCE_BACKUP_HOME=/backup/confluence
CONFLUENCE_HOME=/data1/confluence
CONFLUENCE_TMP=/tmp/confluence-backup
CONFLUENCE_BACKUP_DB_TMP=/tmp/confluence-backup/db
CONFLUENCE_BACKUP_ROOT=/backup/confluence
CONFLUENCE_BACKUP_ARCHIVE_ROOT=/backup/confluence
CONFLUENCE_RESTORE_ROOT=/tmp/confluence-restore

CONFLUENCE_HOME_SNAP=/data1/snapshot
CONFLUENCE_HOME_SNAP_DATA=${CONFLUENCE_HOME_SNAP}/confluence

BITBUCKET_VERBOSE_BACKUP="false"

#db
CONFLUENCE_BACKUP_DB=${CONFLUENCE_BACKUP_ROOT}/CONFLUENCE-db/
CONFLUENCE_BACKUP_HOME=${CONFLUENCE_BACKUP_ROOT}/CONFLUENCE-home/
CONFLUENCE_BACKUP_DATA_STORES=${CONFLUENCE_BACKUP_ROOT}/CONFLUENCE-data-stores/
CONFLUENCE_BACKUP_DB=${CONFLUENCE_BACKUP_ROOT}/${DATE_TIMESTAMP}/CONFLUENCE-db
CONFLUENCE_BACKUP_HOME=${CONFLUENCE_BACKUP_ROOT}/${DATE_TIMESTAMP}/CONFLUENCE-home

CONFLUENCE_DB=confluence
POSTGRES_HOST=localhost
POSTGRES_USERNAME=database1user
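# For illustration, with DATE_TIMESTAMP resolving to e.g. 2022-10-28 (date --iso-8601
# prints YYYY-MM-DD), the date-stamped paths above expand to:
#   CONFLUENCE_BACKUP_DB   -> /backup/confluence/2022-10-28/CONFLUENCE-db
#   CONFLUENCE_BACKUP_HOME -> /backup/confluence/2022-10-28/CONFLUENCE-home
# which matches the ${CONFLUENCE_BACKUP_ROOT}/<snapshot>/CONFLUENCE-home layout that
# prepare_restore_archive looks up by snapshot name.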