mirror of
https://github.com/andsyrovatko/s4k-psql-db-backuper.git
synced 2026-04-21 22:18:52 +02:00
Initial release: PSQL DB dump NFS backup automation
This commit is contained in:
+197
@@ -0,0 +1,197 @@
|
||||
#!/usr/bin/env bash
# =============================================================================
# Script Name : db_backuper.sh
# Description : Backup PostgreSQL database called by cron or manually.
# Usage : ./db_backuper
# Author : syr4ok (Andrii Syrovatko)
# Version : 2.1.3r
# =============================================================================

# Stop script on pipeline errors.
# NOTE: only `pipefail` is enabled (no -e/-u); critical commands below are
# checked explicitly instead, so a stray non-zero status does not abort runs.
set -o pipefail

# Pin a minimal, predictable PATH so cron-started runs behave like
# interactive ones (cron's default PATH is much smaller).
PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
|
||||
|
||||
# --- LOADING CONFIGURATION ---
# DATE_NOW stamps the daily backup directory name (yymmdd).
DATE_NOW=$(date +%y%m%d)
# Config lives next to the script itself, wherever it is installed.
CONFIG_FILE="$(dirname "$0")/db_backuper.conf"
if [[ -f "$CONFIG_FILE" ]]; then
    # shellcheck source=/dev/null
    source "$CONFIG_FILE"
else
    # Diagnostics go to stderr so cron mail / redirected stdout stay clean,
    # and the message names the exact path that was checked.
    echo "Error: Configuration file not found at ${CONFIG_FILE}. Create db_backuper.conf from example." >&2
    exit 1
fi
|
||||
|
||||
# --- DEPENDENCY CHECK ---
# Critical tool: the configured backup command (normally pg_basebackup).
# Without it a backup is impossible, so exit immediately.
if ! command -v "${PSQL_COMMAND}" &> /dev/null; then
    # Fall back to whatever pg_basebackup is on PATH.
    # `command -v` is the portable/builtin replacement for `which`.
    PSQL_COMMAND=$(command -v pg_basebackup 2>/dev/null)
    if [ -z "$PSQL_COMMAND" ]; then
        echo "❌ Critical Error: pg_basebackup not found! Backup impossible." >&2
        exit 1
    fi
fi
# Optional tools: curl (Telegram reports) and mail (email reports).
# Their absence only downgrades reporting, it never blocks the backup.
HAS_CURL=true
HAS_MAIL=true
command -v curl &> /dev/null || HAS_CURL=false
command -v mail &> /dev/null || HAS_MAIL=false
|
||||
|
||||
# --- Main functions ---
#######################################
# Print a labeled, timestamped divider line for the run log.
# Arguments: $1 - label text (e.g. START / END)
# Outputs:   divider line to stdout
#######################################
log_divider() {
    local tag="$1"
    printf -- '----------------------- %s %s %s -----------------------\n' \
        "$tag" "$(date +%Y/%m/%d-%H:%M)" "$tag"
}
|
||||
|
||||
#######################################
# Send a Telegram notification via the Bot API; skipped when curl is absent.
# Globals:   HAS_CURL, TG_BOT_ID, TG_CHAT_ID, S_HOSTNAME, CACHE_FILE (read)
# Arguments: $1 - message text
#######################################
send_tg() {
    if [ "$HAS_CURL" = true ]; then
        # Use curl from the pinned PATH (the same binary HAS_CURL probed),
        # quote chat_id, and url-encode the text so messages containing
        # '&', '+', '=' or spaces are not mangled by the API request.
        curl -s -X POST "https://api.telegram.org/bot${TG_BOT_ID}/sendMessage" \
            -d chat_id="${TG_CHAT_ID}" \
            --data-urlencode text="[${S_HOSTNAME}]: $1"
    else
        echo "⚠️ Warning: Telegram report skipped (curl not installed)." | tee -a "${CACHE_FILE}"
    fi
}
|
||||
|
||||
#######################################
# Close the run log with an END divider and email it (if mail is installed).
# Globals:   CACHE_FILE, HAS_MAIL, S_HOSTNAME, MAIL_SENDER, MAIL_RECEIVER (read)
# Arguments: $1 - status string; "ERROR" switches the subject emoji
#######################################
send_report() {
    local STATUS=$1
    local EMOJI="💾"
    [ "$STATUS" == "ERROR" ] && EMOJI="❌"

    log_divider "END" >> "${CACHE_FILE}"
    if [ "$HAS_MAIL" = true ]; then
        local SUBJECT
        SUBJECT="[${S_HOSTNAME}] ${EMOJI} ${STATUS} DB Backup Info - $(date +%Y/%m/%d-%H:%M)"
        # Feed the log straight from the file (no `cat |` pipeline needed).
        # NOTE(review): MAIL_SENDER is passed here as an extra *recipient*;
        # if it is meant to be the envelope sender, `mail -r` (or -a From:)
        # is the usual flag — confirm the intent before changing.
        mail -s "$SUBJECT" "$MAIL_SENDER" "$MAIL_RECEIVER" < "${CACHE_FILE}"
    else
        echo "⚠️ Warning: Email report skipped (mailutils not installed)." | tee -a "${CACHE_FILE}"
    fi
}
|
||||
|
||||
|
||||
# --- BLOCKING THE SCRIPT (only one working process per time)---
# Open fd 200 on the lock file and take an exclusive, non-blocking lock on
# it. A second invocation (e.g. overlapping cron runs) fails the flock and
# exits after reporting. The kernel releases the lock automatically when
# this process exits and fd 200 is closed — no manual unlock needed.
exec 200>"$LOCK_FILE"
if ! flock -n 200; then
    msg="❌ The script is already running in another process. Exit!"
    echo "$msg" | tee -a "${CACHE_FILE}"
    send_tg "$msg"
    send_report "ERROR"
    exit 1
fi
|
||||
|
||||
# --- START ---

# 1. Preparing the log file: create it with group rw permissions on first
# use, then overwrite it with the START divider so every run logs fresh.
if [ ! -f "${CACHE_FILE}" ]; then
    touch "${CACHE_FILE}" && chmod 660 "${CACHE_FILE}"
fi
log_divider "START" > "${CACHE_FILE}"
|
||||
|
||||
# 2. Checking and mounting NFS (only if IS_LOCAL_BACKUP=false)
# The target directory is date-stamped — one backup directory per day.
DB_DIR="$MNT_POINT/psql_db_$DATE_NOW"
# Human-readable location tag interpolated into log/report messages below.
TMP_MSG="locally"
if [ "$IS_LOCAL_BACKUP" = false ]; then
    TMP_MSG="on NFS"
    # mount.nfs ships with nfs-common; without it a remote backup cannot
    # work, so bail out early with a report.
    if ! command -v mount.nfs &> /dev/null; then
        msg="❌ Critical Error: nfs-common (mount.nfs) is not installed! Remote backup impossible."
        echo "$msg" | tee -a "${CACHE_FILE}"
        send_tg "$msg"
        send_report "ERROR"
        exit 1
    fi
    # Mount only if the mountpoint is not already active. soft/timeo/retrans
    # keep the mount attempt from hanging forever on an unreachable server.
    if ! mountpoint -q "$MNT_POINT"; then
        echo "Attempting to mount NFS..." >> "${CACHE_FILE}"
        if ! mount -t nfs "${NFS_SERVER_IP}:${NFS_SERVER_DIR}" "$MNT_POINT" -o soft,timeo=30,retrans=2; then
            msg="❌ NFS Mount Failed! Server ${NFS_SERVER_IP} unreachable."
            echo "$msg" | tee -a "${CACHE_FILE}"
            send_tg "$msg"
            send_report "ERROR"
            exit 1
        fi
    fi
fi
|
||||
|
||||
# 3. Check for duplicate directory — if today's target already exists,
# refuse to overwrite it and report the aborted run.
if [[ -d "${DB_DIR}" ]]; then
    dup_msg="⚠️ DB backup stopped. Today's dir ($DB_DIR) already exists!"
    echo "$dup_msg" | tee -a "${CACHE_FILE}"
    send_tg "$dup_msg"
    send_report "ERROR"
    exit 1
fi
|
||||
|
||||
# 4. Creating a directory
# DEBUG=1 means dry run: nothing is created. ${DEBUG:-0} guards against an
# unset DEBUG (missing from the config), which would make `-eq` error out.
if [ "${DEBUG:-0}" -eq 1 ]; then
    echo "DEBUG: [DRY RUN] Skipping directory creation: ${DB_DIR}" >> "${CACHE_FILE}"
else
    if ! mkdir -p "${DB_DIR}"; then
        msg="❌ Failed to create directory ${DB_DIR} ${TMP_MSG}."
        echo "$msg" | tee -a "${CACHE_FILE}"
        send_tg "$msg"
        send_report "ERROR"
        exit 1
    fi
    # pg_basebackup runs as the postgres user (via sudo below) and must be
    # able to write here; surface a chown failure instead of hiding it.
    if ! chown postgres:postgres "${DB_DIR}"; then
        echo "⚠️ Warning: could not chown ${DB_DIR} to postgres:postgres." | tee -a "${CACHE_FILE}"
    fi
fi
|
||||
|
||||
# 5. Starting a backup process
BACKUP_SUCCESS=false
{
    [ "${DEBUG:-0}" -eq 1 ] && echo "--- DEBUG MODE ON (DRY RUN) ---"
    echo "Backup DB STARTED at $(date +%Y/%m/%d-%H:%M)"
} >> "${CACHE_FILE}"

if [ "${DEBUG:-0}" -eq 1 ]; then
    echo "DEBUG: Skipping real pg_basebackup command..." >> "${CACHE_FILE}"
    BACKUP_SUCCESS=true # True - to read old files.
else
    # Collect optional flags in an array: an empty array expands to zero
    # words, so there is no unquoted word-splitting / globbing exposure.
    EXT_ARGS=()
    if [ "$EXTENDED_BACK_STATUS" = true ]; then
        EXT_ARGS+=('-P') # progress reporting in the log
    fi

    echo -e "Using command for DB dump:\ncd /tmp/ && sudo -u ${PSQL_USER} ${PSQL_COMMAND} -p ${PSQL_PORT} -D ${DB_DIR} --checkpoint=${PSQL_CHECKPOINT} -Ft -z ${EXT_ARGS[*]} 2>&1" | tee -a "${CACHE_FILE}"

    # cd /tmp avoids "could not change directory" noise when sudo-ing to
    # the postgres user from a directory it cannot read. pipefail (set at
    # the top) makes the `if` reflect pg_basebackup's status, not tee's.
    if cd /tmp/ && sudo -u "${PSQL_USER}" "${PSQL_COMMAND}" -p "${PSQL_PORT}" -D "${DB_DIR}" --checkpoint="${PSQL_CHECKPOINT}" -Ft -z "${EXT_ARGS[@]}" 2>&1 | tee -a "${CACHE_FILE}"; then
        # Checking whether the directory was actually created
        # (additional security measure).
        if [ -d "${DB_DIR}" ]; then
            BACKUP_SUCCESS=true
            sync # flush written data to disk — important for NFS targets
            DUMP_SIZE=$(du -sh "${DB_DIR}" 2>/dev/null | cut -f1)
            echo -e "Files synced and compressed!\nDB dump size: ${DUMP_SIZE}" | tee -a "${CACHE_FILE}"
        fi
    fi
fi
|
||||
|
||||
# 6. Cleaning up old backups
if [ "$BACKUP_SUCCESS" = true ]; then
    echo "Cleaning old backups ${TMP_MSG} (Retention: ${RETENTION_DAYS} days)..." | tee -a "${CACHE_FILE}"

    # Collect candidates into an array (mapfile: no word-splitting on paths).
    mapfile -t OLD_BACKUPS < <(find "${MNT_POINT}" -mindepth 1 -maxdepth 1 -name "psql_db*" -mtime +"${RETENTION_DAYS}" -print)

    if [ ${#OLD_BACKUPS[@]} -gt 0 ]; then
        echo "Found ${#OLD_BACKUPS[@]} old backup(s) for deletion:" | tee -a "${CACHE_FILE}"
        for dir in "${OLD_BACKUPS[@]}"; do
            if [ "${DEBUG:-0}" -eq 1 ]; then
                echo "DEBUG: [DRY RUN] Would delete: $dir" | tee -a "${CACHE_FILE}"
            else
                echo "Deleting: $dir" | tee -a "${CACHE_FILE}"
                # Redirect rm's stderr BEFORE the pipe so deletion errors
                # reach the log too (the old `| tee ... 2>&1` only touched
                # tee's stderr). `--` guards against dash-leading names.
                rm -rfv -- "$dir" 2>&1 | tee -a "${CACHE_FILE}"
            fi
        done
        echo "Cleanup finished." | tee -a "${CACHE_FILE}"
    else
        echo "No old backups found older than ${RETENTION_DAYS} days." | tee -a "${CACHE_FILE}"
    fi

    sync
    send_report "SUCCESS"
else
    msg="❌ Backup process failed!"
    echo "$msg" | tee -a "${CACHE_FILE}"
    send_tg "$msg"
    send_report "ERROR"
    exit 1
fi


# 7. Complete and exit
exit 0
|
||||
Reference in New Issue
Block a user