Canton | Backup

# Go to the validator docker-compose directory
cd "$HOME/splice-node/docker-compose/validator"
# Fetch an admin token for the ledger API user
token=$(python get-token.py ledger-api-user)
# Back up the participant identities. The point of this step is to SAVE the
# result, so write the JSON response to a timestamped file instead of only
# printing it to the terminal (-sf: quiet, fail on HTTP errors).
curl -sf "http://wallet.localhost/api/validator/v0/admin/participant/identities" \
  -H "Authorization: Bearer $token" \
  -o "identities-$(date -u +%Y%m%dT%H%M%SZ).json"
2. Backup Postgres Data (always back up, at least every 4 hours)
# Create the backup directory (-p: no error if it already exists, so this
# section can be re-run safely)
mkdir -p "$HOME/canton-backup"
backup_dir="$HOME/canton-backup"
# Dump the validator app database into a timestamped (UTC) file
docker exec -i splice-validator-postgres-splice-1 pg_dump -U cnadmin validator > "${backup_dir}/validator-$(date -u +"%Y-%m-%dT%H:%M:%S%:z").dump"
# The active participant DB name lives in the participant container's
# environment; resolve it, then dump that database too.
active_participant_db=$(docker exec splice-validator-participant-1 bash -c 'echo $CANTON_PARTICIPANT_POSTGRES_DB')
docker exec splice-validator-postgres-splice-1 pg_dump -U cnadmin "${active_participant_db}" > "${backup_dir}/${active_participant_db}-$(date -u +"%Y-%m-%dT%H:%M:%S%:z").dump"
# Check the backup files exist and are non-empty
cd "$HOME/canton-backup"
ls -lah
# rsync is required for the off-site copy; install only if missing
command -v rsync >/dev/null 2>&1 || sudo apt install -y rsync
# Install the backup script (quoted heredoc: contents are written literally,
# nothing is expanded while writing the file)
cat > ~/backup_canton.sh << 'EOF'
#!/bin/bash
# ============================================
# Canton Validator Backup Script
# ============================================
# Dumps the validator and active participant Postgres databases, prunes
# old local copies, and mirrors the dumps to a remote host via rsync/ssh.
#
# -u: error on unset variables; pipefail: a pipeline fails if any stage
# fails. -e is deliberately NOT set: every critical step below is checked
# explicitly, and the ls|tail|xargs cleanup pipelines may legitimately
# return non-zero when there is nothing to delete.
set -uo pipefail

# Explicit PATH and HOME so the script behaves the same under cron
export PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
export HOME=/root

# --- Configuration (edit before first use) ---
VALIDATOR_NAME="your_val_name"
BACKUP_DIR="/root/canton-backup/${VALIDATOR_NAME}"
REMOTE_USER="username"
REMOTE_HOST="ip-server-backup"
REMOTE_DIR="/backup/canton/${VALIDATOR_NAME}"
SSH_KEY="/root/.ssh/canton_backup"
KEEP_LAST=2   # how many dumps of each database to keep (local and remote)
DOCKER="/usr/bin/docker"
RSYNC="/usr/bin/rsync"
SSH="/usr/bin/ssh"

mkdir -p "$BACKUP_DIR"
TIMESTAMP=$(date -u +"%Y-%m-%dT%H:%M:%S")
LOG="$BACKUP_DIR/backup.log"

echo "[$TIMESTAMP] Starting backup for ${VALIDATOR_NAME}..." | tee -a "$LOG"

# ============================================
# 1. Check docker is accessible
# ============================================
if ! "$DOCKER" ps > /dev/null 2>&1; then
  echo "[$TIMESTAMP] ERROR: Docker cannot be accessed" | tee -a "$LOG"
  exit 1
fi

# ============================================
# 2. Backup validator database
# ============================================
VALIDATOR_FILE="$BACKUP_DIR/validator-${TIMESTAMP}.dump"
# Test the command directly instead of inspecting $? afterwards, and
# require a non-empty dump file before declaring success.
if "$DOCKER" exec splice-validator-postgres-splice-1 \
    pg_dump -U cnadmin validator > "$VALIDATOR_FILE" \
    && [ -s "$VALIDATOR_FILE" ]; then
  echo "[$TIMESTAMP] validator backup OK: $(du -sh "$VALIDATOR_FILE" | cut -f1)" | tee -a "$LOG"
else
  echo "[$TIMESTAMP] ERROR: backup validator FAILED or empty file" | tee -a "$LOG"
  rm -f "$VALIDATOR_FILE"
  exit 1
fi

# ============================================
# 3. Backup participant database
# ============================================
# The active participant DB name lives in the participant container's env
ACTIVE_DB=$("$DOCKER" exec splice-validator-participant-1 \
  bash -c 'echo $CANTON_PARTICIPANT_POSTGRES_DB')
if [ -z "$ACTIVE_DB" ]; then
  echo "[$TIMESTAMP] ERROR: could not determine participant DB name" | tee -a "$LOG"
  exit 1
fi
PARTICIPANT_FILE="$BACKUP_DIR/${ACTIVE_DB}-${TIMESTAMP}.dump"
if "$DOCKER" exec splice-validator-postgres-splice-1 \
    pg_dump -U cnadmin "$ACTIVE_DB" > "$PARTICIPANT_FILE" \
    && [ -s "$PARTICIPANT_FILE" ]; then
  echo "[$TIMESTAMP] participant backup OK: $(du -sh "$PARTICIPANT_FILE" | cut -f1)" | tee -a "$LOG"
else
  echo "[$TIMESTAMP] ERROR: participant backup FAILED or empty file" | tee -a "$LOG"
  rm -f "$PARTICIPANT_FILE"
  exit 1
fi

# ============================================
# 4. Delete old local backups (keep the KEEP_LAST newest of each)
# ============================================
echo "[$TIMESTAMP] Cleaning old local backups..." | tee -a "$LOG"
ls -t "$BACKUP_DIR"/validator-*.dump 2>/dev/null | \
  tail -n +$((KEEP_LAST + 1)) | xargs -r rm -v >> "$LOG" 2>&1
ls -t "$BACKUP_DIR"/"${ACTIVE_DB}"-*.dump 2>/dev/null | \
  tail -n +$((KEEP_LAST + 1)) | xargs -r rm -v >> "$LOG" 2>&1

# ============================================
# 5. Send to remote server
# ============================================
echo "[$TIMESTAMP] Sending to ${REMOTE_HOST}:${REMOTE_DIR}..." | tee -a "$LOG"
"$SSH" -i "$SSH_KEY" -o StrictHostKeyChecking=no \
  "${REMOTE_USER}@${REMOTE_HOST}" \
  "mkdir -p ${REMOTE_DIR}" >> "$LOG" 2>&1
# A failed sync is only a warning: the local dumps are still good
if "$RSYNC" -avz \
    -e "$SSH -i $SSH_KEY -o StrictHostKeyChecking=no" \
    "$VALIDATOR_FILE" \
    "$PARTICIPANT_FILE" \
    "${REMOTE_USER}@${REMOTE_HOST}:${REMOTE_DIR}/" >> "$LOG" 2>&1; then
  echo "[$TIMESTAMP] Remote sync OK" | tee -a "$LOG"
  # Delete old backups on remote. KEEP_LAST/REMOTE_DIR/ACTIVE_DB are
  # expanded locally, before the command string is sent to the remote shell.
  "$SSH" -i "$SSH_KEY" -o StrictHostKeyChecking=no \
    "${REMOTE_USER}@${REMOTE_HOST}" "
      ls -t ${REMOTE_DIR}/validator-*.dump 2>/dev/null | \
        tail -n +$((KEEP_LAST + 1)) | xargs -r rm -v
      ls -t ${REMOTE_DIR}/${ACTIVE_DB}-*.dump 2>/dev/null | \
        tail -n +$((KEEP_LAST + 1)) | xargs -r rm -v
    " >> "$LOG" 2>&1
else
  echo "[$TIMESTAMP] WARNING: Remote sync FAILED" | tee -a "$LOG"
fi

echo "[$TIMESTAMP] Done. Local files:" | tee -a "$LOG"
ls -lh "$BACKUP_DIR"/*.dump 2>/dev/null | awk '{print $5, $9}' | tee -a "$LOG"
echo "---" >> "$LOG"
EOF
chmod +x ~/backup_canton.sh
# Generate a dedicated ed25519 keypair for the backup transfers
# (-N "": no passphrase, so cron can use the key unattended)
ssh-keygen -t ed25519 -C "canton-backup" -f ~/.ssh/canton_backup -N ""
# Install the public key on the backup server (replace user@server-ip)
ssh-copy-id -i ~/.ssh/canton_backup.pub user@server-ip
# Verify passwordless login works before relying on it in the script
ssh -i ~/.ssh/canton_backup user@server-ip "echo SSH OK"
4. Add to crontab (auto backup every 4 hours)
# Open the root crontab for editing
crontab -e
# Add this line: run the backup at minute 0 of every 4th hour
0 */4 * * * /root/backup_canton.sh >> /root/canton-backup/cron.log 2>&1
# Dry-run with a cron-like minimal environment to catch PATH/HOME issues
# before the first scheduled run
env -i HOME=/root PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin bash ~/backup_canton.sh
# Create the script non-interactively (quoted heredoc: written literally),
# same pattern as the canton backup script above
mkdir -p /opt/sync/oauth
cat > /opt/sync/oauth/backup-keycloak.sh << 'EOF'
#!/bin/bash
# ============================================
# Keycloak Backup Script
# Local + Remote
# ============================================
# Exports the Keycloak realms, archives the H2 data volume, mirrors both
# to a remote host, and prunes old copies locally and remotely.
#
# -u: error on unset variables; pipefail: a pipeline fails if any stage
# fails. -e is not set: every critical step is checked explicitly.
set -uo pipefail

# Explicit PATH so the script behaves the same under cron
PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin

# --- Config ---
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
BACKUP_DIR="/opt/sync/oauth/backup"
CONTAINER_NAME="canton-keycloak"
VOLUME_NAME="oauth_keycloak_data"
REMOTE_USER="root"
REMOTE_HOST="ip-server"
REMOTE_DIR="/backup/keycloak/party_hint"
SSH_KEY="/root/.ssh/keycloak_backup"
RETENTION_LOCAL=7    # days to keep local backups
RETENTION_REMOTE=14  # days to keep remote backups
LOG="/var/log/keycloak-backup.log"

# --- Log helper: timestamped line to stdout and the log file ---
log() {
  echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1" | tee -a "$LOG"
}

# --- Dependency validation ---
for cmd in docker rsync ssh; do
  if ! command -v "$cmd" &>/dev/null; then
    log "ERROR: $cmd not found. Abort."
    exit 1
  fi
done

# --- Validate the container is running ---
if ! docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
  log "ERROR: Container $CONTAINER_NAME not working. Abort."
  exit 1
fi

# --- Validate the SSH key exists ---
if [ ! -f "$SSH_KEY" ]; then
  log "ERROR: SSH key not found in $SSH_KEY. Abort."
  exit 1
fi

log "====== START BACKUP $TIMESTAMP ======"

# ============================================
# STEP 1: Export Realm JSON
# ============================================
log "[1/5] Exporting realm data..."
TEMP_EXPORT="/tmp/kc-backup-$TIMESTAMP"
if ! docker exec "$CONTAINER_NAME" /opt/keycloak/bin/kc.sh export \
    --dir "$TEMP_EXPORT" \
    --users realm_file >> "$LOG" 2>&1; then
  log "ERROR: Realm export failed!"
  # Remove any partial export left inside the container
  docker exec "$CONTAINER_NAME" rm -rf "$TEMP_EXPORT"
  exit 1
fi
mkdir -p "$BACKUP_DIR/realm_$TIMESTAMP"
docker cp "$CONTAINER_NAME:$TEMP_EXPORT/." "$BACKUP_DIR/realm_$TIMESTAMP/" >> "$LOG" 2>&1
docker exec "$CONTAINER_NAME" rm -rf "$TEMP_EXPORT"
log "Realm export OK → $BACKUP_DIR/realm_$TIMESTAMP/"

# ============================================
# STEP 2: Backup the H2 data volume
# ============================================
log "[2/5] Backing up H2 volume..."
if ! docker run --rm \
    -v "${VOLUME_NAME}:/data" \
    -v "${BACKUP_DIR}:/backup" \
    alpine tar czf "/backup/keycloak_volume_${TIMESTAMP}.tar.gz" -C /data . >> "$LOG" 2>&1; then
  log "ERROR: Volume backup failed!"
  exit 1
fi
log "Volume backup OK → $BACKUP_DIR/keycloak_volume_${TIMESTAMP}.tar.gz"

# ============================================
# STEP 3: Transfer to remote server
# ============================================
log "[3/5] Transferring to remote $REMOTE_HOST..."
# Make sure the remote folder exists (also verifies connectivity)
if ! ssh -i "$SSH_KEY" \
    -o StrictHostKeyChecking=no \
    -o ConnectTimeout=30 \
    "$REMOTE_USER@$REMOTE_HOST" \
    "mkdir -p $REMOTE_DIR" >> "$LOG" 2>&1; then
  log "ERROR: Cannot connect to remote server $REMOTE_HOST!"
  exit 1
fi
# Transfer realm JSON
rsync -avz \
  -e "ssh -i $SSH_KEY -o StrictHostKeyChecking=no -o ConnectTimeout=30" \
  "$BACKUP_DIR/realm_$TIMESTAMP/" \
  "$REMOTE_USER@$REMOTE_HOST:$REMOTE_DIR/realm_$TIMESTAMP/" >> "$LOG" 2>&1
realm_rc=$?
# Transfer volume tar.gz
rsync -avz \
  -e "ssh -i $SSH_KEY -o StrictHostKeyChecking=no -o ConnectTimeout=30" \
  "$BACKUP_DIR/keycloak_volume_${TIMESTAMP}.tar.gz" \
  "$REMOTE_USER@$REMOTE_HOST:$REMOTE_DIR/" >> "$LOG" 2>&1
volume_rc=$?
# Require BOTH transfers to succeed (previously only the second rsync's
# exit status was checked, so a failed realm transfer went unnoticed)
if [ "$realm_rc" -eq 0 ] && [ "$volume_rc" -eq 0 ]; then
  log "Transfer OK → $REMOTE_HOST:$REMOTE_DIR"
else
  log "ERROR: Transfer FAILED to remote server!"
  exit 1
fi

# ============================================
# STEP 4: Local cleanup
# ============================================
log "[4/5] Cleaning local backups older than $RETENTION_LOCAL days..."
find "$BACKUP_DIR" -name "keycloak_volume_*.tar.gz" -mtime +"$RETENTION_LOCAL" -delete
find "$BACKUP_DIR" -maxdepth 1 -name "realm_*" -type d -mtime +"$RETENTION_LOCAL" -exec rm -rf {} + 2>/dev/null
log "Local cleanup OK"

# ============================================
# STEP 5: Remote cleanup
# ============================================
log "[5/5] Cleaning remote backups older than $RETENTION_REMOTE days..."
# RETENTION_REMOTE and REMOTE_DIR are expanded locally, before the
# command string is sent to the remote shell
ssh -i "$SSH_KEY" \
  -o StrictHostKeyChecking=no \
  -o ConnectTimeout=30 \
  "$REMOTE_USER@$REMOTE_HOST" \
  "find $REMOTE_DIR -name 'keycloak_volume_*.tar.gz' -mtime +$RETENTION_REMOTE -delete 2>/dev/null; \
   find $REMOTE_DIR -maxdepth 1 -name 'realm_*' -type d -mtime +$RETENTION_REMOTE -exec rm -rf {} + 2>/dev/null; \
   echo 'Remote cleanup OK'" >> "$LOG" 2>&1

log "====== BACKUP DONE ======"
echo "" >> "$LOG"
EOF
chmod +x /opt/sync/oauth/backup-keycloak.sh
# Generate a dedicated ed25519 keypair for the Keycloak backup transfers
# (-N "": no passphrase, so cron can use the key unattended)
ssh-keygen -t ed25519 -C "keycloak-backup" -f ~/.ssh/keycloak_backup -N ""
# Install the public key on the backup server (replace ip-server)
ssh-copy-id -i ~/.ssh/keycloak_backup.pub root@ip-server
# Dry-run with an empty environment to mimic cron; the script sets its own
# PATH, so only HOME is provided here
env -i HOME=/root /bin/bash /opt/sync/oauth/backup-keycloak.sh
# Confirm the backups arrived on the remote server
ssh -i ~/.ssh/keycloak_backup root@ip-server "ls -lh /backup/keycloak/party_hint"
# Open the root crontab and add the schedule below
crontab -e
# Keycloak backup every 12 hours (06:00 & 18:00)
0 6,18 * * * /opt/sync/oauth/backup-keycloak.sh >> /var/log/keycloak-backup.log 2>&1

lastUpdated: 2026-03-18