This guide provides practical instructions for setting up, configuring, and managing automated database backups using ObjBackup - a robust backup framework for MariaDB/MySQL databases.
ObjBackup provides comprehensive backup functionality:
Prerequisite: install the 7-Zip archiver (sudo apt install p7zip-full).
INSERT INTO def_Backup (
Name,
Package,
Remote,
Tablemask,
Backupfolder,
Description
) VALUES (
'daily_full_backup',
'factory.core',
'PRIMARY', -- Use primary database from config.yaml
'*', -- Backup all tables
'local.documents/backup/daily',
'Daily full database backup'
);
# Stage backup jobs
dev-env/bin/python factory.core/ObjBackup.py prestage
# Execute all staged backups
dev-env/bin/python factory.core/ObjBackup.py backupall
# Or run specific backup
dev-env/bin/python factory.core/ObjBackup.py backup daily_full_backup
from ObjBackup import ObjBackup

# Initialize backup
backup = ObjBackup(0)

# Read backup configuration
backup.read_backup(
    backupcode="daily_full_backup",
    database="my_database"
)

# Execute backup
status, size = backup.pack(context={})
if status == "DONE":
    print(f"Backup completed successfully: {size} bytes")
else:
    # Include the failure status so the operator can see why it failed
    # (the original printed a static message via a pointless f-string).
    print(f"Backup failed with status: {status}")
Control which tables to include in backups using Tablemask:
-- Backup all tables
Tablemask = '*'
-- Backup only definition tables
Tablemask = 'def_*'
-- Backup multiple patterns (comma-separated)
Tablemask = 'def_*,data_*,sys_*'
-- Exclude system databases (automatic)
-- Excludes: information_schema, performance_schema, mysql, sys
Backup only user data:
INSERT INTO def_Backup (
Name,
Package,
Remote,
Tablemask,
Backupfolder
) VALUES (
'user_data_backup',
'factory.core',
'PRIMARY',
'data_*', -- Only tables starting with 'data_'
'local.documents/backup/userdata'
);
Backup configuration tables:
INSERT INTO def_Backup (
Name,
Package,
Remote,
Tablemask,
Backupfolder
) VALUES (
'config_backup',
'factory.core',
'PRIMARY',
'def_*,sys_*', -- Definition and system tables
'local.documents/backup/config'
);
In config.yaml:
remote_databases:
PRODUCTION_DB:
host: "prod-db.example.com"
port: 3306
user: "backup_user"
password: "encrypted_password" # Encrypted with ObjEncryption
database: "production"
INSERT INTO def_Backup (
Name,
Package,
Remote,
Tablemask,
Backupfolder
) VALUES (
'production_backup',
'factory.core',
'PRODUCTION_DB', -- References remote_databases key in config.yaml
'*',
'local.documents/backup/production'
);
Set Database field to a pattern to auto-discover databases:
INSERT INTO def_Backup (
    Name,
    Package,
    Remote,
    `Database`,  -- Discovery pattern; backticks required: DATABASE is a reserved word in MySQL/MariaDB
    Tablemask,
    Backupfolder
) VALUES (
    'backup_all_customer_dbs',
    'factory.core',
    'CUSTOMER_SERVER',
    'customer_%',  -- Discovers all databases matching pattern
    'def_*,data_*',
    'local.documents/backup/customers'
);
# Edit crontab
crontab -e
# Add daily backup at 2 AM
0 2 * * * cd /path/to/axion && dev-env/bin/python factory.core/ObjBackup.py backupall
# Add weekly full backup on Sunday at 3 AM
0 3 * * 0 cd /path/to/axion && dev-env/bin/python factory.core/ObjBackup.py backup weekly_full
INSERT INTO def_scheduler (
Name,
Package,
Schedule,
Action,
ActionParams
) VALUES (
'daily_database_backup',
'factory.core',
'0 2 * * *', -- Daily at 2 AM
'BACKUP',
'{"backup_name": "daily_full_backup"}'
);
-- Define workflow node for backup
INSERT INTO def_workflows (
Workflow,
Package,
Name,
Rank,
NodeUp,
NodeUpExtra,
BranchDirect
) VALUES (
'nightly_maintenance',
'factory.core',
'backup_databases',
5,
'SERVICE',
'ObjBackup.BackupSet', -- Calls BackupSet service
'send_backup_report'
);
-- View recent backups (last 7 days, newest first)
SELECT
    `Database`,  -- backticks required: DATABASE is a reserved word in MySQL/MariaDB
    Backupcode,
    Status,
    Filesize,
    Created,
    CONCAT(ROUND(Filesize / 1024 / 1024, 2), ' MB') AS size_mb
FROM stage_backup
WHERE Created >= DATE_SUB(NOW(), INTERVAL 7 DAY)
ORDER BY Created DESC;
-- View backup success rate
SELECT
Backupcode,
COUNT(*) as total_backups,
SUM(CASE WHEN Status = 'DONE' THEN 1 ELSE 0 END) as successful,
SUM(CASE WHEN Status = 'ERROR' THEN 1 ELSE 0 END) as failed,
MAX(Created) as last_backup,
CONCAT(ROUND(AVG(Filesize) / 1024 / 1024, 2), ' MB') as avg_size
FROM stage_backup
GROUP BY Backupcode
ORDER BY last_backup DESC;
from ObjBackup import ObjBackup

# Query failed backups from the last 24 hours.
# NOTE(review): assumes `db` is an open database connection exposing a
# .query() method — confirm against the surrounding application code.
# `Database` is backtick-quoted because DATABASE is a reserved word in
# MySQL/MariaDB; the unquoted form fails to parse.
failed_backups = db.query("""
    SELECT Guid, Backupcode, `Database`
    FROM stage_backup
    WHERE Status = 'ERROR'
      AND Created >= DATE_SUB(NOW(), INTERVAL 1 DAY)
""")

# Retry each failed backup with a fresh ObjBackup instance.
for backup_info in failed_backups:
    backup = ObjBackup(0)
    backup.read_backup(
        guid=backup_info['Guid'],
        backupcode=backup_info['Backupcode'],
        database=backup_info['Database']
    )
    status, size = backup.pack(context={})
    print(f"{backup_info['Backupcode']}: {status}")
Interactive archive (with prompt to drop database):
# Interactive selection
dev-env/bin/python factory.core/ObjBackup.py archive
# Specific database
dev-env/bin/python factory.core/ObjBackup.py archive my_database
Programmatic archive:
from ObjBackup import ObjBackup
backup = ObjBackup(0)
backup.archive("my_database")
# Creates: archive.documents/backup/my_database_YYYYMMDD_HHMMSS.7z
# Extract backup archive
7z x local.documents/backup/daily/my_database_20251226_020000.7z
# Restore to database
mysql -u root -p my_database < my_database_20251226_020000.sql
# Or restore with progress
pv my_database_20251226_020000.sql | mysql -u root -p my_database
# Extract backup
7z x backup_file.7z
# Create database if needed
mysql -u root -p -e "CREATE DATABASE my_database_restored;"
# Restore
mysql -u root -p my_database_restored < backup_file.sql
# Verify data
mysql -u root -p my_database_restored -e "SELECT COUNT(*) FROM important_table;"
Use param1-param9 for custom backup logic:
INSERT INTO def_Backup (
Name,
Package,
Remote,
Tablemask,
Backupfolder,
Param1, -- Custom retention days
Param2, -- Email notification address
Param3 -- Backup encryption key
) VALUES (
'secure_backup',
'factory.core',
'PRIMARY',
'*',
'local.documents/backup/secure',
'30', -- Param1: Retain for 30 days
'admin@company.com', -- Param2: Notification email
'ENCRYPTION_KEY_ID' -- Param3: Encryption key
);
Access parameters in code:
backup = ObjBackup(0)
backup.read_backup(backupcode="secure_backup", database="mydb")
retention_days = backup.param1 # "30"
notification_email = backup.param2 # "admin@company.com"
encryption_key = backup.param3 # "ENCRYPTION_KEY_ID"
Create factory module for custom logic:
# factory.backup/SecureBackupFactory.py
class SecureBackupFactory:
    """Hook container for custom backup behavior.

    Wraps a backup object and exposes pre/post hooks that the backup
    runner invokes around the actual dump. The encrypt/upload/notify
    methods are stubs to be filled in per deployment.
    """

    def __init__(self, backup_obj):
        # Keep a handle to the owning backup object for later use.
        self.backup = backup_obj

    def pre_backup(self):
        """Run before backup starts."""
        print("Starting secure backup...")
        # Custom pre-processing logic goes here.

    def post_backup(self, backup_file):
        """Run after backup completes: encrypt, ship offsite, notify."""
        self.encrypt_file(backup_file)
        self.upload_to_s3(backup_file)
        self.notify_admin()

    def encrypt_file(self, file_path):
        """Encrypt the archive (stub — implement encryption logic)."""
        pass

    def upload_to_s3(self, file_path):
        """Push the archive to cloud storage (stub — implement S3 upload)."""
        pass

    def notify_admin(self):
        """Send a completion notification email (stub)."""
        pass
Use factory in backup:
backup = ObjBackup(0)
backup.read_backup(backupcode="secure_backup", database="mydb")
# Load custom factory
factory = backup.factory_backup("SecureBackupFactory")
if factory:
factory.pre_backup()
status, size = backup.pack(context={})
if factory and status == "DONE":
factory.post_backup(backup.backup_file)
import os
def verify_backup(backup_file):
    """Verify backup file integrity.

    Checks that the archive exists, has a plausible minimum size, and
    passes a 7z integrity test.

    Args:
        backup_file: Path to the .7z backup archive.

    Returns:
        (bool, str): validity flag and a human-readable message.
    """
    import shlex  # local import keeps this doc snippet self-contained

    # Check file exists
    if not os.path.exists(backup_file):
        return False, "Backup file not found"

    # Check minimum size (4KB) — anything smaller is almost certainly an
    # empty/failed dump rather than a real database archive.
    size = os.path.getsize(backup_file)
    if size < 4096:
        return False, f"Backup too small: {size} bytes"

    # Test 7z extraction; quote the path so spaces or shell
    # metacharacters in the file name cannot break (or inject into)
    # the command.
    test_result = os.system(f"7z t {shlex.quote(backup_file)} > /dev/null 2>&1")
    if test_result != 0:
        return False, "7z archive corrupted"

    return True, f"Backup valid: {size} bytes"
# Use verification
backup_file = "local.documents/backup/daily/mydb_20251226_020000.7z"
valid, message = verify_backup(backup_file)
print(f"Verification: {message}")
from ObjBackup import ObjBackup
import tempfile
import os
def test_backup_restore(backup_code, test_database):
    """Test a full backup-and-restore cycle.

    Creates a backup via ObjBackup, extracts it, restores it into a
    scratch database, and cleans up.

    Args:
        backup_code: Name of the def_Backup configuration to run.
        test_database: Database to back up and restore.

    Returns:
        (bool, str): success flag and a status message.
    """
    import shlex  # quote paths used in shell commands

    # Step 1: Create backup
    backup = ObjBackup(0)
    backup.read_backup(backupcode=backup_code, database=test_database)
    status, size = backup.pack(context={})
    if status != "DONE":
        return False, "Backup failed"

    # Steps 2-5 must run inside the TemporaryDirectory context: the
    # extracted SQL file is deleted the moment the context exits.
    with tempfile.TemporaryDirectory() as tmpdir:
        # Step 2: Extract to temp directory (quote paths for safety)
        os.system(f"7z x {shlex.quote(backup.backup_file)} -o{shlex.quote(tmpdir)}")

        # Step 3: Verify SQL file exists
        sql_file = os.path.join(tmpdir, f"{test_database}.sql")
        if not os.path.exists(sql_file):
            return False, "SQL file not found in archive"

        # Step 4: Test restore to a scratch database
        test_db = f"{test_database}_test"
        os.system(f"mysql -e 'DROP DATABASE IF EXISTS {test_db}'")
        os.system(f"mysql -e 'CREATE DATABASE {test_db}'")
        restore_result = os.system(f"mysql {test_db} < {shlex.quote(sql_file)}")
        if restore_result != 0:
            return False, "Restore failed"

        # Step 5: Verify data
        # (Add specific data verification queries)

        # Cleanup the scratch database
        os.system(f"mysql -e 'DROP DATABASE {test_db}'")

    return True, f"Backup test successful: {size} bytes"
import os
from datetime import datetime, timedelta
def cleanup_old_backups(backup_folder, retention_days=30):
    """Delete .7z backup archives older than the retention period.

    Args:
        backup_folder: Directory containing backup archives.
        retention_days: Files whose mtime is older than this many days
            are removed (default 30).
    """
    cutoff_date = datetime.now() - timedelta(days=retention_days)
    for filename in os.listdir(backup_folder):
        # Only backup archives are subject to retention; skip other files.
        if not filename.endswith('.7z'):
            continue
        file_path = os.path.join(backup_folder, filename)
        file_time = datetime.fromtimestamp(os.path.getmtime(file_path))
        if file_time < cutoff_date:
            os.remove(file_path)
            # Bug fix: the original printed the literal text "(unknown)"
            # instead of the deleted file's name.
            print(f"Deleted old backup: {filename}")
def implement_321_backup(backup_file):
    """Implement the 3-2-1 backup strategy.

    Copy 1 is the local backup itself (already created by the caller);
    this routine produces copy 2 on secondary local storage and copy 3
    offsite in the cloud.
    """
    # Copy 2: Secondary local storage
    mirror_cmd = f"cp {backup_file} /mnt/backup_drive/"
    os.system(mirror_cmd)
    # Copy 3: Offsite (cloud)
    upload_to_s3(backup_file)


def upload_to_s3(file_path):
    """Upload backup to S3 (stub — implement with an S3 client)."""
    pass
from ObjNotify import ObjNotify
def monitor_backups():
    """Send daily backup status report via ObjNotify."""
    # Gather the status rows and render them as an HTML report.
    status_rows = query_backup_status()
    html_report = generate_backup_report(status_rows)
    # Dispatch the report through the notification service.
    notifier = ObjNotify(0)
    notifier.Run(
        notify_code="BACKUP_STATUS",
        message_text=html_report
    )
def generate_backup_report(backups):
    """Generate an HTML backup status report.

    Args:
        backups: Iterable of dicts with keys 'Database', 'Status',
            'size_mb', and 'Created'.

    Returns:
        str: HTML fragment — a heading followed by a status table with
        DONE rows tinted green and everything else red.
    """
    parts = ["<h2>Daily Backup Status</h2><table>"]
    parts.append("<tr><th>Database</th><th>Status</th><th>Size</th><th>Time</th></tr>")
    for row in backups:
        tint = "green" if row['Status'] == 'DONE' else "red"
        parts.append(
            "<tr>"
            f"<td>{row['Database']}</td>"
            f"<td style='color:{tint}'>{row['Status']}</td>"
            f"<td>{row['size_mb']} MB</td>"
            f"<td>{row['Created']}</td>"
            "</tr>"
        )
    parts.append("</table>")
    return "".join(parts)
# Secure backup file permissions
os.chmod(backup_file, 0o600) # Owner read/write only
# Encrypt sensitive backups
def encrypt_backup(backup_file, encryption_key):
    """Encrypt a backup file with AES-256-CBC via openssl.

    Args:
        backup_file: Path to the plaintext backup archive.
        encryption_key: Passphrase passed to openssl's -k option.

    Returns:
        str: Path of the encrypted file (original path + ".encrypted").

    Raises:
        RuntimeError: If the openssl command fails. The unencrypted file
            is left in place in that case — the original code deleted it
            unconditionally, destroying the only copy on failure.
    """
    import shlex  # quote shell arguments so paths/keys are safe

    encrypted_file = f"{backup_file}.encrypted"
    # SECURITY NOTE: -k exposes the key in `ps` output on shared hosts;
    # prefer `-pass file:...` for production use.
    cmd = (
        "openssl enc -aes-256-cbc -salt "
        f"-in {shlex.quote(backup_file)} "
        f"-out {shlex.quote(encrypted_file)} "
        f"-k {shlex.quote(encryption_key)}"
    )
    if os.system(cmd) != 0:
        raise RuntimeError(f"Encryption failed for {backup_file}")
    os.remove(backup_file)  # Remove plaintext only after success
    return encrypted_file
Check database user permissions:
GRANT SELECT, LOCK TABLES, SHOW VIEW, EVENT, TRIGGER
ON database_name.* TO 'backup_user'@'%';
FLUSH PRIVILEGES;
Solutions:
-- 1. Use table filtering
UPDATE def_Backup
SET Tablemask = 'def_*,data_*' -- Exclude large log tables
WHERE Name = 'daily_backup';
-- 2. Backup incrementally
-- Create separate backups for large tables
INSERT INTO def_Backup (Name, Package, Remote, Tablemask, Backupfolder)
VALUES ('large_tables_backup', 'factory.core', 'PRIMARY', 'log_*', 'local.documents/backup/logs');
Optimize performance:
# 1. Disable foreign key checks during restore
# Add to backup SQL:
# SET FOREIGN_KEY_CHECKS=0;
# ... data ...
# SET FOREIGN_KEY_CHECKS=1;
# 2. Use parallel compression
os.system(f"7z a -mmt4 {archive_file} {sql_file}") # 4 threads
# 3. Exclude unnecessary tables
Tablemask = 'def_*,data_*,!log_*,!temp_*' # Exclude logs and temp
Check character set:
# Extract and check SQL file encoding
7z x backup.7z
file -i backup.sql
# Convert if needed
iconv -f ISO-8859-1 -t UTF-8 backup.sql > backup_utf8.sql
# Restore with explicit charset
mysql --default-character-set=utf8mb4 mydb < backup.sql
from ObjBackup import BackupSet
from ObjNotify import ObjNotify
from datetime import datetime
import os
class EnterpriseBackupSystem:
    """Orchestrates the nightly backup routine: stage, run, verify,
    prune expired archives, and email a status report."""

    def __init__(self):
        # Service handles for backup execution and notifications.
        self.backup_set = BackupSet(0)
        self.notify = ObjNotify(0)

    def run_daily_backups(self):
        """Execute daily backup routine."""
        print(f"=== Starting Daily Backups: {datetime.now()} ===")
        # 1. Stage all configured backup jobs.
        self.backup_set.pre_stage()
        # 2. Execute the staged backups.
        context = {}
        result = self.backup_set.run_workflow_direct(context)
        # 3. Validate every archive produced today.
        verification_results = self.verify_all_backups()
        # 4. Prune archives past the retention window.
        self.cleanup_expired_backups(retention_days=30)
        # 5. Email the status report.
        self.send_backup_report(verification_results)
        print("=== Backup Routine Complete ===")
        return result

    def verify_all_backups(self):
        """Verify all recent backups; return {database: result dict}.

        NOTE(review): relies on get_todays_backups() and
        get_backup_file_path(), which are not defined in this snippet —
        confirm they exist on this class elsewhere.
        """
        outcomes = {}
        for entry in self.get_todays_backups():
            archive_path = self.get_backup_file_path(entry)
            ok, detail = verify_backup(archive_path)
            outcomes[entry['Database']] = {
                "valid": ok,
                "message": detail,
                "size": entry['Filesize']
            }
        return outcomes

    def cleanup_expired_backups(self, retention_days=30):
        """Remove backups older than the retention period."""
        for folder in (
            "local.documents/backup/daily",
            "local.documents/backup/weekly",
        ):
            cleanup_old_backups(folder, retention_days)

    def send_backup_report(self, verification_results):
        """Send the backup status notification."""
        self.notify.Run(
            notify_code="BACKUP_DAILY_REPORT",
            message_text=self.format_backup_report(verification_results)
        )

    def format_backup_report(self, results):
        """Format the verification results as an HTML report."""
        total = len(results)
        successful = sum(1 for r in results.values() if r['valid'])
        pieces = [
            f"<h2>Backup Report - {datetime.now().strftime('%Y-%m-%d')}</h2>",
            f"<p>Total Backups: {total} | Successful: {successful} | Failed: {total - successful}</p>",
            "<table border='1'>",
            "<tr><th>Database</th><th>Status</th><th>Size (MB)</th><th>Message</th></tr>",
        ]
        for db, result in results.items():
            status = "✓ SUCCESS" if result['valid'] else "✗ FAILED"
            color = "green" if result['valid'] else "red"
            size_mb = round(result['size'] / 1024 / 1024, 2)
            pieces.append(
                "<tr>"
                f"<td>{db}</td>"
                f"<td style='color:{color}'>{status}</td>"
                f"<td>{size_mb}</td>"
                f"<td>{result['message']}</td>"
                "</tr>"
            )
        pieces.append("</table>")
        return "".join(pieces)
# Run enterprise backup system.
# Guarded so importing this module does not trigger a backup run.
if __name__ == "__main__":
    backup_system = EnterpriseBackupSystem()
    backup_system.run_daily_backups()