Amazon EC2
aws ec2 run-instances
--image-id ami-12345678
--instance-type t3.micro
--user-data file://script.yaml

Sets up automated backups using rsync and cron.
#cloud-config
# Installs backup tooling and a daily cron job that archives key system
# directories (plus MySQL, when mysqldump is present) into /opt/backups.
package_update: true
package_upgrade: true

packages:
- rsync
- cron
- gzip
- tar

write_files:
# Backup script: tars system dirs, dumps MySQL if installed, prunes old files.
- path: /usr/local/bin/backup-script.sh
  permissions: '0755'
  content: |
    #!/bin/bash
    # Backup configuration
    BACKUP_DIR="/opt/backups"
    REMOTE_HOST="backup.example.com"
    REMOTE_USER="backup"
    REMOTE_PATH="/backups/$(hostname)"
    DATE=$(date +%Y%m%d_%H%M%S)

    # Create backup directory
    mkdir -p "$BACKUP_DIR"

    # Backup system files
    tar -czf "$BACKUP_DIR/system_$DATE.tar.gz" \
      /etc \
      /home \
      /var/www \
      /opt \
      --exclude="/opt/backups" \
      2>/dev/null

    # Backup databases (if MySQL is installed)
    if command -v mysqldump >/dev/null 2>&1; then
      mysqldump --all-databases --single-transaction > "$BACKUP_DIR/mysql_$DATE.sql"
      gzip "$BACKUP_DIR/mysql_$DATE.sql"
    fi

    # Sync to remote server (uncomment and configure)
    # rsync -avz --delete "$BACKUP_DIR/" "$REMOTE_USER@$REMOTE_HOST:$REMOTE_PATH/"

    # Clean old backups (keep last 7 days)
    find "$BACKUP_DIR" -name "*.tar.gz" -mtime +7 -delete
    find "$BACKUP_DIR" -name "*.sql.gz" -mtime +7 -delete

    echo "Backup completed: $(date)"

# Cron drop-in: /etc/cron.d entries require the user field (root) before
# the command; the block scalar keeps the trailing newline cron needs.
- path: /etc/cron.d/backup
  content: |
    # Run backup daily at 2 AM
    0 2 * * * root /usr/local/bin/backup-script.sh >> /var/log/backup.log 2>&1

runcmd:
- mkdir -p /opt/backups
- systemctl enable cron
- systemctl start cron

aws ec2 run-instances
--image-id ami-12345678
--instance-type t3.micro
--user-data file://script.yaml

doctl compute droplet create
--image ubuntu-22-04-x64
--size s-1vcpu-1gb
--user-data-file script.yaml
my-droplet

gcloud compute instances create
my-instance
--metadata-from-file
user-data=script.yaml