diff --git a/nas-gdrive-backup.sh b/nas-gdrive-backup.sh
new file mode 100644
index 0000000..09433a6
--- /dev/null
+++ b/nas-gdrive-backup.sh
@@ -0,0 +1,258 @@
+#!/bin/bash
+
+# NAS backup script to Google Drive using rclone and 7zip.
+# For each folder in BACKUP_SOURCE it computes a sha256 checksum, compares it
+# with the checksum recorded for the previous backup and, if it has changed,
+# creates an encrypted 7zip archive of the folder and uploads it to Google
+# Drive with rclone.
+
+# Configuration
+BACKUP_SOURCE="/export/Backup"
+META_DIR="/export/Backup/.gdrive"
+TMP_DIR="/export/Backup/.gdrive/tmp"
+ZIP_PASSWORD="password"
+GDRIVE_REMOTE="gdrive"
+GDRIVE_PATH="/NAS-Backups"
+ARCHIVE_NAME="backup.7z"
+LOG_FILE="/var/log/nas-gdrive-backup.log"
+
+# Installer helpers. They are defined before the --install check below so that
+# install_script already exists when it is called.
+
+install_log_ok() {
+    echo -e "\e[32m[✓]\e[0m $1"
+}
+install_log_error() {
+    echo -e "\e[31m[✗]\e[0m $1"
+}
+install_log_info() {
+    echo -e "\e[34m[!]\e[0m $1"
+}
+install_log_separator() {
+    echo -e "\e[36m========================================\e[0m"
+}
+
+# Install a dependency if it is not already installed
+install_dependency() {
+    local package="$1"
+    if ! dpkg -l "$package" 2>/dev/null | grep -q '^ii'; then
+        install_log_info "Installing $package"
+        apt-get update && apt-get install -y "$package"
+        if [ $? -ne 0 ]; then
+            install_log_error "ERROR: Failed to install $package"
+            exit 1
+        fi
+        install_log_ok "$package installed successfully"
+    else
+        install_log_ok "$package is already installed"
+    fi
+}
+
+install_script() {
+    echo -e ""
+    install_log_separator
+    install_log_info "Starting installation of NAS to Google Drive backup script"
+    install_log_separator
+    echo -e ""
+
+    install_log_separator
+    # Check if running as root
+    if [ "$(id -u)" -ne 0 ]; then
+        install_log_error "ERROR: This script must be run as root"
+        exit 1
+    fi
+    install_log_separator
+
+    # Check for dependencies
+    install_log_info "Checking for required dependencies"
+    install_dependency "rclone"
+    install_dependency "p7zip-full"
+    install_log_separator
+
+    # Check if crontab exists
+    install_log_info "Checking if crontab is installed"
+    if ! command -v crontab &>/dev/null; then
+        install_log_error "crontab is not installed"
+        exit 1
+    else
+        install_log_ok "crontab is installed"
+    fi
+    install_log_separator
+
+    install_log_info "Installing script to /usr/local/bin/nas-gdrive-backup.sh"
+    if ! curl -fsSL https://git.ivanch.me/ivanch/server-scripts/raw/branch/main/nas-gdrive-backup.sh -o /usr/local/bin/nas-gdrive-backup.sh; then
+        install_log_error "ERROR: Failed to download the script"
+        exit 1
+    fi
+    chmod +x /usr/local/bin/nas-gdrive-backup.sh
+
+    install_log_info "Setting ZIP_PASSWORD in /usr/local/bin/nas-gdrive-backup.sh"
+    # Read from /dev/tty so this also works when the script is piped into bash
+    read -r -s -p "Enter ZIP_PASSWORD: " ZIP_PASSWORD < /dev/tty
+    echo ""
+    if [ -z "$ZIP_PASSWORD" ]; then
+        install_log_error "ERROR: ZIP_PASSWORD cannot be empty"
+        exit 1
+    fi
+    # Update the ZIP_PASSWORD in the installed copy of the script
+    sed -i "s|^ZIP_PASSWORD=.*|ZIP_PASSWORD=\"$ZIP_PASSWORD\"|" /usr/local/bin/nas-gdrive-backup.sh
+    install_log_ok "ZIP_PASSWORD updated in /usr/local/bin/nas-gdrive-backup.sh"
+    install_log_separator
+
+    # Check for existence of source directories
+    install_log_info "Checking if BACKUP_SOURCE, META_DIR and TMP_DIR exist"
+    if ! [ -d "$BACKUP_SOURCE" ]; then
+        install_log_error "ERROR: BACKUP_SOURCE directory does not exist"
+        exit 1
+    else
+        install_log_ok "BACKUP_SOURCE directory exists: $BACKUP_SOURCE"
+    fi
+    if ! [ -d "$META_DIR" ]; then
+        install_log_info "Creating META_DIR: $META_DIR"
+        mkdir -p "$META_DIR"
+    fi
+    if ! [ -d "$TMP_DIR" ]; then
+        install_log_info "Creating TMP_DIR: $TMP_DIR"
+        mkdir -p "$TMP_DIR"
+    fi
+    install_log_info "Setting permissions for $META_DIR and $TMP_DIR to 777"
+    chmod -R 777 "$META_DIR" "$TMP_DIR"
+    install_log_separator
+
+    install_log_info "Checking rclone configuration"
+    if ! rclone config show gdrive &>/dev/null; then
+        install_log_error "ERROR: rclone gdrive remote is not configured"
+        install_log_error "Please run 'rclone config' to set up your Google Drive remote"
+        exit 1
+    fi
+    install_log_ok "rclone gdrive remote is configured"
+    install_log_separator
+
+    install_log_info "Setting up cron job for backup script"
+    (crontab -l 2>/dev/null; echo "30 23 * * 1,5 /usr/local/bin/nas-gdrive-backup.sh > /tmp/nas-gdrive-backup.log") | crontab -
+    install_log_ok "Cron job set up to run /usr/local/bin/nas-gdrive-backup.sh every Monday and Friday at 23:30"
+    install_log_separator
+
+    echo -e ""
+    install_log_separator
+    install_log_ok "Installation completed successfully!"
+    install_log_separator
+    echo -e ""
+    echo -e "You can now run the script manually with: \e[32m/usr/local/bin/nas-gdrive-backup.sh\e[0m"
+    echo -e "Or it will run automatically according to the cron schedule."
+
+    # Exit with success
+    exit 0
+}
+
+# Check for install flag
+# Usage: curl -sSL https://git.ivanch.me/ivanch/server-scripts/raw/branch/main/nas-gdrive-backup.sh | bash -s -- --install
+if [[ "$1" == "--install" ]]; then
+    install_script
+    exit 0
+fi
+
+# Function for logging
+log() {
+    echo "$(date '+%Y-%m-%d %H:%M:%S') - $1" | tee -a "$LOG_FILE"
+}
+
+clean_up() {
+    log "Cleaning up temporary files"
+    rm -rf "$TMP_DIR"
+}
+
+trap clean_up EXIT
+
+# Check if 7z is installed
+if ! command -v 7z &>/dev/null; then
+    log "ERROR: 7z is not installed"
+    exit 1
+fi
+
+# Check if rclone is installed
+if ! command -v rclone &>/dev/null; then
+    log "ERROR: rclone is not installed"
+    exit 1
+fi
+
+# Create meta directory if it doesn't exist
+if [ ! -d "$META_DIR" ]; then
+    log "Creating meta directory: $META_DIR"
+    mkdir -p "$META_DIR"
+fi
+
+# TMP_DIR is removed by clean_up on every exit, so recreate it for this run
+mkdir -p "$TMP_DIR"
+
+# Fix permissions for the meta directory (777 recursively)
+chmod -R 777 "$META_DIR"
+
+create_7z() {
+    local folder="$1"
+    local archive_name="$2"
+    log "Creating 7zip archive of $folder"
+    if ! 7z a -p"$ZIP_PASSWORD" -mhe=on -mx=5 "$archive_name" "$folder"; then
+        log "ERROR: Failed to create 7zip archive of $folder"
+        return 1
+    fi
+}
+
+upload_to_gdrive() {
+    local archive_name="$1"
+    log "Uploading $archive_name to Google Drive"
+
+    # rclone copy overwrites an existing remote copy of the archive if it changed
+    if ! rclone copy "$archive_name" "$GDRIVE_REMOTE:$GDRIVE_PATH" \
+        --progress \
+        --check-first \
+        --transfers 1 \
+        --checkers 1 \
+        --retries 1 \
+        --low-level-retries 10; then
+        log "ERROR: Failed to upload $archive_name to Google Drive"
+        return 1
+    fi
+}
+
+# Loop through each folder in the backup source
+for folder in "$BACKUP_SOURCE"/*; do
+    if [ -d "$folder" ]; then
+        log "Processing folder: $folder"
+        # Get the sha256 checksum of the folder; sort so the result does not
+        # depend on the order in which find lists the files
+        CHECKSUM=$(find "$folder" -type f -exec sha256sum {} + | sort | sha256sum | awk '{print $1}')
+        META_FILE="$META_DIR/$(basename "$folder").sha256"
+        ARCHIVE_PATH="$TMP_DIR/$(basename "$folder")_$ARCHIVE_NAME"
+        # Check if the checksum file exists
+        if [ -f "$META_FILE" ]; then
+            # Read the previous checksum from the file
+            PREV_CHECKSUM=$(cat "$META_FILE")
+            # Compare the checksums
+            if [ "$CHECKSUM" != "$PREV_CHECKSUM" ]; then
+                log "Changes detected in $folder - creating new archive"
+
+                # Only record the new checksum if the archive was created and uploaded
+                if create_7z "$folder" "$ARCHIVE_PATH" && upload_to_gdrive "$ARCHIVE_PATH"; then
+                    echo "$CHECKSUM" > "$META_FILE"
+                else
+                    log "ERROR: Backup of $folder failed - keeping previous checksum"
+                fi
+
+                # Remove the temporary archive file
+                log "Removing temporary archive file"
+                rm -f "$ARCHIVE_PATH"
+            else
+                log "No changes detected in $folder"
+            fi
+        else
+            log "No previous checksum found for $folder - creating new archive"
+
+            # Only record the checksum if the archive was created and uploaded
+            if create_7z "$folder" "$ARCHIVE_PATH" && upload_to_gdrive "$ARCHIVE_PATH"; then
+                echo "$CHECKSUM" > "$META_FILE"
+            else
+                log "ERROR: Backup of $folder failed - no checksum recorded"
+            fi
+
+            # Remove the temporary archive file
+            log "Removing temporary archive file"
+            rm -f "$ARCHIVE_PATH"
+        fi
+    else
+        log "Skipping $folder, not a directory"
+    fi
+    log ""
+done
+
+log "Backup process completed successfully"
+
+# Exit with success
+exit 0
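Note on restores: the script above only uploads, so getting data back is the reverse path - fetch the archive from the gdrive remote with rclone and extract it with the same password. The snippet below is a minimal sketch and not part of the repository; the folder name "Documents", the /tmp/restore paths and the password placeholder are illustrative, while the remote, remote path and archive naming follow the defaults configured above.

    # Pull one archive back from Google Drive and decrypt/extract it
    rclone copy "gdrive:/NAS-Backups/Documents_backup.7z" /tmp/restore/
    7z x -p"<ZIP_PASSWORD>" -o/tmp/restore/extracted /tmp/restore/Documents_backup.7z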
diff --git a/windows-backup.ps1 b/windows-backup.ps1
new file mode 100644
index 0000000..ea3a11a
--- /dev/null
+++ b/windows-backup.ps1
@@ -0,0 +1,52 @@
+$7zipPath = "$env:ProgramFiles\7-Zip\7z.exe"
+
+# Check if 7-Zip is installed
+if (!(Test-Path $7zipPath)) {
+    Write-Host "7-Zip is not installed. Please install it to use this script."
+    exit 1
+}
+
+$BackupSource = @(
+    "$env:USERPROFILE\Documents",
+    "$env:USERPROFILE\Desktop",
+    "$env:USERPROFILE\Pictures"
+)
+
+$NASDestination = "\\OMV\Backup\$env:COMPUTERNAME"
+$TempDir = "$env:TEMP\BackupTemp"
+$Date = Get-Date -Format "yyyy-MM-dd"
+
+# Create temp directory
+New-Item -ItemType Directory -Path $TempDir -Force | Out-Null
+
+# Create NAS destination if it doesn't exist
+if (!(Test-Path $NASDestination)) {
+    New-Item -ItemType Directory -Path $NASDestination -Force | Out-Null
+}
+
+foreach ($Folder in $BackupSource) {
+    if (Test-Path $Folder) {
+        $FolderName = Split-Path $Folder -Leaf
+        $ZipFile = "$TempDir\$FolderName-$Date.zip"
+
+        Write-Host "Compressing $Folder..."
+        & "$7zipPath" a -tzip "$ZipFile" "$Folder\*" -mx=9
+        # 7-Zip exit codes: 0 = ok, 1 = warnings; treat anything above 1 as fatal
+        if ($LASTEXITCODE -gt 1) {
+            Write-Host "Compression of $Folder failed, skipping."
+            continue
+        }
+
+        Write-Host "Copying $ZipFile to NAS..."
+        Copy-Item $ZipFile $NASDestination -Force
+
+        Write-Host "Removing $ZipFile..."
+        Remove-Item $ZipFile
+    }
+}
+
+Write-Host "Removing files older than 15 days from $NASDestination..."
+$OldFiles = Get-ChildItem -Path $NASDestination -File | Where-Object { $_.LastWriteTime -lt (Get-Date).AddDays(-15) }
+foreach ($OldFile in $OldFiles) {
+    Remove-Item $OldFile.FullName -Force
+    Write-Host "Removed: $($OldFile.FullName)"
+}
+
+# Cleanup
+Remove-Item $TempDir -Recurse -Force
+Write-Host "Backup completed!"
\ No newline at end of file
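Unlike the Linux script, whose installer adds a cron entry, windows-backup.ps1 does not schedule itself. If it should run unattended, a scheduled task is the usual counterpart. The sketch below is an assumption rather than part of the repository: the script location C:\Scripts\windows-backup.ps1, the task name and the Monday/Friday 21:00 trigger are all illustrative, and it would be run once from an elevated PowerShell session.

    # Illustrative only: register a weekly scheduled task for the backup script.
    # The script path, task name and trigger times are assumptions.
    $Action  = New-ScheduledTaskAction -Execute "powershell.exe" -Argument "-NoProfile -ExecutionPolicy Bypass -File C:\Scripts\windows-backup.ps1"
    $Trigger = New-ScheduledTaskTrigger -Weekly -DaysOfWeek Monday,Friday -At "21:00"
    Register-ScheduledTask -TaskName "NAS Backup" -Action $Action -Trigger $Trigger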