updating things

2025-09-20 20:53:54 -03:00
parent 96548f4773
commit 61ade81d39
6 changed files with 10 additions and 1011 deletions

README.md

@@ -2,29 +2,18 @@
Useful scripts for managing servers, written in Bash. Supported OSes include only Ubuntu/Debian and Alpine.
They feature auto-updating and some error handling. I used to rely on them for maintaining my home servers (bare metal with Docker), but since moving to k3s I use them less. Now this repo is a collection of useful scripts I reach for when needed; `clean.sh` in particular is still useful.
## `scripts-download.sh`
This script downloads selected scripts from this repository to a specified directory and installs them as cron jobs.
```bash
curl -sSL https://git.ivanch.me/ivanch/server-scripts/raw/branch/main/scripts-download.sh | bash
```
When you run the script, you will be prompted to select which scripts you want to download and install.
### `backup.sh`
This script backs up a directory to a remote server using `rsync`. It is intended to be run as a cron job.
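A minimal sketch of the cron-plus-`rsync` pattern described here; the schedule, paths, and flags below are illustrative placeholders, not the script's actual values:
```bash
# Hypothetical crontab entry: mirror a local directory to the NAS nightly at 23:00
# (paths, host, and flags are placeholders; see backup.sh for the real logic)
0 23 * * * rsync -az --delete /root/docker/ ivanch@nas.haven:/export/Backup/Docker/ >> /tmp/backup.log 2>&1
```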
#### `nas-gdrive-backup.sh`
This script both installs itself and runs a periodic backup to Google Drive of files that have changed.
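Per the header of the script itself (shown further down in this commit), it can be installed with:
```bash
curl -sSL https://git.ivanch.me/ivanch/server-scripts/raw/branch/main/nas-gdrive-backup.sh | bash -s -- --install
```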
### `clean.sh`
This script cleans up some files, dangling Docker images, and stopped/unused Docker containers.
### `docker-updater.sh`
This script updates all the Docker containers on the server.
### `windows-backup.ps1`
This PowerShell script backs up files from a Windows machine to a remote Samba server. It uses `7zip` to create the archive and then sends it over the network using the SMB protocol.
## Haven Notify
It's a small internal service designed to send notifications to a specified Discord channel via webhooks. It's written in Go and can be easily deployed.
The project lives in the [/haven-notify](./haven-notify) folder.
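The scripts in this repository call it over plain HTTP. As a standalone sketch, here is the kind of request `backup.sh` sends to the error template (the endpoint and JSON fields match the script below; the values are placeholders):
```bash
# Post an error notification to Haven Notify (illustrative values)
curl -s -X POST "http://notify.haven/template/notify/error" \
  -H "Content-Type: application/json" \
  -d '{"caller": "Docker Backup - myhost", "message": "Backup failed", "critical": true}'
```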

backup.sh

@@ -1,62 +0,0 @@
#!/bin/bash
# Notification endpoints and helper functions
HOSTNAME=$(cat /etc/hostname)
NOTIFY_URL_ERROR="http://notify.haven/template/notify/error"
NOTIFY_URL_BACKUP="http://notify.haven/template/notify/backup"
send_error_notification() {
local message="$1"
local critical="$2"
curl -s -X POST "$NOTIFY_URL_ERROR" \
-H "Content-Type: application/json" \
-d "{\"caller\": \"Docker Backup - $HOSTNAME\", \"message\": \"$message\", \"critical\": $critical}"
}
send_backup_notification() {
local message="$1"
local backup_size="$2"
curl -s -X POST "$NOTIFY_URL_BACKUP" \
-H "Content-Type: application/json" \
-d "{\"title\": \"Docker Backup - $HOSTNAME\", \"message\": \"$message\", \"backupSizeInMB\": $backup_size}"
}
####################
# Variables
SOURCE_DIR="/root/docker"
BACKUP_FILE="/tmp/docker_backup_$(date +%Y%m%d%H%M%S).zip"
REMOTE_USER="ivanch"
REMOTE_HOST="nas.haven"
REMOTE_DIR="/export/Backup/Docker/$(cat /etc/hostname)"
# Create a compressed backup archive; zip exits non-zero if some files
# could not be read (e.g. in use or locked), so warn but continue
zip -q -r "$BACKUP_FILE" "$SOURCE_DIR"
if [ $? -ne 0 ]; then
send_error_notification "⚠️ Some files or folders in $SOURCE_DIR could not be backed up (possibly in use or locked). Backup archive created with available files." false
fi
# Check if remote path exists
if ! ssh $REMOTE_USER@$REMOTE_HOST "mkdir -p $REMOTE_DIR"; then
send_error_notification "❌ Failed to create remote directory: $REMOTE_DIR on $REMOTE_HOST" true
exit 1
fi
# Transfer the backup file to the remote server
if ! scp "$BACKUP_FILE" "$REMOTE_USER@$REMOTE_HOST:$REMOTE_DIR"; then
send_error_notification "❌ Failed to transfer backup file to remote server: $REMOTE_HOST:$REMOTE_DIR" true
exit 1
fi
# Record the archive size, then remove the local backup file
BACKUP_SIZE=$(du -m "$BACKUP_FILE" | cut -f1)
rm "$BACKUP_FILE"
# Remove backups older than 7 days from the remote server
if ! ssh $REMOTE_USER@$REMOTE_HOST "find $REMOTE_DIR -type f -name 'docker_backup_*' -mtime +7 -exec rm {} \;"; then
send_error_notification "⚠️ Failed to clean old backups on remote server: $REMOTE_HOST:$REMOTE_DIR" false
fi
# Success notification
send_backup_notification "✅ Backup completed successfully for: $SOURCE_DIR to $REMOTE_HOST:$REMOTE_DIR" "$BACKUP_SIZE"
echo "Backup completed successfully"
exit 0

clean.sh

@@ -1,20 +1,8 @@
#!/bin/bash
# System Cleanup and Maintenance Script
#
# Description: Comprehensive system cleanup for Docker containers and Linux systems
# Features:
# - Docker resource cleanup (images, containers, volumes, networks)
# - Package manager cache cleanup (APK/APT)
# - System cache and temporary file cleanup
# - Log rotation and cleanup
# - Memory cache optimization
# - Journal cleanup (systemd)
# - Thumbnail and user cache cleanup
# Author: ivanch
# Version: 2.0
set -euo pipefail # Exit on error, undefined vars, and pipe failures
#==============================================================================
# CONFIGURATION
@@ -35,9 +23,6 @@ readonly JOURNAL_RETENTION_DAYS=7
readonly TEMP_DIRS=("/tmp" "/var/tmp")
readonly CACHE_DIRS=("/var/cache" "/root/.cache")
# Auto-update configuration
readonly AUTO_UPDATE_ENABLED=true
#==============================================================================
# UTILITY FUNCTIONS
#==============================================================================
@@ -147,12 +132,6 @@ cleanup_docker() {
return 0
fi
# Get initial Docker disk usage
local docker_usage_before=""
if docker system df >/dev/null 2>&1; then
docker_usage_before=$(docker system df 2>/dev/null || echo "")
fi
# Remove unused images
log_info "Removing unused Docker images..."
if docker image prune -af >/dev/null 2>&1; then
@@ -192,11 +171,6 @@ cleanup_docker() {
else
log_warning "Docker system cleanup failed"
fi
# Show space freed if possible
if [[ -n "$docker_usage_before" ]] && docker system df >/dev/null 2>&1; then
log_info "Docker cleanup completed"
fi
}
#==============================================================================

docker-updater.sh

@@ -1,280 +0,0 @@
#!/bin/bash
# Docker Container Updater
#
# Description: Automatically updates Docker containers and manages Docker images
# Features:
# - Updates all Docker Compose projects in /root/docker
# - Skips containers with .ignore file
# - Removes obsolete Docker Compose version attributes
# - Cleans up unused Docker images
# Author: ivanch
# Version: 2.0
set -euo pipefail # Exit on error, undefined vars, and pipe failures
HOSTNAME=$(cat /etc/hostname)
NOTIFY_URL_ERROR="http://notify.haven/template/notify/error"
NOTIFY_URL_UPDATE="http://notify.haven/template/notify/update"
send_error_notification() {
local message="$1"
local critical="$2"
curl -s -X POST "$NOTIFY_URL_ERROR" \
-H "Content-Type: application/json" \
-d "{\"caller\": \"$HOSTNAME\", \"message\": \"$message\", \"critical\": $critical}"
}
send_update_notification() {
local script_time="$1"
curl -s -X POST "$NOTIFY_URL_UPDATE" \
-H "Content-Type: application/json" \
-d "{\"host\": \"$HOSTNAME\", \"asset\": \"Docker containers\", \"time\": $script_time}"
}
#==============================================================================
# CONFIGURATION
#==============================================================================
# Color definitions for output formatting
readonly NC='\033[0m'
readonly RED='\033[1;31m'
readonly GREEN='\033[1;32m'
readonly LIGHT_GREEN='\033[1;32m'
readonly LIGHT_BLUE='\033[1;34m'
readonly LIGHT_GREY='\033[0;37m'
readonly YELLOW='\033[1;33m'
# Script configuration
readonly DOCKER_FOLDER="/root/docker"
readonly COMPOSE_FILES=("docker-compose.yml" "docker-compose.yaml" "compose.yaml" "compose.yml")
# Auto-update configuration
readonly AUTO_UPDATE_ENABLED=true
#==============================================================================
# UTILITY FUNCTIONS
#==============================================================================
# Print formatted log messages
log_info() { echo -e "${LIGHT_GREY}[i] $1${NC}"; }
log_success() { echo -e "${LIGHT_GREEN}[✓] $1${NC}"; }
log_step() { echo -e "${LIGHT_BLUE}[i] $1${NC}"; }
log_container() { echo -e "${LIGHT_BLUE}[$1] $2${NC}"; }
log_warning() {
echo -e "${YELLOW}[!] $1${NC}";
send_error_notification "$1" false
}
log_error() {
echo -e "${RED}[x] $1${NC}" >&2;
send_error_notification "$1" true
}
# Exit with error message
die() {
log_error "$1"
exit 1
}
# Check if a command exists
command_exists() {
command -v "$1" >/dev/null 2>&1
}
# Check if Docker and Docker Compose are available
check_docker_requirements() {
log_info "Checking Docker requirements..."
if ! command_exists docker; then
die "Docker is not installed or not in PATH"
fi
if ! docker compose version >/dev/null 2>&1; then
die "Docker Compose is not available"
fi
log_success "Docker requirements satisfied"
}
# Get SHA256 hash of a file
get_file_hash() {
local file="$1"
sha256sum "$file" 2>/dev/null | awk '{print $1}' || echo ""
}
# Get SHA256 hash from URL content
get_url_hash() {
local url="$1"
curl -s "$url" 2>/dev/null | sha256sum | awk '{print $1}' || echo ""
}
#==============================================================================
# DOCKER COMPOSE MANAGEMENT
#==============================================================================
# Find the active Docker Compose file in current directory
find_compose_file() {
for compose_file in "${COMPOSE_FILES[@]}"; do
if [[ -f "$compose_file" ]]; then
echo "$compose_file"
return 0
fi
done
return 1
}
# Remove obsolete version attribute from Docker Compose files
clean_compose_files() {
local container_name="$1"
for compose_file in "${COMPOSE_FILES[@]}"; do
if [[ -f "$compose_file" ]]; then
log_container "$container_name" "Cleaning obsolete version attribute from $compose_file"
sed -i '/^version:/d' "$compose_file" || log_warning "Failed to clean $compose_file"
fi
done
}
# Check if container should be skipped
should_skip_container() {
[[ -f ".ignore" ]]
}
# Check if any containers are running in current directory
has_running_containers() {
local running_containers
running_containers=$(docker compose ps -q 2>/dev/null || echo "")
[[ -n "$running_containers" ]]
}
# Update a single Docker Compose project
update_docker_project() {
local project_dir="$1"
local container_name
container_name=$(basename "$project_dir")
log_container "$container_name" "Checking for updates..."
# Change to project directory
cd "$project_dir" || {
log_error "Cannot access directory: $project_dir"
return 1
}
# Check if container should be skipped
if should_skip_container; then
log_container "$container_name" "Skipping (found .ignore file)"
return 0
fi
# Verify compose file exists
local compose_file
if ! compose_file=$(find_compose_file); then
log_container "$container_name" "No Docker Compose file found, skipping"
return 0
fi
# Clean compose files
clean_compose_files "$container_name"
# Check if containers are running
if ! has_running_containers; then
log_container "$container_name" "No running containers, skipping update"
return 0
fi
# Stop containers
log_container "$container_name" "Stopping containers..."
if ! docker compose down >/dev/null 2>&1; then
log_error "Failed to stop containers in $container_name"
return 1
fi
# Pull updated images
log_container "$container_name" "Pulling updated images..."
if ! docker compose pull -q >/dev/null 2>&1; then
log_warning "Failed to pull images for $container_name, attempting to restart anyway"
fi
# Start containers
log_container "$container_name" "Starting containers..."
if ! docker compose up -d >/dev/null 2>&1; then
log_error "Failed to start containers in $container_name"
return 1
fi
log_container "$container_name" "Update completed successfully!"
return 0
}
# Update all Docker Compose projects
update_all_docker_projects() {
log_step "Starting Docker container updates..."
# Check if Docker folder exists
if [[ ! -d "$DOCKER_FOLDER" ]]; then
die "Docker folder not found: $DOCKER_FOLDER"
fi
# Change to Docker folder
cd "$DOCKER_FOLDER" || die "Cannot access Docker folder: $DOCKER_FOLDER"
# Process each subdirectory
for project_dir in */; do
if [[ -d "$project_dir" ]]; then
local project_path="$DOCKER_FOLDER/$project_dir"
update_docker_project "$project_path" || true # set -e is active: don't let one failed project abort the remaining updates
# Return to Docker folder for next iteration
cd "$DOCKER_FOLDER" || die "Cannot return to Docker folder"
fi
done
}
#==============================================================================
# DOCKER CLEANUP
#==============================================================================
# Clean up unused Docker resources
cleanup_docker_resources() {
log_step "Cleaning up unused Docker resources..."
# Remove unused images
log_info "Removing unused Docker images..."
if docker image prune -af >/dev/null 2>&1; then
log_success "Docker image cleanup completed"
else
log_warning "Docker image cleanup failed"
fi
}
#==============================================================================
# MAIN EXECUTION
#==============================================================================
main() {
START_TIME=$(date +%s)
log_step "Starting Docker Container Updater"
echo
# Check requirements
check_docker_requirements
# Update all Docker projects
update_all_docker_projects
# Clean up Docker resources
cleanup_docker_resources
echo
log_success "Docker container update process completed!"
END_TIME=$(date +%s)
DURATION=$((END_TIME - START_TIME))
log_info "Total duration: $DURATION seconds"
send_update_notification "$DURATION"
}
# Execute main function with all arguments
main "$@"
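Usage note: per `should_skip_container` above, a compose project is excluded from updates by placing an `.ignore` file in its directory. For example (the project name is a placeholder):
```bash
# Exclude /root/docker/my-project from future update runs
touch /root/docker/my-project/.ignore
```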

nas-gdrive-backup.sh

@@ -1,292 +0,0 @@
#!/bin/bash
# NAS Backup Script to Google Drive using rclone and 7zip
# For each folder in BACKUP_SOURCE, it computes a sha256 checksum of the folder's
# contents, compares it against the checksum recorded by the previous backup, and,
# if it changed, creates an encrypted 7zip archive of the folder.
# It then uploads the archive to Google Drive using rclone.
# Install: curl -sSL https://git.ivanch.me/ivanch/server-scripts/raw/branch/main/nas-gdrive-backup.sh | bash -s -- --install
# Run manually: /usr/local/bin/nas-gdrive-backup.sh
# Configuration
BACKUP_SOURCE="/export/Backup"
META_DIR="/export/Backup/.gdrive"
TMP_DIR="/export/Backup/.gdrive/tmp"
ZIP_PASSWORD="password"
GDRIVE_REMOTE="gdrive"
GDRIVE_PATH="/NAS-Backups"
ARCHIVE_NAME="backup.7z"
LOG_FILE="/var/log/nas-gdrive-backup.log"
# Function for logging
log() {
echo "$(date '+%Y-%m-%d %H:%M:%S') - $1" | tee -a "$LOG_FILE"
}
clean_up() {
log "Cleaning up temporary files"
rm -rf "$TMP_DIR"
}
trap clean_up EXIT
create_7z() {
local folder="$1"
local archive_name="$2"
log "Creating 7zip archive of $folder"
7z a -p"$ZIP_PASSWORD" -mhe=on -mx=3 "$archive_name" "$folder"
if [ $? -ne 0 ]; then
log "ERROR: Failed to create 7zip archive of $folder"
fi
}
upload_to_gdrive() {
local archive_name="$1"
log "Uploading $archive_name to Google Drive"
# Should replace existing file if it exists
rclone copy "$archive_name" "$GDRIVE_REMOTE:$GDRIVE_PATH" \
--progress \
--check-first \
--transfers 1 \
--checkers 1 \
--retries 1 \
--low-level-retries 10
if [ $? -ne 0 ]; then
log "ERROR: Failed to upload $archive_name to Google Drive"
fi
}
main() {
# Check if 7z is installed
if ! which 7z > /dev/null; then
log "ERROR: 7z is not installed"
exit 1
fi
# Check if rclone is installed
if ! which rclone > /dev/null; then
log "ERROR: rclone is not installed"
exit 1
fi
# Create meta directory if it doesn't exist
if [ ! -d "$META_DIR" ]; then
log "Creating meta directory: $META_DIR"
mkdir -p "$META_DIR"
fi
# Fix permissions for the meta directory (777 recursively)
chmod -R 777 "$META_DIR"
# Loop through each folder in the backup source
for folder in "$BACKUP_SOURCE"/*; do
if [ -d "$folder" ]; then
log "Processing folder: $folder"
# Get a stable sha256 checksum of the folder (sorted so file order cannot change the hash)
CHECKSUM=$(find "$folder" -type f -exec sha256sum {} + | sort | sha256sum | awk '{print $1}')
META_FILE="$META_DIR/$(basename "$folder").sha256"
# Check if the checksum file exists
if [ -f "$META_FILE" ]; then
# Read the previous checksum from the file
PREV_CHECKSUM=$(cat "$META_FILE")
# Compare the checksums
if [ "$CHECKSUM" != "$PREV_CHECKSUM" ]; then
log "Changes detected in $folder - creating new archive"
create_7z "$folder" "$TMP_DIR/$(basename "$folder")_$ARCHIVE_NAME"
upload_to_gdrive "$TMP_DIR/$(basename "$folder")_$ARCHIVE_NAME"
# Update the checksum file
echo "$CHECKSUM" > "$META_FILE"
# Remove the temporary archive file
log "Removing temporary archive file"
rm "$TMP_DIR/$(basename "$folder")_$ARCHIVE_NAME"
else
log "No changes detected in $folder"
fi
else
log "No previous checksum found for $folder - creating new archive"
create_7z "$folder" "$TMP_DIR/$(basename "$folder")_$ARCHIVE_NAME"
upload_to_gdrive "$TMP_DIR/$(basename "$folder")_$ARCHIVE_NAME"
# Create a checksum file for the folder
echo "$CHECKSUM" > "$META_FILE"
# Remove the temporary archive file
log "Removing temporary archive file"
rm "$TMP_DIR/$(basename "$folder")_$ARCHIVE_NAME"
fi
else
log "Skipping $folder, not a directory"
fi
log ""
done
# Fix permissions for the meta directory (777 recursively)
chmod -R 777 "$META_DIR"
log "Backup process completed successfully"
# Exit with success
exit 0
}
###########################
### Installation script ###
###########################
# Function to install a dependency if not already installed
install_dependency() {
local package="$1"
if ! dpkg -l "$package" 2>/dev/null | grep -q '^ii'; then
install_log_info "Installing $package"
apt-get update && apt-get install -y "$package"
if [ $? -ne 0 ]; then
install_log_error "ERROR: Failed to install $package"
exit 1
fi
install_log_ok "$package installed successfully"
else
install_log_ok "$package is already installed"
fi
}
install_log_ok() {
echo -e "\e[32m[✓]\e[0m $1"
}
install_log_error() {
echo -e "\e[31m[✗]\e[0m $1"
}
install_log_info() {
echo -e "\e[34m[!]\e[0m $1"
}
install_log_separator() {
echo -e "\e[36m========================================\e[0m"
}
install_script() {
echo -e ""
install_log_separator
install_log_info "Starting installation of NAS to Google Drive backup script"
install_log_separator
echo -e ""
install_log_separator
# Check if running as root
install_log_info "Checking if the script is running as root"
if [ "$(id -u)" -ne 0 ]; then
install_log_error "ERROR: This script must be run as root"
exit 1
else
install_log_ok "Running as root"
fi
install_log_separator
# Check for dependencies
install_log_info "Checking for required dependencies"
install_dependency "rclone"
install_dependency "p7zip-full"
install_log_separator
# Check if crontab exists
install_log_info "Checking if crontab is installed"
if ! command -v crontab &>/dev/null; then
install_log_error "crontab is not installed"
exit 1
else
install_log_ok "crontab is installed"
fi
install_log_separator
install_log_info "Installing script to /usr/local/bin/nas-gdrive-backup.sh"
curl -sSL https://git.ivanch.me/ivanch/server-scripts/raw/branch/main/nas-gdrive-backup.sh -o /usr/local/bin/nas-gdrive-backup.sh
chmod +x /usr/local/bin/nas-gdrive-backup.sh
install_log_info "Setting up ZIP_PASSWORD in $0"
if [ -z "$ZIP_PASSWORD" ]; then
install_log_error "ERROR: ZIP_PASSWORD is not set"
exit 1
fi
read -p "Enter ZIP_PASSWORD: " ZIP_PASSWORD </dev/tty
if [ -z "$ZIP_PASSWORD" ]; then
install_log_error "ERROR: ZIP_PASSWORD cannot be empty"
exit 1
fi
# Update the ZIP_PASSWORD in the script
sed -i "s/^ZIP_PASSWORD=.*/ZIP_PASSWORD=\"$ZIP_PASSWORD\"/" /usr/local/bin/nas-gdrive-backup.sh
log "ZIP_PASSWORD updated in /usr/local/bin/nas-gdrive-backup.sh"
install_log_separator
# Check for existence of source directories
install_log_info "Checking if BACKUP_SOURCE, META_DIR, and TMP_DIR exists"
if ! [ -d "$BACKUP_SOURCE" ]; then
install_log_error "ERROR: BACKUP_SOURCE directory does not exist"
exit 1
else
install_log_ok "BACKUP_SOURCE directory exists: $BACKUP_SOURCE"
fi
if ! [ -d "$META_DIR" ]; then
install_log_info "Creating META_DIR: $META_DIR"
mkdir -p "$META_DIR"
fi
if ! [ -d "$TMP_DIR" ]; then
install_log_info "Creating TMP_DIR: $TMP_DIR"
mkdir -p "$TMP_DIR"
fi
install_log_info "Setting permissions for $META_DIR and $TMP_DIR to 777"
chmod -R 777 "$META_DIR" "$TMP_DIR"
install_log_ok "Directories checked and are ok"
# Check for existing .sha256 files, if there are any, prompt to remove them
install_log_info "Verifying existing .sha256 files in $META_DIR"
for file in "$META_DIR"/*; do
if [ -f "$file" ] && [[ "$file" == *.sha256 ]]; then
install_log_info "Found .sha256 file: \e[96m\e[4m$file\e[0m"
read -p "Do you want to remove this file? [y/N]: " choice </dev/tty
if [[ "$choice" == "y" || "$choice" == "Y" ]]; then
install_log_info "Removing $file"
rm "$file"
else
install_log_info "Skipping $file"
fi
fi
done
install_log_ok "Existing .sha256 files checked"
install_log_separator
install_log_info "Setting up rclone configuration"
if ! rclone config show gdrive &>/dev/null; then
install_log_error "ERROR: rclone gdrive remote is not configured"
install_log_error "Please run 'rclone config' to set up your Google Drive remote"
exit 1
fi
install_log_ok "rclone gdrive remote is configured"
install_log_separator
install_log_info "Setting up cron job for backup script"
(crontab -l 2>/dev/null; echo "55 23 * * 1 /usr/local/bin/nas-gdrive-backup.sh > /tmp/nas-gdrive-backup.log") | crontab -
install_log_ok "Cron job set up to run /usr/local/bin/nas-gdrive-backup.sh every Monday at 23:55"
install_log_separator
echo -e ""
install_log_separator
install_log_ok "Installation completed successfully!"
install_log_separator
echo -e ""
echo -e "You can now run the script manually with: \e[32mnas-gdrive-backup.sh\e[0m"
echo -e "Or it will run automatically according to the cron schedule."
# Exit with success
exit 0
}
# Check for install flag
if [[ "$1" == "--install" ]]; then
install_script
exit 0
fi
main "$@"
exit 0

scripts-download.sh

@@ -1,330 +0,0 @@
#!/bin/bash
# Usage: curl -sSL https://git.ivanch.me/ivanch/server-scripts/raw/branch/main/scripts-download.sh | bash
set -euo pipefail
#==============================================================================
# CONFIGURATION
#==============================================================================
# Color definitions for output formatting
readonly RED='\033[1;31m'
readonly GREEN='\033[1;32m'
readonly NC='\033[0m'
readonly LIGHT_BLUE='\033[1;34m'
readonly LIGHT_RED='\033[1;31m'
readonly LIGHT_GREEN='\033[1;32m'
readonly GREY='\033[1;30m'
readonly YELLOW='\033[1;33m'
# Configuration
readonly FILES_URL="https://git.ivanch.me/ivanch/server-scripts/raw/branch/main"
readonly REQUIRED_PACKAGES=("zip" "unzip" "curl")
readonly REQUIRED_COMMANDS=("zip" "unzip" "sha256sum" "curl" "crontab")
readonly AVAILABLE_SCRIPTS=("clean.sh" "backup.sh" "docker-updater.sh")
# Format: [script_name]="cron_schedule"
declare -A CRONTAB_SCHEDULES=(
["clean.sh"]="0 3 * * *" # Daily at 3 AM
["backup.sh"]="0 23 * * 1,5" # Monday and Friday at 11 PM
["docker-updater.sh"]="0 3 * * 6" # Every Saturday at 3 AM
)
#==============================================================================
# UTILITY FUNCTIONS
#==============================================================================
# Print formatted log messages
log_info() { echo -e "${GREY}[i] $1${NC}"; }
log_success() { echo -e "${GREEN}[✓] $1${NC}"; }
log_warning() { echo -e "${YELLOW}[!] $1${NC}"; }
log_error() { echo -e "${RED}[x] $1${NC}" >&2; }
log_step() { echo -e "${LIGHT_BLUE}[i] $1${NC}"; }
# Exit with error message
die() {
log_error "$1"
exit 1
}
# Check if a command exists
command_exists() {
command -v "$1" >/dev/null 2>&1
}
# Check if a process is running
process_running() {
pgrep "$1" >/dev/null 2>&1
}
#==============================================================================
# MAIN FUNCTIONS
#==============================================================================
# Detect the operating system
detect_operating_system() {
if command_exists apk; then
echo "Alpine"
elif command_exists apt; then
echo "Debian"
else
die "Unsupported operating system. This script supports Alpine and Debian-based systems only."
fi
}
# Check for missing packages
get_missing_packages() {
local missing=()
# Check each required command and map to package names
if ! command_exists "zip"; then
missing+=("zip")
fi
if ! command_exists "unzip"; then
missing+=("unzip")
fi
if ! command_exists "curl"; then
missing+=("curl")
fi
# sha256sum is part of coreutils (usually pre-installed)
# crontab is part of cron package, but we'll check for cron service later
# Only print if there are missing packages
if [[ ${#missing[@]} -gt 0 ]]; then
printf '%s\n' "${missing[@]}"
fi
}
# Install packages based on the detected OS
install_packages() {
local os="$1"
shift
local packages=("$@")
if [[ ${#packages[@]} -eq 0 ]]; then
log_info "No packages to install"
return 0
fi
log_info "Installing required packages: ${packages[*]}"
log_info "Debug: Installing ${#packages[@]} packages on $os"
case "$os" in
"Alpine")
log_info "Updating APK package index..."
apk update >/dev/null || die "Failed to update APK package index"
log_info "Installing packages via APK..."
apk add --no-cache "${packages[@]}" >/dev/null || die "Failed to install packages via APK"
;;
"Debian")
log_info "Ensuring /var/cache/apt/archives/partial exists..."
mkdir -p /var/cache/apt/archives/partial || die "Failed to create /var/cache/apt/archives/partial"
log_info "Updating APT package index..."
apt-get update -y >/dev/null || die "Failed to update APT package index"
log_info "Installing packages via APT..."
apt-get install -y "${packages[@]}" >/dev/null || die "Failed to install packages via APT"
;;
*)
log_error "Debug info - OS variable content: '$os'"
log_error "Debug info - OS variable length: ${#os}"
die "Unknown operating system: '$os'"
;;
esac
}
# Verify all required packages are available
verify_packages() {
log_info "Verifying package installation..."
local missing_packages
readarray -t missing_packages < <(get_missing_packages)
if [[ ${#missing_packages[@]} -gt 0 ]]; then
log_error "Failed to install required packages: ${missing_packages[*]}"
die "Please install the missing packages manually and try again"
fi
log_success "All required packages are available"
}
# Check if crontab service is running
check_crontab_service() {
log_info "Checking crontab service status..."
if ! process_running "cron"; then
die "Crontab service is not running. Please start the cron service first."
fi
log_success "Crontab service is running"
}
# Prompt user to select scripts for installation
select_scripts() {
local selected=()
echo >&2 # Send to stderr so it doesn't get captured
echo -e "${GREY}[i] Available scripts for download and installation:${NC}" >&2
echo >&2
for script in "${AVAILABLE_SCRIPTS[@]}"; do
local schedule="${CRONTAB_SCHEDULES[$script]:-"0 0 * * *"}"
echo -e " ${LIGHT_BLUE}$script${NC} - Schedule: ${GREY}$schedule${NC}" >&2
done
echo >&2
echo -e "${GREY}[i] Select scripts to download and install:${NC}" >&2
for script in "${AVAILABLE_SCRIPTS[@]}"; do
read -p "Install $script? [Y/n]: " choice </dev/tty
if [[ "$choice" =~ ^[Yy]?$ ]]; then
selected+=("$script")
fi
done
if [[ ${#selected[@]} -eq 0 ]]; then
echo -e "${RED}[x] No scripts selected. Exiting...${NC}" >&2
exit 1
fi
# Only output the selected scripts to stdout
printf '%s\n' "${selected[@]}"
}
# Verify server connectivity for selected scripts
verify_server_connectivity() {
local scripts=("$@")
log_info "Verifying server connectivity..."
for script in "${scripts[@]}"; do
local url="$FILES_URL/$script"
if ! curl -s --head "$url" | head -n 1 | grep -E "HTTP/[12] [23].." >/dev/null; then
die "Script '$script' not found on server: $url"
fi
done
log_success "Server connectivity verified"
}
# Download selected scripts
download_scripts() {
local scripts=("$@")
log_info "Downloading ${#scripts[@]} script(s)..."
for script in "${scripts[@]}"; do
local url="$FILES_URL/$script"
log_step "Downloading $script..."
if ! curl -s -o "./$script" "$url"; then
die "Failed to download $script from $url"
fi
# Set executable permissions
chmod +x "./$script" || die "Failed to set executable permissions for $script"
done
log_success "All scripts downloaded and configured"
}
# Setup crontab entries for selected scripts
setup_crontab() {
local scripts=("$@")
local current_workdir
current_workdir=$(pwd)
log_info "Setting up crontab entries..."
for script in "${scripts[@]}"; do
local schedule="${CRONTAB_SCHEDULES[$script]:-"0 0 * * *"}"
local log_file="/tmp/${script%.*}.log"
local cron_entry="$schedule $current_workdir/$script > $log_file 2>&1"
log_step "Configuring crontab for $script (Schedule: $schedule)..."
# Remove existing crontab entry for this script
if crontab -l 2>/dev/null | grep -q "$script"; then
log_step "Removing existing crontab entry for $script..."
crontab -l 2>/dev/null | grep -v "$script" | crontab - || die "Failed to remove existing crontab entry"
fi
# Add new crontab entry
(crontab -l 2>/dev/null; echo "$cron_entry") | crontab - || die "Failed to add crontab entry for $script"
# Verify the entry was added
if ! crontab -l 2>/dev/null | grep -q "$script"; then
die "Failed to verify crontab entry for $script"
fi
log_success "Crontab configured for $script"
done
log_success "All crontab entries configured successfully"
}
#==============================================================================
# MAIN EXECUTION
#==============================================================================
main() {
log_step "Starting Server Scripts Downloader"
echo
# System detection and validation
log_info "Detecting operating system..."
local detected_os
detected_os=$(detect_operating_system)
detected_os=$(echo "$detected_os" | tr -d '\n\r' | xargs) # Clean any whitespace/newlines
log_success "Detected $detected_os Linux"
# Package management
local missing_packages
readarray -t missing_packages < <(get_missing_packages)
# Filter out empty strings
local filtered_packages=()
for pkg in "${missing_packages[@]}"; do
if [[ -n "$pkg" ]]; then
filtered_packages+=("$pkg")
fi
done
missing_packages=("${filtered_packages[@]}")
# Debug output
log_info "Debug: Found ${#missing_packages[@]} missing packages"
for i in "${!missing_packages[@]}"; do
log_info "Debug: Missing package $((i+1)): '${missing_packages[i]}'"
done
if [[ ${#missing_packages[@]} -gt 0 ]]; then
log_warning "Missing packages detected: ${missing_packages[*]}"
install_packages "$detected_os" "${missing_packages[@]}"
else
log_success "All required packages are already installed"
fi
verify_packages
check_crontab_service
# Script selection and installation
local selected_scripts
readarray -t selected_scripts < <(select_scripts)
log_info "Selected scripts: ${selected_scripts[*]}"
verify_server_connectivity "${selected_scripts[@]}"
download_scripts "${selected_scripts[@]}"
setup_crontab "${selected_scripts[@]}"
echo
log_success "Installation completed successfully!"
log_info "Scripts have been downloaded to: $(pwd)"
log_info "Crontab entries have been configured. Use 'crontab -l' to view them."
log_info "Log files will be created in /tmp/ directory."
}
# Execute main function
main "$@"
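For reference, `setup_crontab` builds each entry as `$schedule $workdir/$script > /tmp/<name>.log 2>&1`. Assuming the scripts were downloaded to `/root/scripts` (a placeholder path), the resulting crontab would look like:
```bash
0 3 * * * /root/scripts/clean.sh > /tmp/clean.log 2>&1
0 23 * * 1,5 /root/scripts/backup.sh > /tmp/backup.log 2>&1
0 3 * * 6 /root/scripts/docker-updater.sh > /tmp/docker-updater.log 2>&1
```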