scripts v2.0
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 35s
clean.sh
@@ -1,74 +1,558 @@
#!/bin/bash

### AUTO-UPDATER ###
# Variables
SERVER_FILE="https://git.ivanch.me/ivanch/server-scripts/raw/branch/main/clean.sh"
SERVER_OK=1
# System Cleanup and Maintenance Script
#
# Description: Comprehensive system cleanup for Docker containers and Linux systems
# Features:
# - Self-updating capability
# - Docker resource cleanup (images, containers, volumes, networks)
# - Package manager cache cleanup (APK/APT)
# - System cache and temporary file cleanup
# - Log rotation and cleanup
# - Memory cache optimization
# - Journal cleanup (systemd)
# - Thumbnail and user cache cleanup
# Author: ivanch
# Version: 2.0

# Check if the server file exists
curl -s --head $SERVER_FILE | head -n 1 | grep -E "HTTP/[12] [23].." > /dev/null
set -euo pipefail # Exit on error, undefined vars, and pipe failures

if [ $? -ne 0 ]; then
echo "Error: $SERVER_FILE not found." >&2
SERVER_OK=0
fi
#==============================================================================
# CONFIGURATION
#==============================================================================

if [ $SERVER_OK -eq 1 ]; then
echo "Running auto-update..."
# Color definitions for output formatting
readonly NC='\033[0m'
readonly RED='\033[1;31m'
readonly GREEN='\033[1;32m'
readonly LIGHT_GREEN='\033[1;32m'
readonly LIGHT_BLUE='\033[1;34m'
readonly LIGHT_GREY='\033[0;37m'
readonly YELLOW='\033[1;33m'

# Compare the local and server files sha256sum to check if an update is needed
LOCAL_SHA256=$(sha256sum clean.sh | awk '{print $1}')
SERVER_SHA256=$(curl -s $SERVER_FILE | sha256sum | awk '{print $1}')
# Script configuration
readonly SCRIPT_NAME="clean.sh"
readonly SERVER_BASE_URL="https://git.ivanch.me/ivanch/server-scripts/raw/branch/main"

if [ "$LOCAL_SHA256" != "$SERVER_SHA256" ]; then
echo "Updating clean.sh..."
curl -s -o clean.sh $SERVER_FILE
echo "clean.sh updated."
# Cleanup configuration
readonly LOG_RETENTION_DAYS=30
readonly JOURNAL_RETENTION_DAYS=7
readonly TEMP_DIRS=("/tmp" "/var/tmp")
readonly CACHE_DIRS=("/var/cache" "/root/.cache")

chmod +x clean.sh
echo "Permissions set up."
# Auto-update configuration
readonly AUTO_UPDATE_ENABLED=true

echo "Running updated clean.sh..."
./clean.sh
exit 0
#==============================================================================
# UTILITY FUNCTIONS
#==============================================================================

# Print formatted log messages
log_info() { echo -e "${LIGHT_GREY}[i] $1${NC}"; }
log_success() { echo -e "${LIGHT_GREEN}[✓] $1${NC}"; }
log_warning() { echo -e "${YELLOW}[!] $1${NC}"; }
log_error() { echo -e "${RED}[x] $1${NC}" >&2; }
log_step() { echo -e "${LIGHT_BLUE}[i] $1${NC}"; }

# Exit with error message
die() {
log_error "$1"
exit 1
}

# Check if a command exists
command_exists() {
command -v "$1" >/dev/null 2>&1
}

# Get directory size in human readable format
get_dir_size() {
local dir="$1"
if [[ -d "$dir" ]]; then
du -sh "$dir" 2>/dev/null | cut -f1 || echo "0B"
else
echo "clean.sh is up to date.."
echo "0B"
fi
fi
}

####################
# Safe directory cleanup with size reporting
clean_directory() {
local dir="$1"
local description="$2"

# Run Docker system prune
echo "Running Docker system prune..."
docker image prune -af
docker system prune -af
if [[ ! -d "$dir" ]]; then
return 0
fi

# Clean APK cache from Alpine or apt for Debian
if [ -x "$(command -v apk)" ]; then
echo "Cleaning APK cache..."
rm -rf /var/cache/apk/*
apk cache clean
apk update
fi
local size_before
size_before=$(get_dir_size "$dir")

if [ -x "$(command -v apt)" ]; then
echo "Cleaning apt cache..."
apt-get clean
apt-get autoclean
apt-get update
fi
if [[ "$size_before" == "0B" ]]; then
log_info "$description: already clean"
return 0
fi

# Clean system caches
echo "Cleaning system caches..."
rm -rf /var/cache/*
rm -rf /tmp/*
log_step "$description (was $size_before)..."

# General system maintenance
echo "Performing general system maintenance..."
sync; echo 3 > /proc/sys/vm/drop_caches
# Use find with -delete for safer cleanup
if find "$dir" -mindepth 1 -delete 2>/dev/null; then
log_success "$description: freed $size_before"
else
# Fallback to rm if find fails
if rm -rf "$dir"/* 2>/dev/null; then
log_success "$description: freed $size_before"
else
log_warning "$description: partial cleanup completed"
fi
fi
}
|
||||
|
||||
# Remove old logs
|
||||
echo "Removing old logs..."
|
||||
find /var/log -type f -name "*.log" -mtime +30 -delete
|
||||
# Get system information for reporting
|
||||
get_system_info() {
|
||||
local info=""
|
||||
|
||||
echo "Maintenance completed."
|
||||
# Memory info
|
||||
if [[ -f /proc/meminfo ]]; then
|
||||
local mem_total mem_available
|
||||
mem_total=$(grep MemTotal /proc/meminfo | awk '{print $2}')
|
||||
mem_available=$(grep MemAvailable /proc/meminfo | awk '{print $2}')
|
||||
if [[ -n "$mem_total" && -n "$mem_available" ]]; then
|
||||
info+="Memory: $((mem_available/1024))MB available of $((mem_total/1024))MB total"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Disk space info
|
||||
if command_exists df; then
|
||||
local disk_info
|
||||
disk_info=$(df -h / 2>/dev/null | tail -1 | awk '{print $4 " available of " $2 " total"}')
|
||||
if [[ -n "$disk_info" ]]; then
|
||||
info+="${info:+, }Disk: $disk_info"
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "$info"
|
||||
}
|
||||
|
||||
#==============================================================================
|
||||
# AUTO-UPDATE FUNCTIONALITY
|
||||
#==============================================================================
|
||||
|
||||
# Check server connectivity
|
||||
check_server_connectivity() {
|
||||
local url="$1"
|
||||
curl -s --head "$url" | head -n 1 | grep -E "HTTP/[12] [23].." >/dev/null 2>&1
|
||||
}
|
||||
|
||||
# Get SHA256 hash of a file
|
||||
get_file_hash() {
|
||||
local file="$1"
|
||||
sha256sum "$file" 2>/dev/null | awk '{print $1}' || echo ""
|
||||
}
|
||||
|
||||
# Get SHA256 hash from URL content
|
||||
get_url_hash() {
|
||||
local url="$1"
|
||||
curl -s "$url" 2>/dev/null | sha256sum | awk '{print $1}' || echo ""
|
||||
}
|
||||
|
||||
# Perform self-update if newer version is available
|
||||
perform_self_update() {
|
||||
if [[ "$AUTO_UPDATE_ENABLED" != "true" ]]; then
|
||||
log_info "Auto-update is disabled"
|
||||
return 0
|
||||
fi
|
||||
|
||||
local server_url="$SERVER_BASE_URL/$SCRIPT_NAME"
|
||||
|
||||
log_step "Checking for script updates..."
|
||||
|
||||
# Check if server file is accessible
|
||||
if ! check_server_connectivity "$server_url"; then
|
||||
log_warning "Cannot connect to update server, continuing with current version"
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Compare local and server file hashes
|
||||
local local_hash server_hash
|
||||
local_hash=$(get_file_hash "$SCRIPT_NAME")
|
||||
server_hash=$(get_url_hash "$server_url")
|
||||
|
||||
if [[ -z "$local_hash" || -z "$server_hash" ]]; then
|
||||
log_warning "Cannot determine file hashes, skipping update"
|
||||
return 0
|
||||
fi
|
||||
|
||||
if [[ "$local_hash" != "$server_hash" ]]; then
|
||||
log_info "Update available, downloading new version..."
|
||||
|
||||
# Create backup of current script
|
||||
local backup_file="${SCRIPT_NAME}.backup.$(date +%s)"
|
||||
cp "$SCRIPT_NAME" "$backup_file" || die "Failed to create backup"
|
||||
|
||||
# Download updated script
|
||||
if curl -s -o "$SCRIPT_NAME" "$server_url"; then
|
||||
chmod +x "$SCRIPT_NAME" || die "Failed to set executable permissions"
|
||||
log_success "Script updated successfully"
|
||||
|
||||
log_step "Running updated script..."
|
||||
exec ./"$SCRIPT_NAME" "$@"
|
||||
else
|
||||
# Restore backup on failure
|
||||
mv "$backup_file" "$SCRIPT_NAME"
|
||||
die "Failed to download updated script"
|
||||
fi
|
||||
else
|
||||
log_success "Script is already up to date"
|
||||
fi
|
||||
}
|
||||
|
||||
#==============================================================================
|
||||
# DOCKER CLEANUP FUNCTIONS
|
||||
#==============================================================================
|
||||
|
||||
# Clean Docker resources
|
||||
cleanup_docker() {
|
||||
if ! command_exists docker; then
|
||||
log_info "Docker not found, skipping Docker cleanup"
|
||||
return 0
|
||||
fi
|
||||
|
||||
log_step "Starting Docker cleanup..."
|
||||
|
||||
# Check if Docker daemon is running
|
||||
if ! docker info >/dev/null 2>&1; then
|
||||
log_warning "Docker daemon not running, skipping Docker cleanup"
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Get initial Docker disk usage
|
||||
local docker_usage_before=""
|
||||
if docker system df >/dev/null 2>&1; then
|
||||
docker_usage_before=$(docker system df 2>/dev/null || echo "")
|
||||
fi
|
||||
|
||||
# Remove unused images
|
||||
log_info "Removing unused Docker images..."
|
||||
if docker image prune -af >/dev/null 2>&1; then
|
||||
log_success "Docker images cleaned"
|
||||
else
|
||||
log_warning "Docker image cleanup failed"
|
||||
fi
|
||||
|
||||
# Remove stopped containers
|
||||
log_info "Removing stopped Docker containers..."
|
||||
if docker container prune -f >/dev/null 2>&1; then
|
||||
log_success "Docker containers cleaned"
|
||||
else
|
||||
log_warning "Docker container cleanup failed"
|
||||
fi
|
||||
|
||||
# Remove unused volumes
|
||||
log_info "Removing unused Docker volumes..."
|
||||
if docker volume prune -f >/dev/null 2>&1; then
|
||||
log_success "Docker volumes cleaned"
|
||||
else
|
||||
log_warning "Docker volume cleanup failed"
|
||||
fi
|
||||
|
||||
# Remove unused networks
|
||||
log_info "Removing unused Docker networks..."
|
||||
if docker network prune -f >/dev/null 2>&1; then
|
||||
log_success "Docker networks cleaned"
|
||||
else
|
||||
log_warning "Docker network cleanup failed"
|
||||
fi
|
||||
|
||||
# Complete system cleanup
|
||||
log_info "Running Docker system cleanup..."
|
||||
if docker system prune -af >/dev/null 2>&1; then
|
||||
log_success "Docker system cleanup completed"
|
||||
else
|
||||
log_warning "Docker system cleanup failed"
|
||||
fi
|
||||
|
||||
# Show space freed if possible
|
||||
if [[ -n "$docker_usage_before" ]] && docker system df >/dev/null 2>&1; then
|
||||
log_info "Docker cleanup completed"
|
||||
fi
|
||||
}
|
||||
|
||||
#==============================================================================
|
||||
# PACKAGE MANAGER CLEANUP FUNCTIONS
|
||||
#==============================================================================
|
||||
|
||||
# Clean APK cache (Alpine Linux)
|
||||
cleanup_apk() {
|
||||
if ! command_exists apk; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
log_step "Cleaning APK cache..."
|
||||
|
||||
# Clean APK cache
|
||||
if [[ -d /var/cache/apk ]]; then
|
||||
clean_directory "/var/cache/apk" "APK cache directory"
|
||||
fi
|
||||
|
||||
# Clean APK cache using apk command
|
||||
if apk cache clean >/dev/null 2>&1; then
|
||||
log_success "APK cache cleaned"
|
||||
fi
|
||||
|
||||
# Update package index
|
||||
log_info "Updating APK package index..."
|
||||
if apk update >/dev/null 2>&1; then
|
||||
log_success "APK index updated"
|
||||
else
|
||||
log_warning "APK index update failed"
|
||||
fi
|
||||
}
|
||||
|
||||
# Clean APT cache (Debian/Ubuntu)
|
||||
cleanup_apt() {
|
||||
if ! command_exists apt-get; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
log_step "Cleaning APT cache..."
|
||||
|
||||
# Clean downloaded packages
|
||||
if apt-get clean >/dev/null 2>&1; then
|
||||
log_success "APT cache cleaned"
|
||||
else
|
||||
log_warning "APT clean failed"
|
||||
fi
|
||||
|
||||
# Remove orphaned packages
|
||||
if apt-get autoclean >/dev/null 2>&1; then
|
||||
log_success "APT autocleaned"
|
||||
else
|
||||
log_warning "APT autoclean failed"
|
||||
fi
|
||||
|
||||
# Remove unnecessary packages
|
||||
if apt-get autoremove -y >/dev/null 2>&1; then
|
||||
log_success "Unnecessary packages removed"
|
||||
else
|
||||
log_warning "APT autoremove failed"
|
||||
fi
|
||||
|
||||
# Update package index
|
||||
log_info "Updating APT package index..."
|
||||
if apt-get update >/dev/null 2>&1; then
|
||||
log_success "APT index updated"
|
||||
else
|
||||
log_warning "APT index update failed"
|
||||
fi
|
||||
}
|
||||
|
||||
#==============================================================================
|
||||
# SYSTEM CLEANUP FUNCTIONS
|
||||
#==============================================================================
|
||||
|
||||
# Clean system temporary directories
|
||||
cleanup_temp_dirs() {
|
||||
log_step "Cleaning temporary directories..."
|
||||
|
||||
for temp_dir in "${TEMP_DIRS[@]}"; do
|
||||
if [[ -d "$temp_dir" ]]; then
|
||||
# Clean contents but preserve the directory
|
||||
find "$temp_dir" -mindepth 1 -maxdepth 1 -mtime +1 -exec rm -rf {} + 2>/dev/null || true
|
||||
log_success "Cleaned old files in $temp_dir"
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
# Clean system cache directories
|
||||
cleanup_cache_dirs() {
|
||||
log_step "Cleaning cache directories..."
|
||||
|
||||
for cache_dir in "${CACHE_DIRS[@]}"; do
|
||||
if [[ -d "$cache_dir" ]]; then
|
||||
clean_directory "$cache_dir" "Cache directory $cache_dir"
|
||||
fi
|
||||
done
|
||||
|
||||
# Clean additional cache locations
|
||||
local additional_caches=(
|
||||
"/var/lib/apt/lists"
|
||||
"/var/cache/debconf"
|
||||
"/root/.npm"
|
||||
"/root/.pip"
|
||||
"/home/*/.cache"
|
||||
"/home/*/.npm"
|
||||
"/home/*/.pip"
|
||||
)
|
||||
|
||||
for cache_pattern in "${additional_caches[@]}"; do
|
||||
# Use shell expansion for patterns
|
||||
for cache_path in $cache_pattern; do
|
||||
if [[ -d "$cache_path" ]]; then
|
||||
clean_directory "$cache_path" "Additional cache $cache_path"
|
||||
fi
|
||||
done 2>/dev/null || true
|
||||
done
|
||||
}
|
||||
|
||||
# Clean old log files
|
||||
cleanup_logs() {
|
||||
log_step "Cleaning old log files..."
|
||||
|
||||
# Clean logs older than retention period
|
||||
if [[ -d /var/log ]]; then
|
||||
local cleaned_count=0
|
||||
|
||||
# Find and remove old log files
|
||||
while IFS= read -r -d '' logfile; do
|
||||
rm -f "$logfile" 2>/dev/null && ((cleaned_count++))
|
||||
done < <(find /var/log -type f -name "*.log" -mtime +"$LOG_RETENTION_DAYS" -print0 2>/dev/null || true)
|
||||
|
||||
# Clean compressed logs
|
||||
while IFS= read -r -d '' logfile; do
|
||||
rm -f "$logfile" 2>/dev/null && ((cleaned_count++))
|
||||
done < <(find /var/log -type f \( -name "*.log.gz" -o -name "*.log.bz2" -o -name "*.log.xz" \) -mtime +"$LOG_RETENTION_DAYS" -print0 2>/dev/null || true)
|
||||
|
||||
if [[ $cleaned_count -gt 0 ]]; then
|
||||
log_success "Removed $cleaned_count old log files"
|
||||
else
|
||||
log_info "No old log files to remove"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Truncate large active log files
|
||||
local large_logs
|
||||
while IFS= read -r -d '' logfile; do
|
||||
if [[ -f "$logfile" && -w "$logfile" ]]; then
|
||||
truncate -s 0 "$logfile" 2>/dev/null || true
|
||||
fi
|
||||
done < <(find /var/log -type f -name "*.log" -size +100M -print0 2>/dev/null || true)
|
||||
}
|
||||
|
||||
# Clean systemd journal
|
||||
cleanup_journal() {
|
||||
if ! command_exists journalctl; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
log_step "Cleaning systemd journal..."
|
||||
|
||||
# Clean journal older than retention period
|
||||
if journalctl --vacuum-time="${JOURNAL_RETENTION_DAYS}d" >/dev/null 2>&1; then
|
||||
log_success "Journal cleaned (older than $JOURNAL_RETENTION_DAYS days)"
|
||||
else
|
||||
log_warning "Journal cleanup failed"
|
||||
fi
|
||||
|
||||
# Limit journal size
|
||||
if journalctl --vacuum-size=100M >/dev/null 2>&1; then
|
||||
log_success "Journal size limited to 100MB"
|
||||
fi
|
||||
}
|
||||
|
||||
# Clean thumbnail caches
|
||||
cleanup_thumbnails() {
|
||||
log_step "Cleaning thumbnail caches..."
|
||||
|
||||
local thumbnail_dirs=(
|
||||
"/root/.thumbnails"
|
||||
"/root/.cache/thumbnails"
|
||||
"/home/*/.thumbnails"
|
||||
"/home/*/.cache/thumbnails"
|
||||
)
|
||||
|
||||
for thumb_pattern in "${thumbnail_dirs[@]}"; do
|
||||
for thumb_dir in $thumb_pattern; do
|
||||
if [[ -d "$thumb_dir" ]]; then
|
||||
clean_directory "$thumb_dir" "Thumbnail cache $thumb_dir"
|
||||
fi
|
||||
done 2>/dev/null || true
|
||||
done
|
||||
}
|
||||
|
||||
# Optimize memory caches
|
||||
optimize_memory() {
|
||||
log_step "Optimizing memory caches..."
|
||||
|
||||
# Sync filesystem
|
||||
if sync; then
|
||||
log_info "Filesystem synced"
|
||||
fi
|
||||
|
||||
# Drop caches (page cache, dentries and inodes)
|
||||
if [[ -w /proc/sys/vm/drop_caches ]]; then
|
||||
echo 3 > /proc/sys/vm/drop_caches 2>/dev/null && log_success "Memory caches dropped" || log_warning "Failed to drop memory caches"
|
||||
fi
|
||||
}
|
||||
|
||||
#==============================================================================
|
||||
# REPORTING FUNCTIONS
|
||||
#==============================================================================
|
||||
|
||||
# Generate cleanup summary
|
||||
generate_summary() {
|
||||
log_step "Generating cleanup summary..."
|
||||
|
||||
local system_info
|
||||
system_info=$(get_system_info)
|
||||
|
||||
if [[ -n "$system_info" ]]; then
|
||||
log_info "System status: $system_info"
|
||||
fi
|
||||
|
||||
# Show disk usage of important directories
|
||||
local important_dirs=("/" "/var" "/tmp" "/var/log" "/var/cache")
|
||||
for dir in "${important_dirs[@]}"; do
|
||||
if [[ -d "$dir" ]]; then
|
||||
local usage
|
||||
usage=$(df -h "$dir" 2>/dev/null | tail -1 | awk '{print $5 " used (" $4 " available)"}' || echo "unknown")
|
||||
log_info "$dir: $usage"
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
#==============================================================================
|
||||
# MAIN EXECUTION
|
||||
#==============================================================================
|
||||
|
||||
main() {
|
||||
log_step "Starting System Cleanup and Maintenance"
|
||||
echo
|
||||
|
||||
# Show initial system status
|
||||
local initial_info
|
||||
initial_info=$(get_system_info)
|
||||
if [[ -n "$initial_info" ]]; then
|
||||
log_info "Initial system status: $initial_info"
|
||||
echo
|
||||
fi
|
||||
|
||||
# Perform self-update if enabled
|
||||
perform_self_update "$@"
|
||||
|
||||
# Docker cleanup
|
||||
cleanup_docker
|
||||
|
||||
# Package manager cleanup
|
||||
cleanup_apk
|
||||
cleanup_apt
|
||||
|
||||
# System cleanup
|
||||
cleanup_temp_dirs
|
||||
cleanup_cache_dirs
|
||||
cleanup_logs
|
||||
cleanup_journal
|
||||
cleanup_thumbnails
|
||||
|
||||
# Memory optimization
|
||||
optimize_memory
|
||||
|
||||
# Generate summary
|
||||
echo
|
||||
generate_summary
|
||||
|
||||
echo
|
||||
log_success "System cleanup and maintenance completed!"
|
||||
}
|
||||
|
||||
# Execute main function with all arguments
|
||||
main "$@"
|
docker-updater.sh
@@ -1,106 +1,338 @@
#!/bin/bash

NC='\033[0m'
LIGHT_GREEN='\033[1;32m'
LIGHT_BLUE='\033[1;34m'
LIGHT_GREEN='\033[1;32m'
LIGHT_GREY='\033[0;37m'
# Docker Container Updater
#
# Description: Automatically updates Docker containers and manages Docker images
# Features:
# - Self-updating capability
# - Updates all Docker Compose projects in /root/docker
# - Skips containers with .ignore file
# - Removes obsolete Docker Compose version attributes
# - Cleans up unused Docker images
# Author: ivanch
# Version: 2.0

### AUTO-UPDATER ###
FILE_NAME="docker-updater.sh"
SERVER_FILE="https://git.ivanch.me/ivanch/server-scripts/raw/branch/main/$FILE_NAME"
SERVER_OK=1
set -euo pipefail # Exit on error, undefined vars, and pipe failures

# Check if the server file exists
curl -s --head $SERVER_FILE | head -n 1 | grep -E "HTTP/[12] [23].." > /dev/null
#==============================================================================
# CONFIGURATION
#==============================================================================

if [ $? -ne 0 ]; then
echo -e "${RED}[x] Error: $SERVER_FILE not found.${NC}" >&2
SERVER_OK=0
fi
# Color definitions for output formatting
readonly NC='\033[0m'
readonly RED='\033[1;31m'
readonly GREEN='\033[1;32m'
readonly LIGHT_GREEN='\033[1;32m'
readonly LIGHT_BLUE='\033[1;34m'
readonly LIGHT_GREY='\033[0;37m'
readonly YELLOW='\033[1;33m'

if [ $SERVER_OK -eq 1 ]; then
echo -e "${LIGHT_BLUE}[i] Running auto-update"
# Script configuration
readonly SCRIPT_NAME="docker-updater.sh"
readonly SERVER_BASE_URL="https://git.ivanch.me/ivanch/server-scripts/raw/branch/main"
readonly DOCKER_FOLDER="/root/docker"
readonly COMPOSE_FILES=("docker-compose.yml" "docker-compose.yaml" "compose.yaml" "compose.yml")

# Compare the local and server files sha256sum to check if an update is needed
LOCAL_SHA256=$(sha256sum $FILE_NAME | awk '{print $1}')
SERVER_SHA256=$(curl -s $SERVER_FILE | sha256sum | awk '{print $1}')
# Auto-update configuration
readonly AUTO_UPDATE_ENABLED=true

if [ "$LOCAL_SHA256" != "$SERVER_SHA256" ]; then
echo -e "${LIGHT_GREY}[i] Updating $FILE_NAME${NC}"
#==============================================================================
# UTILITY FUNCTIONS
#==============================================================================

curl -s -o $FILE_NAME $SERVER_FILE
chmod +x $FILE_NAME
echo -e "${LIGHT_GREEN}[i] $FILE_NAME updated.${NC}"
# Print formatted log messages
log_info() { echo -e "${LIGHT_GREY}[i] $1${NC}"; }
log_success() { echo -e "${LIGHT_GREEN}[✓] $1${NC}"; }
log_warning() { echo -e "${YELLOW}[!] $1${NC}"; }
log_error() { echo -e "${RED}[x] $1${NC}" >&2; }
log_step() { echo -e "${LIGHT_BLUE}[i] $1${NC}"; }
log_container() { echo -e "${LIGHT_BLUE}[$1] $2${NC}"; }

echo -e "${LIGHT_BLUE}[i] Running updated $FILE_NAME...${NC}"
./$FILE_NAME
exit 0
else
echo -e "${LIGHT_GREEN}[i] $FILE_NAME is already up to date.${NC}"
fi
fi
|
||||
|
||||
####################
|
||||
|
||||
# Navigate to docker folder
|
||||
DOCKER_FOLDER=/root/docker
|
||||
|
||||
if [ -d "$DOCKER_FOLDER" ]; then
|
||||
cd $DOCKER_FOLDER
|
||||
else
|
||||
echo -e "${LIGHT_GREY}[i] Docker folder not found.${NC}"
|
||||
# Exit with error message
|
||||
die() {
|
||||
log_error "$1"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Updating Docker containers
|
||||
# Check if a command exists
|
||||
command_exists() {
|
||||
command -v "$1" >/dev/null 2>&1
|
||||
}
|
||||
|
||||
for folder in */; do
|
||||
cd $DOCKER_FOLDER/$folder
|
||||
# Check if Docker and Docker Compose are available
|
||||
check_docker_requirements() {
|
||||
log_info "Checking Docker requirements..."
|
||||
|
||||
# Remove trailing slash from folder name if it exists
|
||||
folder=${folder%/}
|
||||
echo -e "${LIGHT_BLUE}[$folder] Checking for updates..."
|
||||
|
||||
# if .ignore file exists, skip the folder
|
||||
if [ -f ".ignore" ]; then
|
||||
echo -e "${LIGHT_BLUE}[$folder] Skipping docker container update"
|
||||
cd ..
|
||||
continue
|
||||
if ! command_exists docker; then
|
||||
die "Docker is not installed or not in PATH"
|
||||
fi
|
||||
|
||||
# Check compose files for obsolete version attribute
|
||||
for compose_file in "docker-compose.yml" "docker-compose.yaml" "compose.yaml" "compose.yml"; do
|
||||
if [ -f "$compose_file" ]; then
|
||||
echo -e "${LIGHT_BLUE}[$folder] Checking $compose_file for obsolete version attribute"
|
||||
sed -i '/^version:/d' "$compose_file"
|
||||
if ! docker compose version >/dev/null 2>&1; then
|
||||
die "Docker Compose is not available"
|
||||
fi
|
||||
|
||||
log_success "Docker requirements satisfied"
|
||||
}
|
||||
|
||||
# Get SHA256 hash of a file
|
||||
get_file_hash() {
|
||||
local file="$1"
|
||||
sha256sum "$file" 2>/dev/null | awk '{print $1}' || echo ""
|
||||
}
|
||||
|
||||
# Get SHA256 hash from URL content
|
||||
get_url_hash() {
|
||||
local url="$1"
|
||||
curl -s "$url" 2>/dev/null | sha256sum | awk '{print $1}' || echo ""
|
||||
}
|
||||
|
||||
# Check if server file is accessible
|
||||
check_server_connectivity() {
|
||||
local url="$1"
|
||||
curl -s --head "$url" | head -n 1 | grep -E "HTTP/[12] [23].." >/dev/null 2>&1
|
||||
}
|
||||
|
||||
#==============================================================================
|
||||
# AUTO-UPDATE FUNCTIONALITY
|
||||
#==============================================================================
|
||||
|
||||
# Perform self-update if newer version is available
|
||||
perform_self_update() {
|
||||
if [[ "$AUTO_UPDATE_ENABLED" != "true" ]]; then
|
||||
log_info "Auto-update is disabled"
|
||||
return 0
|
||||
fi
|
||||
|
||||
local server_url="$SERVER_BASE_URL/$SCRIPT_NAME"
|
||||
|
||||
log_step "Checking for script updates..."
|
||||
|
||||
# Check if server file is accessible
|
||||
if ! check_server_connectivity "$server_url"; then
|
||||
log_warning "Cannot connect to update server, continuing with current version"
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Compare local and server file hashes
|
||||
local local_hash
|
||||
local server_hash
|
||||
|
||||
local_hash=$(get_file_hash "$SCRIPT_NAME")
|
||||
server_hash=$(get_url_hash "$server_url")
|
||||
|
||||
if [[ -z "$local_hash" || -z "$server_hash" ]]; then
|
||||
log_warning "Cannot determine file hashes, skipping update"
|
||||
return 0
|
||||
fi
|
||||
|
||||
if [[ "$local_hash" != "$server_hash" ]]; then
|
||||
log_info "Update available, downloading new version..."
|
||||
|
||||
# Create backup of current script
|
||||
local backup_file="${SCRIPT_NAME}.backup.$(date +%s)"
|
||||
cp "$SCRIPT_NAME" "$backup_file" || die "Failed to create backup"
|
||||
|
||||
# Download updated script
|
||||
if curl -s -o "$SCRIPT_NAME" "$server_url"; then
|
||||
chmod +x "$SCRIPT_NAME" || die "Failed to set executable permissions"
|
||||
log_success "Script updated successfully"
|
||||
|
||||
log_step "Running updated script..."
|
||||
exec ./"$SCRIPT_NAME" "$@"
|
||||
else
|
||||
# Restore backup on failure
|
||||
mv "$backup_file" "$SCRIPT_NAME"
|
||||
die "Failed to download updated script"
|
||||
fi
|
||||
else
|
||||
log_success "Script is already up to date"
|
||||
fi
|
||||
}
|
||||
|
||||
#==============================================================================
|
||||
# DOCKER COMPOSE MANAGEMENT
|
||||
#==============================================================================
|
||||
|
||||
# Find the active Docker Compose file in current directory
|
||||
find_compose_file() {
|
||||
for compose_file in "${COMPOSE_FILES[@]}"; do
|
||||
if [[ -f "$compose_file" ]]; then
|
||||
echo "$compose_file"
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
return 1
|
||||
}
|
||||
|
||||
# Remove obsolete version attribute from Docker Compose files
|
||||
clean_compose_files() {
|
||||
local container_name="$1"
|
||||
|
||||
for compose_file in "${COMPOSE_FILES[@]}"; do
|
||||
if [[ -f "$compose_file" ]]; then
|
||||
log_container "$container_name" "Cleaning obsolete version attribute from $compose_file"
|
||||
sed -i '/^version:/d' "$compose_file" || log_warning "Failed to clean $compose_file"
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
# Check if container should be skipped
|
||||
should_skip_container() {
|
||||
[[ -f ".ignore" ]]
|
||||
}
|
||||
|
||||
# Check if any containers are running in current directory
|
||||
has_running_containers() {
|
||||
local running_containers
|
||||
running_containers=$(docker compose ps -q 2>/dev/null || echo "")
|
||||
[[ -n "$running_containers" ]]
|
||||
}
|
||||
|
||||
# Update a single Docker Compose project
|
||||
update_docker_project() {
|
||||
local project_dir="$1"
|
||||
local container_name
|
||||
container_name=$(basename "$project_dir")
|
||||
|
||||
log_container "$container_name" "Checking for updates..."
|
||||
|
||||
# Change to project directory
|
||||
cd "$project_dir" || {
|
||||
log_error "Cannot access directory: $project_dir"
|
||||
return 1
|
||||
}
|
||||
|
||||
# Check if container should be skipped
|
||||
if should_skip_container; then
|
||||
log_container "$container_name" "Skipping (found .ignore file)"
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Verify compose file exists
|
||||
local compose_file
|
||||
if ! compose_file=$(find_compose_file); then
|
||||
log_container "$container_name" "No Docker Compose file found, skipping"
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Clean compose files
|
||||
clean_compose_files "$container_name"
|
||||
|
||||
# Check if containers are running
|
||||
if ! has_running_containers; then
|
||||
log_container "$container_name" "No running containers, skipping update"
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Stop containers
|
||||
log_container "$container_name" "Stopping containers..."
|
||||
if ! docker compose down >/dev/null 2>&1; then
|
||||
log_error "Failed to stop containers in $container_name"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Pull updated images
|
||||
log_container "$container_name" "Pulling updated images..."
|
||||
if ! docker compose pull -q >/dev/null 2>&1; then
|
||||
log_warning "Failed to pull images for $container_name, attempting to restart anyway"
|
||||
fi
|
||||
|
||||
# Start containers
|
||||
log_container "$container_name" "Starting containers..."
|
||||
if ! docker compose up -d >/dev/null 2>&1; then
|
||||
log_error "Failed to start containers in $container_name"
|
||||
return 1
|
||||
fi
|
||||
|
||||
log_container "$container_name" "Update completed successfully!"
|
||||
return 0
|
||||
}
|
||||
|
||||
# Update all Docker Compose projects
|
||||
update_all_docker_projects() {
|
||||
log_step "Starting Docker container updates..."
|
||||
|
||||
# Check if Docker folder exists
|
||||
if [[ ! -d "$DOCKER_FOLDER" ]]; then
|
||||
die "Docker folder not found: $DOCKER_FOLDER"
|
||||
fi
|
||||
|
||||
# Change to Docker folder
|
||||
cd "$DOCKER_FOLDER" || die "Cannot access Docker folder: $DOCKER_FOLDER"
|
||||
|
||||
local updated_count=0
|
||||
local failed_count=0
|
||||
local skipped_count=0
|
||||
|
||||
# Process each subdirectory
|
||||
for project_dir in */; do
|
||||
if [[ -d "$project_dir" ]]; then
|
||||
local project_path="$DOCKER_FOLDER/$project_dir"
|
||||
|
||||
if update_docker_project "$project_path"; then
|
||||
if should_skip_container; then
|
||||
((skipped_count++))
|
||||
else
|
||||
((updated_count++))
|
||||
fi
|
||||
else
|
||||
((failed_count++))
|
||||
fi
|
||||
|
||||
# Return to Docker folder for next iteration
|
||||
cd "$DOCKER_FOLDER" || die "Cannot return to Docker folder"
|
||||
fi
|
||||
done
|
||||
|
||||
DOCKER_RUNNING=$(docker compose ps -q)
|
||||
|
||||
if [ -n "$DOCKER_RUNNING" ]; then
|
||||
echo -e "${LIGHT_BLUE}[$folder] Stopping Docker containers"
|
||||
docker compose down > /dev/null
|
||||
else
|
||||
echo -e "${LIGHT_BLUE}[$folder] No Docker containers running, will skip update"
|
||||
continue
|
||||
# Report results
|
||||
log_success "Docker update summary:"
|
||||
log_info " Updated: $updated_count projects"
|
||||
log_info " Skipped: $skipped_count projects"
|
||||
if [[ $failed_count -gt 0 ]]; then
|
||||
log_warning " Failed: $failed_count projects"
|
||||
fi
|
||||
}
|
||||
|
||||
echo -e "${LIGHT_BLUE}[$folder] Updating images"
|
||||
docker compose pull -q > /dev/null
|
||||
#==============================================================================
|
||||
# DOCKER CLEANUP
|
||||
#==============================================================================
|
||||
|
||||
echo -e "${LIGHT_BLUE}[$folder] Starting Docker containers"
|
||||
docker compose up -d > /dev/null
|
||||
# Clean up unused Docker resources
|
||||
cleanup_docker_resources() {
|
||||
log_step "Cleaning up unused Docker resources..."
|
||||
|
||||
echo -e "${LIGHT_GREEN}[$folder] Updated!"
|
||||
# Remove unused images
|
||||
log_info "Removing unused Docker images..."
|
||||
if docker image prune -af >/dev/null 2>&1; then
|
||||
log_success "Docker image cleanup completed"
|
||||
else
|
||||
log_warning "Docker image cleanup failed"
|
||||
fi
|
||||
}
|
||||
|
||||
cd $DOCKER_FOLDER
|
||||
done
|
||||
#==============================================================================
|
||||
# MAIN EXECUTION
|
||||
#==============================================================================
|
||||
|
||||
# Run Docker image prune
|
||||
main() {
|
||||
log_step "Starting Docker Container Updater"
|
||||
echo
|
||||
|
||||
echo -e "${LIGHT_BLUE}Running Docker image prune..."
|
||||
docker image prune -af
|
||||
# Check requirements
|
||||
check_docker_requirements
|
||||
|
||||
echo -e "${LIGHT_GREEN} All done!"
|
||||
# Perform self-update if enabled
|
||||
perform_self_update "$@"
|
||||
|
||||
# Update all Docker projects
|
||||
update_all_docker_projects
|
||||
|
||||
# Clean up Docker resources
|
||||
cleanup_docker_resources
|
||||
|
||||
echo
|
||||
log_success "Docker container update process completed!"
|
||||
}
|
||||
|
||||
# Execute main function with all arguments
|
||||
main "$@"
|
scripts-download.sh
@@ -1,193 +1,292 @@
#!/bin/bash

# Usage:
## curl -sSL https://git.ivanch.me/ivanch/server-scripts/raw/branch/main/scripts-download.sh | bash
# Usage: curl -sSL https://git.ivanch.me/ivanch/server-scripts/raw/branch/main/scripts-download.sh | bash

# colors
RED='\033[1;31m'
GREEN='\033[1;32m'
NC='\033[0m'
LIGHT_BLUE='\033[1;34m'
LIGHT_RED='\033[1;31m'
LIGHT_GREEN='\033[1;32m'
GREY='\033[1;30m'
YELLOW='\033[1;33m'
set -euo pipefail

FILES_URL="https://git.ivanch.me/ivanch/server-scripts/raw/branch/main"
#==============================================================================
# CONFIGURATION
#==============================================================================

echo -e "\r${LIGHT_BLUE}[i] Running scripts-download.sh"
# Color definitions for output formatting
readonly RED='\033[1;31m'
readonly GREEN='\033[1;32m'
readonly NC='\033[0m'
readonly LIGHT_BLUE='\033[1;34m'
readonly LIGHT_RED='\033[1;31m'
readonly LIGHT_GREEN='\033[1;32m'
readonly GREY='\033[1;30m'
readonly YELLOW='\033[1;33m'

# Detect OS (Debian or Alpine)
echo -e "${GREY}[i] Detecting OS..."
# Configuration
readonly FILES_URL="https://git.ivanch.me/ivanch/server-scripts/raw/branch/main"
readonly REQUIRED_PACKAGES=("zip" "unzip" "sha256sum" "curl" "crontab")
readonly AVAILABLE_SCRIPTS=("clean.sh" "backup.sh" "docker-updater.sh")

DETECTED=""
# Format: [script_name]="cron_schedule"
declare -A CRONTAB_SCHEDULES=(
["clean.sh"]="0 23 * * *" # Daily at 11 PM
["backup.sh"]="30 23 * * 1,5" # Monday and Friday at 11:30 PM
["docker-updater.sh"]="0 3 */4 * *" # Every 4 days at 3 AM
)

if [ -x "$(command -v apk)" ]; then
DETECTED="Alpine"
fi
#==============================================================================
# UTILITY FUNCTIONS
#==============================================================================

if [ -x "$(command -v apt)" ]; then
DETECTED="Debian"
fi
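# A minimal sketch of the crontab line that setup_crontab (defined later in this
# script) generates for each entry in CRONTAB_SCHEDULES above, assuming the scripts
# were downloaded to /root/scripts (the actual path is the working directory at
# install time, and the log name is derived from the script name):
#   0 23 * * * /root/scripts/clean.sh > /tmp/clean.log 2>&1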
|
||||
# Print formatted log messages
|
||||
log_info() { echo -e "${GREY}[i] $1${NC}"; }
|
||||
log_success() { echo -e "${GREEN}[✓] $1${NC}"; }
|
||||
log_warning() { echo -e "${YELLOW}[!] $1${NC}"; }
|
||||
log_error() { echo -e "${RED}[x] $1${NC}" >&2; }
|
||||
log_step() { echo -e "${LIGHT_BLUE}[i] $1${NC}"; }
|
||||
|
||||
if [ -z "$DETECTED" ]; then
|
||||
echo -e "${RED}[x] Error: OS not supported.${NC}" >&2
|
||||
# Exit with error message
|
||||
die() {
|
||||
log_error "$1"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo -e "${GREEN}[✓] Detected '$DETECTED' Linux.${NC}"
|
||||
|
||||
|
||||
echo -e "${GREY}[i] Checking if required packages are installed..."
|
||||
|
||||
PACKAGES=("zip" "unzip" "sha256sum" "curl" "crontab")
|
||||
NOT_INSLALLED=()
|
||||
detect_packages() {
|
||||
for PACKAGE in "${PACKAGES[@]}"; do
|
||||
if ! [ -x "$(command -v $PACKAGE)" ]; then
|
||||
echo -e "${YELLOW}[!] Error: $PACKAGE is not installed, will attempt to install later.${NC}" >&2
|
||||
NOT_INSLALLED+=($PACKAGE)
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
detect_packages
|
||||
# Check if a command exists
|
||||
command_exists() {
|
||||
command -v "$1" >/dev/null 2>&1
|
||||
}
|
||||
|
||||
if [ ${#NOT_INSLALLED[@]} -ne 0 ]; then
|
||||
if [ "$DETECTED" == "Alpine" ]; then
|
||||
echo -e "${GREY}[i] Installing required packages using APK...${NC}"
|
||||
echo -e "${GREY}[i] Updating APK...${NC}"
|
||||
apk update >/dev/null
|
||||
echo -e "${GREY}[i] Installing packages...${NC}"
|
||||
apk add --no-cache ${NOT_INSLALLED[@]} >/dev/null
|
||||
# Check if a process is running
|
||||
process_running() {
|
||||
pgrep "$1" >/dev/null 2>&1
|
||||
}
|
||||
|
||||
if [ $? -ne 0 ]; then
|
||||
echo -e "${RED}[x] Error: Failed to install required packages.${NC}" >&2
|
||||
exit 1
|
||||
else
|
||||
echo -e "${GREEN}[✓] All required packages should be installed.${NC}"
|
||||
#==============================================================================
|
||||
# MAIN FUNCTIONS
|
||||
#==============================================================================
|
||||
|
||||
# Detect the operating system
|
||||
detect_operating_system() {
|
||||
log_info "Detecting operating system..."
|
||||
|
||||
if command_exists apk; then
|
||||
echo "Alpine"
|
||||
elif command_exists apt; then
|
||||
echo "Debian"
|
||||
else
|
||||
die "Unsupported operating system. This script supports Alpine and Debian-based systems only."
|
||||
fi
|
||||
}
|
||||
|
||||
# Check for missing packages
|
||||
get_missing_packages() {
|
||||
local missing=()
|
||||
|
||||
for package in "${REQUIRED_PACKAGES[@]}"; do
|
||||
if ! command_exists "$package"; then
|
||||
missing+=("$package")
|
||||
fi
|
||||
elif [ "$DETECTED" == "Debian" ]; then
|
||||
echo -e "${GREY}[i] Installing required packages using APT...${NC}"
|
||||
echo -e "${GREY}[i] Updating APT...${NC}"
|
||||
apt-get update -y >/dev/null
|
||||
echo -e "${GREY}[i] Installing packages...${NC}"
|
||||
apt-get install -y ${NOT_INSLALLED[@]} >/dev/null
|
||||
done
|
||||
|
||||
if [ $? -ne 0 ]; then
|
||||
echo -e "${RED}[x] Error: Failed to install required packages.${NC}" >&2
|
||||
exit 1
|
||||
else
|
||||
echo -e "${GREEN}[✓] All required packages should be installed.${NC}"
|
||||
printf '%s\n' "${missing[@]}"
|
||||
}
|
||||
|
||||
# Install packages based on the detected OS
|
||||
install_packages() {
|
||||
local os="$1"
|
||||
shift
|
||||
local packages=("$@")
|
||||
|
||||
if [[ ${#packages[@]} -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
log_info "Installing required packages: ${packages[*]}"
|
||||
|
||||
case "$os" in
|
||||
"Alpine")
|
||||
log_info "Updating APK package index..."
|
||||
apk update >/dev/null || die "Failed to update APK package index"
|
||||
|
||||
log_info "Installing packages via APK..."
|
||||
apk add --no-cache "${packages[@]}" >/dev/null || die "Failed to install packages via APK"
|
||||
;;
|
||||
"Debian")
|
||||
log_info "Updating APT package index..."
|
||||
apt-get update -y >/dev/null || die "Failed to update APT package index"
|
||||
|
||||
log_info "Installing packages via APT..."
|
||||
apt-get install -y "${packages[@]}" >/dev/null || die "Failed to install packages via APT"
|
||||
;;
|
||||
*)
|
||||
die "Unknown operating system: $os"
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
# Verify all required packages are available
|
||||
verify_packages() {
|
||||
log_info "Verifying package installation..."
|
||||
|
||||
local missing_packages
|
||||
readarray -t missing_packages < <(get_missing_packages)
|
||||
|
||||
if [[ ${#missing_packages[@]} -gt 0 ]]; then
|
||||
die "Failed to install required packages: ${missing_packages[*]}"
|
||||
fi
|
||||
|
||||
log_success "All required packages are available"
|
||||
}
|
||||
|
||||
# Check if crontab service is running
|
||||
check_crontab_service() {
|
||||
log_info "Checking crontab service status..."
|
||||
|
||||
if ! process_running "cron"; then
|
||||
die "Crontab service is not running. Please start the cron service first."
|
||||
fi
|
||||
|
||||
log_success "Crontab service is running"
|
||||
}
|
||||
|
||||
# Prompt user to select scripts for installation
|
||||
select_scripts() {
|
||||
local selected=()
|
||||
|
||||
log_info "Available scripts for download and installation:"
|
||||
echo
|
||||
|
||||
for script in "${AVAILABLE_SCRIPTS[@]}"; do
|
||||
local schedule="${CRONTAB_SCHEDULES[$script]:-"0 0 * * *"}"
|
||||
echo -e " ${LIGHT_BLUE}$script${NC} - Schedule: ${GREY}$schedule${NC}"
|
||||
done
|
||||
|
||||
echo
|
||||
log_info "Select scripts to download and install:"
|
||||
|
||||
for script in "${AVAILABLE_SCRIPTS[@]}"; do
|
||||
read -p "Install $script? [Y/n]: " choice </dev/tty
|
||||
if [[ "$choice" =~ ^[Yy]?$ ]]; then
|
||||
selected+=("$script")
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ ${#selected[@]} -eq 0 ]]; then
|
||||
die "No scripts selected. Exiting..."
|
||||
fi
|
||||
|
||||
NOT_INSLALLED=()
|
||||
detect_packages
|
||||
printf '%s\n' "${selected[@]}"
|
||||
}
|
||||
|
||||
if [ ${#NOT_INSLALLED[@]} -ne 0 ]; then
|
||||
echo -e "${RED}[x] Error: Failed to run some of the required packages.${NC}" >&2
|
||||
echo -e "${RED}[x] [${NOT_INSLALLED[@]}] are not installed.${NC}" >&2
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
# Verify server connectivity for selected scripts
|
||||
verify_server_connectivity() {
|
||||
local scripts=("$@")
|
||||
|
||||
echo -e "${GREEN}[✓] All required packages are installed.${NC}"
|
||||
log_info "Verifying server connectivity..."
|
||||
|
||||
echo -e "${GREY}[i] Checking if crontab is running..."
|
||||
for script in "${scripts[@]}"; do
|
||||
local url="$FILES_URL/$script"
|
||||
if ! curl -s --head "$url" | head -n 1 | grep -E "HTTP/[12] [23].." >/dev/null; then
|
||||
die "Script '$script' not found on server: $url"
|
||||
fi
|
||||
done
|
||||
|
||||
# Check if crontab is running on the system using pgrep (crond or cron)
|
||||
if ! pgrep "cron" > /dev/null; then
|
||||
echo -e "${RED}[x] Error: Crontab is not running.${NC}" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo -e "${GREEN}[✓] Crontab is running.${NC}"
|
||||
|
||||
# Variables
|
||||
FILES=("clean.sh" "backup.sh" "docker-updater.sh")
|
||||
|
||||
# Prompt user to select files to download
|
||||
selected_files=()
|
||||
echo -e "${GREY}[i] Select files to download and install on crontab:${NC} "
|
||||
for FILE in "${FILES[@]}"; do
|
||||
read -p "Do you want to download and install $FILE? [Y/n]: " choice </dev/tty
|
||||
if [[ "$choice" == "y" || "$choice" == "Y" || -z "$choice" ]]; then
|
||||
selected_files+=("$FILE")
|
||||
fi
|
||||
done
|
||||
|
||||
if [ ${#selected_files[@]} -eq 0 ]; then
|
||||
echo -e "${RED}[x] No files selected. Exiting...${NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check connection with the server for selected files
|
||||
echo -e "${GREY}[i] Checking connection with the server..."
|
||||
for FILE in "${selected_files[@]}"; do
|
||||
curl -s --head "$FILES_URL/$FILE" | head -n 1 | grep -E "HTTP/[12] [23].." > /dev/null
|
||||
|
||||
if [ $? -ne 0 ]; then
|
||||
echo -e "${RED}[x] Error: $FILE not found on the server.${NC}" >&2
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
echo -e "${GREEN}[✓] Connection with the server established.${NC}"
|
||||
|
||||
echo -e "${GREY}[i] Downloading scripts..."
|
||||
log_success "Server connectivity verified"
|
||||
}
|
||||
|
||||
# Download selected scripts
|
||||
for FILE in "${selected_files[@]}"; do
|
||||
curl -s -o "./$FILE" "$FILES_URL/$FILE"
|
||||
done
|
||||
download_scripts() {
|
||||
local scripts=("$@")
|
||||
|
||||
echo -e "${GREEN}[✓] Scripts downloaded.${NC}"
|
||||
log_info "Downloading ${#scripts[@]} script(s)..."
|
||||
|
||||
CURRENT_WORKDIR=$(pwd)
|
||||
for script in "${scripts[@]}"; do
|
||||
local url="$FILES_URL/$script"
|
||||
log_step "Downloading $script..."
|
||||
|
||||
# Setup permissions
|
||||
echo -e "${GREY}[i] Setting up permissions..."
|
||||
if ! curl -s -o "./$script" "$url"; then
|
||||
die "Failed to download $script from $url"
|
||||
fi
|
||||
|
||||
# Setup permissions for selected files
|
||||
for FILE in "${selected_files[@]}"; do
|
||||
chmod +x "./$FILE"
|
||||
done
|
||||
# Set executable permissions
|
||||
chmod +x "./$script" || die "Failed to set executable permissions for $script"
|
||||
done
|
||||
|
||||
echo -e "${GREEN}[✓] Permissions set up.${NC}"
|
||||
log_success "All scripts downloaded and configured"
|
||||
}
|
||||
|
||||
# Setup crontab for selected files
|
||||
echo -e "${GREY}[i] Setting up crontab..."
|
||||
# Setup crontab entries for selected scripts
|
||||
setup_crontab() {
|
||||
local scripts=("$@")
|
||||
local current_workdir
|
||||
current_workdir=$(pwd)
|
||||
|
||||
# Add crontabs
|
||||
for FILE in "${selected_files[@]}"; do
|
||||
if crontab -l 2>/dev/null | grep -q $FILE; then
|
||||
echo -e "${LIGHT_BLUE}[i] [$FILE] Crontab already exists. Removing...${NC}"
|
||||
crontab -l | grep -v $FILE | crontab -
|
||||
fi
|
||||
echo -e "${LIGHT_BLUE}[i] [$FILE] Adding crontab...${NC}"
|
||||
log_info "Setting up crontab entries..."
|
||||
|
||||
if [ "$FILE" == "clean.sh" ]; then
|
||||
(crontab -l 2>/dev/null; echo "0 23 * * * ${CURRENT_WORKDIR}/$FILE > /tmp/clean.log") | crontab -
|
||||
elif [ "$FILE" == "backup.sh" ]; then
|
||||
(crontab -l 2>/dev/null; echo "30 23 * * 1,5 ${CURRENT_WORKDIR}/$FILE > /tmp/backup.log") | crontab -
|
||||
elif [ "$FILE" == "docker-updater.sh" ]; then
|
||||
(crontab -l 2>/dev/null; echo "0 3 */4 * * ${CURRENT_WORKDIR}/$FILE > /tmp/docker-updater.log") | crontab -
|
||||
else
|
||||
echo -e "${YELLOW}[w] [$FILE] Warning: Crontab specific schedule not setup.${NC}" >&2
|
||||
(crontab -l 2>/dev/null; echo "0 0 * * * ${CURRENT_WORKDIR}/$FILE" > /tmp/$FILE.log) | crontab -
|
||||
for script in "${scripts[@]}"; do
|
||||
local schedule="${CRONTAB_SCHEDULES[$script]:-"0 0 * * *"}"
|
||||
local log_file="/tmp/${script%.*}.log"
|
||||
local cron_entry="$schedule $current_workdir/$script > $log_file 2>&1"
|
||||
|
||||
log_step "Configuring crontab for $script (Schedule: $schedule)..."
|
||||
|
||||
# Remove existing crontab entry for this script
|
||||
if crontab -l 2>/dev/null | grep -q "$script"; then
|
||||
log_step "Removing existing crontab entry for $script..."
|
||||
crontab -l 2>/dev/null | grep -v "$script" | crontab - || die "Failed to remove existing crontab entry"
|
||||
fi
|
||||
|
||||
# Add new crontab entry
|
||||
(crontab -l 2>/dev/null; echo "$cron_entry") | crontab - || die "Failed to add crontab entry for $script"
|
||||
|
||||
# Verify the entry was added
|
||||
if ! crontab -l 2>/dev/null | grep -q "$script"; then
|
||||
die "Failed to verify crontab entry for $script"
|
||||
fi
|
||||
|
||||
log_success "Crontab configured for $script"
|
||||
done
|
||||
|
||||
log_success "All crontab entries configured successfully"
|
||||
}
|
||||
|
||||
#==============================================================================
|
||||
# MAIN EXECUTION
|
||||
#==============================================================================
|
||||
|
||||
main() {
|
||||
log_step "Starting Server Scripts Downloader"
|
||||
echo
|
||||
|
||||
# System detection and validation
|
||||
local detected_os
|
||||
detected_os=$(detect_operating_system)
|
||||
log_success "Detected $detected_os Linux"
|
||||
|
||||
# Package management
|
||||
local missing_packages
|
||||
readarray -t missing_packages < <(get_missing_packages)
|
||||
|
||||
if [[ ${#missing_packages[@]} -gt 0 ]]; then
|
||||
log_warning "Missing packages detected: ${missing_packages[*]}"
|
||||
install_packages "$detected_os" "${missing_packages[@]}"
|
||||
fi
|
||||
|
||||
echo -e "${GREEN}[✓] [$FILE] Crontab added, double-checking set up...${NC}"
|
||||
verify_packages
|
||||
check_crontab_service
|
||||
|
||||
if ! crontab -l | grep -q $FILE; then
|
||||
echo -e "${RED}[x] [$FILE] Error: Crontab was not set up.${NC}" >&2
|
||||
exit 1
|
||||
fi
|
||||
# Script selection and installation
|
||||
local selected_scripts
|
||||
readarray -t selected_scripts < <(select_scripts)
|
||||
|
||||
echo -e "${GREEN}[✓] [$FILE] Crontab confirmed.${NC}"
|
||||
done
|
||||
log_info "Selected scripts: ${selected_scripts[*]}"
|
||||
|
||||
echo -e "${GREEN}[✓] Crontabs all set up.${NC}"
|
||||
verify_server_connectivity "${selected_scripts[@]}"
|
||||
download_scripts "${selected_scripts[@]}"
|
||||
setup_crontab "${selected_scripts[@]}"
|
||||
|
||||
echo -e "${GREEN}[✓] All done.${NC}"
|
||||
echo
|
||||
log_success "Installation completed successfully!"
|
||||
log_info "Scripts have been downloaded to: $(pwd)"
|
||||
log_info "Crontab entries have been configured. Use 'crontab -l' to view them."
|
||||
log_info "Log files will be created in /tmp/ directory."
|
||||
}
|
||||
|
||||
# Execute main function
|
||||
main "$@"