Compare commits


49 Commits

Author SHA1 Message Date
96548f4773 small adjustments
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 36s
Haven Notify Build and Deploy / Build Haven Notify Image (push) Successful in 10m45s
Haven Notify Build and Deploy / Deploy Haven Notify (internal) (push) Successful in 19s
2025-09-07 11:19:56 -03:00
c34ee5185d fixing docker update
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 2s
2025-08-24 21:07:13 -03:00
1489062943 removing auto update
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 36s
2025-08-24 20:59:49 -03:00
eb8ca78f4f tweaks to the scripts
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 41s
2025-08-24 20:55:14 -03:00
45567b2242 updating templates
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 2s
Haven Notify Build and Deploy / Build Haven Notify Image (push) Successful in 10m0s
Haven Notify Build and Deploy / Deploy Haven Notify (internal) (push) Successful in 11s
2025-08-23 22:01:01 -03:00
b0324ac9d8 forgot copy
Some checks failed
Check scripts syntax / check-scripts-syntax (push) Successful in 3s
Haven Notify Build and Deploy / Build Haven Notify Image (push) Successful in 10m0s
Haven Notify Build and Deploy / Deploy Haven Notify (internal) (push) Failing after 17s
2025-08-23 21:36:32 -03:00
06cf78a4a6 adding error & deploy notification
Some checks failed
Check scripts syntax / check-scripts-syntax (push) Successful in 3s
Haven Notify Build and Deploy / Build Haven Notify Image (push) Successful in 10m1s
Haven Notify Build and Deploy / Deploy Haven Notify (internal) (push) Failing after 11s
2025-08-23 20:44:32 -03:00
cd57837696 templates + arm64 build (slow)
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 36s
Haven Notify Build and Deploy / Build Haven Notify Image (push) Successful in 10m5s
Haven Notify Build and Deploy / Deploy Haven Notify (internal) (push) Successful in 17s
2025-08-23 20:27:49 -03:00
cedc435df0 maybe this gon work
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 6s
Haven Notify Build and Deploy / Build Haven Notify Image (push) Successful in 32s
Haven Notify Build and Deploy / Deploy Haven Notify (internal) (push) Successful in 22s
2025-08-22 22:35:50 -03:00
aa7a9b8548 back to only amd64
Some checks failed
Check scripts syntax / check-scripts-syntax (push) Successful in 7s
Haven Notify Build and Deploy / Build Haven Notify Image (push) Failing after 20s
Haven Notify Build and Deploy / Deploy Haven Notify (internal) (push) Has been skipped
2025-08-22 22:33:09 -03:00
11e2a28bd4 removing crossbuild on docker
Some checks failed
Check scripts syntax / check-scripts-syntax (push) Successful in 7s
Haven Notify Build and Deploy / Build Haven Notify Image (push) Failing after 11m14s
Haven Notify Build and Deploy / Deploy Haven Notify (internal) (push) Has been skipped
2025-08-22 22:00:11 -03:00
8b52bd2c45 yet anoter retry
Some checks failed
Check scripts syntax / check-scripts-syntax (push) Successful in 7s
Haven Notify Build and Deploy / Deploy Haven Notify (internal) (push) Has been cancelled
Haven Notify Build and Deploy / Build Haven Notify Image (push) Has been cancelled
2025-08-22 21:50:20 -03:00
442ff12039 new try
Some checks failed
Check scripts syntax / check-scripts-syntax (push) Successful in 6s
Haven Notify Build and Deploy / Deploy Haven Notify (internal) (push) Has been cancelled
Haven Notify Build and Deploy / Build Haven Notify Image (push) Has been cancelled
2025-08-22 21:41:31 -03:00
12920c10d4 anoter retry
Some checks failed
Check scripts syntax / check-scripts-syntax (push) Successful in 6s
Haven Notify Build and Deploy / Deploy Haven Notify (internal) (push) Has been cancelled
Haven Notify Build and Deploy / Build Haven Notify Image (push) Has been cancelled
2025-08-22 21:32:27 -03:00
99e110afb3 removing arch from go build
Some checks failed
Check scripts syntax / check-scripts-syntax (push) Successful in 1m7s
Haven Notify Build and Deploy / Build Haven Notify Image (push) Failing after 13m14s
Haven Notify Build and Deploy / Deploy Haven Notify (internal) (push) Successful in 24s
2025-08-22 21:08:28 -03:00
8108ca7e7b add setup for buildx
Some checks failed
Check scripts syntax / check-scripts-syntax (push) Successful in 8s
Haven Notify Build and Deploy / Deploy Haven Notify (internal) (push) Successful in 24s
Haven Notify Build and Deploy / Build Haven Notify Image (push) Has been cancelled
2025-08-21 22:06:25 -03:00
100262513b Improvements for haven-notify
Some checks failed
Check scripts syntax / check-scripts-syntax (push) Successful in 1m19s
Haven Notify Build and Deploy / Build Haven Notify Image (push) Failing after 1m51s
Haven Notify Build and Deploy / Deploy Haven Notify (internal) (push) Successful in 33s
2025-08-21 21:11:22 -03:00
7520d70ce9 things 2025-08-17 12:40:37 -03:00
7016ced89e improving backup.sh
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 2s
2025-08-16 22:58:36 -03:00
928605a696 fixing URL
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 4s
2025-08-16 22:53:15 -03:00
b75240c693 trying to fix arch :c
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 4s
Haven Notify Build and Deploy / Build Haven Notify Image (amd64) (push) Successful in 16s
Haven Notify Build and Deploy / Deploy Haven Notify (push) Successful in 11s
2025-08-16 22:51:35 -03:00
9ab518e149 improving dockerfile
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 2s
Haven Notify Build and Deploy / Build Haven Notify Image (amd64) (push) Successful in 24s
Haven Notify Build and Deploy / Deploy Haven Notify (push) Successful in 10s
2025-08-16 22:48:14 -03:00
89f0afe334 improving logging
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 4s
Haven Notify Build and Deploy / Build Haven Notify Image (amd64) (push) Successful in 20s
Haven Notify Build and Deploy / Deploy Haven Notify (push) Successful in 10s
2025-08-16 22:46:02 -03:00
018a4a5d60 updating haven notify
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 3s
Haven Notify Build and Deploy / Build Haven Notify Image (amd64) (push) Successful in 15s
Haven Notify Build and Deploy / Deploy Haven Notify (push) Successful in 11s
2025-08-16 22:41:41 -03:00
cbd813a76e whoops
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 2s
Haven Notify Build and Deploy / Build Haven Notify Image (amd64) (push) Successful in 18s
Haven Notify Build and Deploy / Deploy Haven Notify (push) Successful in 11s
2025-08-16 22:38:07 -03:00
5f77376a46 updating backup.sh
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 2s
2025-08-16 22:35:10 -03:00
a32bcb34a3 haven notify!
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 2s
Haven Notify Build and Deploy / Build Haven Notify Image (amd64) (push) Successful in 24s
Haven Notify Build and Deploy / Deploy Haven Notify (push) Successful in 10s
2025-08-16 22:31:54 -03:00
20ef0eb4b5 haven notify only for amd64 2025-08-16 22:31:41 -03:00
6879e4d2bf pipe
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 4s
Haven Notify Build and Deploy / Build Haven Notify Image (amd64) (push) Successful in 15s
Haven Notify Build and Deploy / Deploy Haven Notify (push) Successful in 10s
2025-08-16 22:30:04 -03:00
a7fcd1b4fe pipe
Some checks failed
Check scripts syntax / check-scripts-syntax (push) Successful in 4s
Haven Notify Build and Deploy / Build Haven Notify Image (amd64) (push) Successful in 9s
Haven Notify Build and Deploy / Build Haven Notify Image (arm64) (push) Successful in 2m23s
Haven Notify Build and Deploy / Deploy Haven Notify (push) Failing after 49s
2025-08-16 22:24:25 -03:00
ee03aff009 pipe
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 4s
2025-08-16 22:16:55 -03:00
65efcc4709 pipe
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 1s
2025-08-16 22:08:56 -03:00
4b323f5833 pipe
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 4s
2025-08-16 22:07:31 -03:00
7c01b5c8af fixing pipe lol
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 52s
2025-08-16 22:03:52 -03:00
b5bbe2628e fixing pipe
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 1s
2025-08-16 22:00:25 -03:00
72ec3e2477 haven notify :)
Some checks failed
Check scripts syntax / check-scripts-syntax (push) Successful in 37s
Haven Notify Build and Deploy / build_haven_notify (push) Failing after 3m30s
Haven Notify Build and Deploy / deploy_haven_notify (push) Has been skipped
2025-08-16 21:53:37 -03:00
df11d9dcf8 🤓
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 3s
2025-07-31 21:15:51 -03:00
76aaf0180c lol
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 3s
2025-07-31 21:10:44 -03:00
31e94b1f2e fixing good
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 3s
2025-07-31 21:09:25 -03:00
d8f0e0e8ee fix?
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 30s
2025-07-31 21:06:12 -03:00
bb0f653ac5 scripts v2.0
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 35s
2025-07-31 21:02:06 -03:00
68511a6915 fixing crontab
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 35s
2025-07-31 20:48:10 -03:00
b54fcffb66 fixing too many arguments
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 22s
2025-07-26 22:15:42 -03:00
f851a95bdf some tweaks lol
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 22s
2025-07-26 22:13:44 -03:00
cc416e5cd9 more updates lol
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 22s
2025-07-26 22:05:36 -03:00
78b706223d reorder again
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 22s
2025-07-26 21:56:37 -03:00
8c0235ebf2 updating script
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 22s
2025-07-26 21:55:47 -03:00
1d6dae86a7 creating more backup scripts!
All checks were successful
Check scripts syntax / check-scripts-syntax (push) Successful in 1m23s
2025-07-26 21:49:29 -03:00
3efec2a3b3 updating crontab for backup to match with the rest 2025-07-26 21:49:20 -03:00
16 changed files with 2024 additions and 336 deletions


@@ -0,0 +1,92 @@
name: Haven Notify Build and Deploy
on:
  push:
    branches:
      - main
    paths:
      - 'haven-notify/**'
      - '.gitea/workflows/**'
  workflow_dispatch: {}

env:
  REGISTRY_HOST: git.ivanch.me
  REGISTRY_USERNAME: ivanch
  IMAGE_NOTIFY: ${{ env.REGISTRY_HOST }}/ivanch/haven-notify
  KUBE_CONFIG: ${{ secrets.KUBE_CONFIG }}

jobs:
  build_haven_notify:
    name: Build Haven Notify Image
    runs-on: ubuntu-22.04
    steps:
      - name: Check out repository
        uses: actions/checkout@v2

      - name: Log in to Container Registry
        run: |
          echo "${{ secrets.REGISTRY_PASSWORD }}" \
            | docker login "${{ env.REGISTRY_HOST }}" \
              -u "${{ env.REGISTRY_USERNAME }}" \
              --password-stdin

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build and Push Multi-Arch Image
        uses: docker/build-push-action@v6
        with:
          push: true
          context: haven-notify
          platforms: linux/amd64,linux/arm64
          tags: |
            ${{ env.IMAGE_NOTIFY }}:latest

  deploy_haven_notify:
    name: Deploy Haven Notify (internal)
    runs-on: ubuntu-amd64
    needs: build_haven_notify
    steps:
      - name: Check KUBE_CONFIG validity
        run: |
          if [ -z "${KUBE_CONFIG}" ] || [ "${KUBE_CONFIG}" = "" ] || [ "${KUBE_CONFIG// }" = "" ]; then
            echo "KUBE_CONFIG is not set or is empty."
            exit 1
          fi

      - name: Check out repository
        uses: actions/checkout@v2

      - name: Download and install dependencies
        run: |
          apt-get update -y
          apt-get install -y curl
          curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl"
          install -m 0755 kubectl /usr/local/bin/kubectl
          kubectl version --client

      - name: Set up kubeconfig
        run: |
          cd haven-notify/deploy
          echo "$KUBE_CONFIG" > kubeconfig.yaml
        env:
          KUBE_CONFIG: ${{ env.KUBE_CONFIG }}

      - name: Check connection to cluster
        run: |
          cd haven-notify/deploy
          kubectl --kubeconfig=kubeconfig.yaml cluster-info

      - name: Apply haven-notify deployment
        run: |
          cd haven-notify/deploy
          kubectl --kubeconfig=kubeconfig.yaml apply -f haven-notify.yaml

      - name: Rollout restart haven-notify
        run: |
          cd haven-notify/deploy
          kubectl --kubeconfig=kubeconfig.yaml rollout restart deployment/haven-notify
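
A quick way to verify what this workflow pushed is to inspect the manifest list from any machine with Buildx; a minimal sketch, assuming you can log in to the registry:

```sh
# Both linux/amd64 and linux/arm64 entries should appear
# if the multi-arch build and push succeeded.
docker login git.ivanch.me
docker buildx imagetools inspect git.ivanch.me/ivanch/haven-notify:latest
```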


@@ -17,6 +17,10 @@ When you run the script, you will be prompted to select which scripts you want t
 This script is used to backup a directory to a remote server using `rsync`. It is intended to be run as a cron job.
+
+#### `nas-gdrive-backup.sh`
+
+This script is used to both install itself and to run a periodic backup to Google Drive for files that have changed.
 ### `clean.sh`
 This script is used to clean some of the files, docker dangling images, and docker stopped/unused containers.
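
Since both backup scripts are designed to run as cron jobs, wiring them up looks roughly like the crontab sketch below; the schedules and the `backup.sh` path are illustrative assumptions, only the `nas-gdrive-backup.sh` install path comes from the script itself:

```sh
# Illustrative schedules: nightly Docker backup, weekly NAS-to-Drive sync.
0 3 * * * /root/scripts/backup.sh >> /var/log/backup-cron.log 2>&1
30 3 * * 0 /usr/local/bin/nas-gdrive-backup.sh
```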


@@ -1,40 +1,23 @@
 #!/bin/bash
-### AUTO-UPDATER ###
-# Variables
-SERVER_FILE="https://git.ivanch.me/ivanch/server-scripts/raw/branch/main/backup.sh"
-SERVER_OK=1
-
-# Check if the server file exists
-curl -s --head $SERVER_FILE | head -n 1 | grep -E "HTTP/[12] [23].." > /dev/null
-
-if [ $? -ne 0 ]; then
-    echo "Error: $SERVER_FILE not found." >&2
-    SERVER_OK=0
-fi
-
-if [ $SERVER_OK -eq 1 ]; then
-    echo "Running auto-update..."
-
-    # Compare the local and server files sha256sum to check if an update is needed
-    LOCAL_SHA256=$(sha256sum backup.sh | awk '{print $1}')
-    SERVER_SHA256=$(curl -s $SERVER_FILE | sha256sum | awk '{print $1}')
-
-    if [ "$LOCAL_SHA256" != "$SERVER_SHA256" ]; then
-        echo "Updating backup.sh..."
-        curl -s -o backup.sh $SERVER_FILE
-        echo "backup.sh updated."
-        chmod +x backup.sh
-        echo "Permissions set up."
-        echo "Running updated backup.sh..."
-        ./backup.sh
-        exit 0
-    else
-        echo "backup.sh is up to date.."
-    fi
-fi
+# Function to send notification
+HOSTNAME=$(cat /etc/hostname)
+NOTIFY_URL_ERROR="http://notify.haven/template/notify/error"
+NOTIFY_URL_BACKUP="http://notify.haven/template/notify/backup"
+
+send_error_notification() {
+    local message="$1"
+    local critical="$2"
+    curl -s -X POST "$NOTIFY_URL_ERROR" \
+        -H "Content-Type: application/json" \
+        -d "{\"caller\": \"Docker Backup - $HOSTNAME\", \"message\": \"$message\", \"critical\": $critical}"
+}
+
+send_backup_notification() {
+    local message="$1"
+    local backup_size="$2"
+    curl -s -X POST "$NOTIFY_URL_BACKUP" \
+        -H "Content-Type: application/json" \
+        -d "{\"title\": \"Docker Backup - $HOSTNAME\", \"message\": \"$message\", \"backupSizeInMB\": $backup_size}"
+}
 ####################
@@ -46,21 +29,34 @@ REMOTE_HOST="nas.haven"
 REMOTE_DIR="/export/Backup/Docker/$(cat /etc/hostname)"
 
 # Create a compressed backup file
-zip -r $BACKUP_FILE $SOURCE_DIR
+zip -q -r $BACKUP_FILE $SOURCE_DIR || true
+if [ $? -ne 0 ]; then
+    send_error_notification "⚠️ Some files or folders in $SOURCE_DIR could not be backed up (possibly in use or locked). Backup archive created with available files." false
+fi
 
 # Check if remote path exists
-ssh $REMOTE_USER@$REMOTE_HOST "mkdir -p $REMOTE_DIR"
+if ! ssh $REMOTE_USER@$REMOTE_HOST "mkdir -p $REMOTE_DIR"; then
+    send_error_notification "❌ Failed to create remote directory: $REMOTE_DIR on $REMOTE_HOST" true
+    exit 1
+fi
 
 # Transfer the backup file to the remote server
-scp $BACKUP_FILE $REMOTE_USER@$REMOTE_HOST:$REMOTE_DIR
+if ! scp $BACKUP_FILE $REMOTE_USER@$REMOTE_HOST:$REMOTE_DIR; then
+    send_error_notification "❌ Failed to transfer backup file to remote server: $REMOTE_HOST:$REMOTE_DIR" true
+    exit 1
+fi
 
 # Remove the backup file
+BACKUP_SIZE=$(du -m $BACKUP_FILE | cut -f1)
 rm $BACKUP_FILE
 
 # Erase last 7 days backups from remote server
-ssh $REMOTE_USER@$REMOTE_HOST "find $REMOTE_DIR -type f -name 'docker_backup_*' -mtime +7 -exec rm {} \;"
+if ! ssh $REMOTE_USER@$REMOTE_HOST "find $REMOTE_DIR -type f -name 'docker_backup_*' -mtime +7 -exec rm {} \;"; then
+    send_error_notification "⚠️ Failed to clean old backups on remote server: $REMOTE_HOST:$REMOTE_DIR" false
+fi
 
-# Exit
+# Success notification
+send_backup_notification "✅ Backup completed successfully for: $SOURCE_DIR to $REMOTE_HOST:$REMOTE_DIR" $BACKUP_SIZE
 echo "Backup completed successfully"
 exit 0
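
backup.sh now depends on a haven-notify instance being reachable at notify.haven; a manual smoke test of that dependency, using an arbitrary test payload, might look like:

```sh
# Post a fake backup event through the same endpoint backup.sh calls,
# to confirm the Discord message arrives before trusting the cron job.
curl -s -X POST "http://notify.haven/template/notify/backup" \
    -H "Content-Type: application/json" \
    -d '{"title": "Docker Backup - smoke-test", "message": "manual test", "backupSizeInMB": 42}'
```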

clean.sh

@@ -1,74 +1,477 @@
 #!/bin/bash
-### AUTO-UPDATER ###
-# Variables
-SERVER_FILE="https://git.ivanch.me/ivanch/server-scripts/raw/branch/main/clean.sh"
-SERVER_OK=1
-
-# Check if the server file exists
-curl -s --head $SERVER_FILE | head -n 1 | grep -E "HTTP/[12] [23].." > /dev/null
-
-if [ $? -ne 0 ]; then
-    echo "Error: $SERVER_FILE not found." >&2
-    SERVER_OK=0
-fi
-
-if [ $SERVER_OK -eq 1 ]; then
-    echo "Running auto-update..."
-
-    # Compare the local and server files sha256sum to check if an update is needed
-    LOCAL_SHA256=$(sha256sum clean.sh | awk '{print $1}')
-    SERVER_SHA256=$(curl -s $SERVER_FILE | sha256sum | awk '{print $1}')
-
-    if [ "$LOCAL_SHA256" != "$SERVER_SHA256" ]; then
-        echo "Updating clean.sh..."
-        curl -s -o clean.sh $SERVER_FILE
-        echo "clean.sh updated."
-        chmod +x clean.sh
-        echo "Permissions set up."
-        echo "Running updated clean.sh..."
-        ./clean.sh
-        exit 0
-    else
-        echo "clean.sh is up to date.."
-    fi
-fi
-####################
-
-# Run Docker system prune
-echo "Running Docker system prune..."
-docker image prune -af
-docker system prune -af
-
-# Clean APK cache from Alpine or apt for Debian
-if [ -x "$(command -v apk)" ]; then
-    echo "Cleaning APK cache..."
-    rm -rf /var/cache/apk/*
-    apk cache clean
-    apk update
-fi
-
-if [ -x "$(command -v apt)" ]; then
-    echo "Cleaning apt cache..."
-    apt-get clean
-    apt-get autoclean
-    apt-get update
-fi
-
-# Clean system caches
-echo "Cleaning system caches..."
-rm -rf /var/cache/*
-rm -rf /tmp/*
-
-# General system maintenance
-echo "Performing general system maintenance..."
-sync; echo 3 > /proc/sys/vm/drop_caches
-
-# Remove old logs
-echo "Removing old logs..."
-find /var/log -type f -name "*.log" -mtime +30 -delete
-echo "Maintenance completed."
+# System Cleanup and Maintenance Script
+#
+# Description: Comprehensive system cleanup for Docker containers and Linux systems
+# Features:
+# - Docker resource cleanup (images, containers, volumes, networks)
+# - Package manager cache cleanup (APK/APT)
+# - System cache and temporary file cleanup
+# - Log rotation and cleanup
+# - Memory cache optimization
+# - Journal cleanup (systemd)
+# - Thumbnail and user cache cleanup
+# Author: ivanch
+# Version: 2.0
+
+set -euo pipefail  # Exit on error, undefined vars, and pipe failures
+
+#==============================================================================
+# CONFIGURATION
+#==============================================================================
+
+# Color definitions for output formatting
+readonly NC='\033[0m'
+readonly RED='\033[1;31m'
+readonly GREEN='\033[1;32m'
+readonly LIGHT_GREEN='\033[1;32m'
+readonly LIGHT_BLUE='\033[1;34m'
+readonly LIGHT_GREY='\033[0;37m'
+readonly YELLOW='\033[1;33m'
+
+# Cleanup configuration
+readonly LOG_RETENTION_DAYS=30
+readonly JOURNAL_RETENTION_DAYS=7
+readonly TEMP_DIRS=("/tmp" "/var/tmp")
+readonly CACHE_DIRS=("/var/cache" "/root/.cache")
+
+# Auto-update configuration
+readonly AUTO_UPDATE_ENABLED=true
+
+#==============================================================================
+# UTILITY FUNCTIONS
+#==============================================================================
+
+# Print formatted log messages
+log_info() { echo -e "${LIGHT_GREY}[i] $1${NC}"; }
+log_success() { echo -e "${LIGHT_GREEN}[✓] $1${NC}"; }
+log_warning() { echo -e "${YELLOW}[!] $1${NC}"; }
+log_error() { echo -e "${RED}[x] $1${NC}" >&2; }
+log_step() { echo -e "${LIGHT_BLUE}[i] $1${NC}"; }
+
+# Exit with error message
+die() {
+    log_error "$1"
+    exit 1
+}
+
+# Check if a command exists
+command_exists() {
+    command -v "$1" >/dev/null 2>&1
+}
+
+# Get directory size in human readable format
+get_dir_size() {
+    local dir="$1"
+    if [[ -d "$dir" ]]; then
+        du -sh "$dir" 2>/dev/null | cut -f1 || echo "0B"
+    else
+        echo "0B"
+    fi
+}
+
+# Safe directory cleanup with size reporting
+clean_directory() {
+    local dir="$1"
+    local description="$2"
+
+    if [[ ! -d "$dir" ]]; then
+        return 0
+    fi
+
+    local size_before
+    size_before=$(get_dir_size "$dir")
+
+    if [[ "$size_before" == "0B" ]]; then
+        log_info "$description: already clean"
+        return 0
+    fi
+
+    log_step "$description (was $size_before)..."
+
+    # Use find with -delete for safer cleanup
+    if find "$dir" -mindepth 1 -delete 2>/dev/null; then
+        log_success "$description: freed $size_before"
+    else
+        # Fallback to rm if find fails
+        if rm -rf "$dir"/* 2>/dev/null; then
+            log_success "$description: freed $size_before"
+        else
+            log_warning "$description: partial cleanup completed"
+        fi
+    fi
+}
+
+# Get system information for reporting
+get_system_info() {
+    local info=""
+
+    # Memory info
+    if [[ -f /proc/meminfo ]]; then
+        local mem_total mem_available
+        mem_total=$(grep MemTotal /proc/meminfo | awk '{print $2}')
+        mem_available=$(grep MemAvailable /proc/meminfo | awk '{print $2}')
+        if [[ -n "$mem_total" && -n "$mem_available" ]]; then
+            info+="Memory: $((mem_available/1024))MB available of $((mem_total/1024))MB total"
+        fi
+    fi
+
+    # Disk space info
+    if command_exists df; then
+        local disk_info
+        disk_info=$(df -h / 2>/dev/null | tail -1 | awk '{print $4 " available of " $2 " total"}')
+        if [[ -n "$disk_info" ]]; then
+            info+="${info:+, }Disk: $disk_info"
+        fi
+    fi
+
+    echo "$info"
+}
+
+#==============================================================================
+# DOCKER CLEANUP FUNCTIONS
+#==============================================================================
+
+# Clean Docker resources
+cleanup_docker() {
+    if ! command_exists docker; then
+        log_info "Docker not found, skipping Docker cleanup"
+        return 0
+    fi
+
+    log_step "Starting Docker cleanup..."
+
+    # Check if Docker daemon is running
+    if ! docker info >/dev/null 2>&1; then
+        log_warning "Docker daemon not running, skipping Docker cleanup"
+        return 0
+    fi
+
+    # Get initial Docker disk usage
+    local docker_usage_before=""
+    if docker system df >/dev/null 2>&1; then
+        docker_usage_before=$(docker system df 2>/dev/null || echo "")
+    fi
+
+    # Remove unused images
+    log_info "Removing unused Docker images..."
+    if docker image prune -af >/dev/null 2>&1; then
+        log_success "Docker images cleaned"
+    else
+        log_warning "Docker image cleanup failed"
+    fi
+
+    # Remove stopped containers
+    log_info "Removing stopped Docker containers..."
+    if docker container prune -f >/dev/null 2>&1; then
+        log_success "Docker containers cleaned"
+    else
+        log_warning "Docker container cleanup failed"
+    fi
+
+    # Remove unused volumes
+    log_info "Removing unused Docker volumes..."
+    if docker volume prune -f >/dev/null 2>&1; then
+        log_success "Docker volumes cleaned"
+    else
+        log_warning "Docker volume cleanup failed"
+    fi
+
+    # Remove unused networks
+    log_info "Removing unused Docker networks..."
+    if docker network prune -f >/dev/null 2>&1; then
+        log_success "Docker networks cleaned"
+    else
+        log_warning "Docker network cleanup failed"
+    fi
+
+    # Complete system cleanup
+    log_info "Running Docker system cleanup..."
+    if docker system prune -af >/dev/null 2>&1; then
+        log_success "Docker system cleanup completed"
+    else
+        log_warning "Docker system cleanup failed"
+    fi
+
+    # Show space freed if possible
+    if [[ -n "$docker_usage_before" ]] && docker system df >/dev/null 2>&1; then
+        log_info "Docker cleanup completed"
+    fi
+}
+
+#==============================================================================
+# PACKAGE MANAGER CLEANUP FUNCTIONS
+#==============================================================================
+
+# Clean APK cache (Alpine Linux)
+cleanup_apk() {
+    if ! command_exists apk; then
+        return 0
+    fi
+
+    log_step "Cleaning APK cache..."
+
+    # Clean APK cache
+    if [[ -d /var/cache/apk ]]; then
+        clean_directory "/var/cache/apk" "APK cache directory"
+    fi
+
+    # Clean APK cache using apk command
+    if apk cache clean >/dev/null 2>&1; then
+        log_success "APK cache cleaned"
+    fi
+
+    # Update package index
+    log_info "Updating APK package index..."
+    if apk update >/dev/null 2>&1; then
+        log_success "APK index updated"
+    else
+        log_warning "APK index update failed"
+    fi
+}
+
+# Clean APT cache (Debian/Ubuntu)
+cleanup_apt() {
+    if ! command_exists apt-get; then
+        return 0
+    fi
+
+    log_step "Cleaning APT cache..."
+
+    # Clean downloaded packages
+    if apt-get clean >/dev/null 2>&1; then
+        log_success "APT cache cleaned"
+    else
+        log_warning "APT clean failed"
+    fi
+
+    # Remove orphaned packages
+    if apt-get autoclean >/dev/null 2>&1; then
+        log_success "APT autocleaned"
+    else
+        log_warning "APT autoclean failed"
+    fi
+
+    # Remove unnecessary packages
+    if apt-get autoremove -y >/dev/null 2>&1; then
+        log_success "Unnecessary packages removed"
+    else
+        log_warning "APT autoremove failed"
+    fi
+
+    # Update package index
+    log_info "Updating APT package index..."
+    if apt-get update >/dev/null 2>&1; then
+        log_success "APT index updated"
+    else
+        log_warning "APT index update failed"
+    fi
+}
+
+#==============================================================================
+# SYSTEM CLEANUP FUNCTIONS
+#==============================================================================
+
+# Clean system temporary directories
+cleanup_temp_dirs() {
+    log_step "Cleaning temporary directories..."
+
+    for temp_dir in "${TEMP_DIRS[@]}"; do
+        if [[ -d "$temp_dir" ]]; then
+            # Clean contents but preserve the directory
+            find "$temp_dir" -mindepth 1 -maxdepth 1 -mtime +1 -exec rm -rf {} + 2>/dev/null || true
+            log_success "Cleaned old files in $temp_dir"
+        fi
+    done
+}
+
+# Clean system cache directories
+cleanup_cache_dirs() {
+    log_step "Cleaning cache directories..."
+
+    for cache_dir in "${CACHE_DIRS[@]}"; do
+        if [[ -d "$cache_dir" ]]; then
+            clean_directory "$cache_dir" "Cache directory $cache_dir"
+        fi
+    done
+
+    # Clean additional cache locations
+    local additional_caches=(
+        "/var/lib/apt/lists"
+        "/var/cache/debconf"
+        "/root/.npm"
+        "/root/.pip"
+        "/home/*/.cache"
+        "/home/*/.npm"
+        "/home/*/.pip"
+    )
+
+    for cache_pattern in "${additional_caches[@]}"; do
+        # Use shell expansion for patterns
+        for cache_path in $cache_pattern; do
+            if [[ -d "$cache_path" ]]; then
+                clean_directory "$cache_path" "Additional cache $cache_path"
+            fi
+        done 2>/dev/null || true
+    done
+}
+
+# Clean old log files
+cleanup_logs() {
+    log_step "Cleaning old log files..."
+
+    # Clean logs older than retention period
+    if [[ -d /var/log ]]; then
+        local cleaned_count=0
+
+        # Find and remove old log files
+        while IFS= read -r -d '' logfile; do
+            rm -f "$logfile" 2>/dev/null && ((cleaned_count++))
+        done < <(find /var/log -type f -name "*.log" -mtime +"$LOG_RETENTION_DAYS" -print0 2>/dev/null || true)
+
+        # Clean compressed logs
+        while IFS= read -r -d '' logfile; do
+            rm -f "$logfile" 2>/dev/null && ((cleaned_count++))
+        done < <(find /var/log -type f \( -name "*.log.gz" -o -name "*.log.bz2" -o -name "*.log.xz" \) -mtime +"$LOG_RETENTION_DAYS" -print0 2>/dev/null || true)
+
+        if [[ $cleaned_count -gt 0 ]]; then
+            log_success "Removed $cleaned_count old log files"
+        else
+            log_info "No old log files to remove"
+        fi
+    fi
+
+    # Truncate large active log files
+    local large_logs
+    while IFS= read -r -d '' logfile; do
+        if [[ -f "$logfile" && -w "$logfile" ]]; then
+            truncate -s 0 "$logfile" 2>/dev/null || true
+        fi
+    done < <(find /var/log -type f -name "*.log" -size +100M -print0 2>/dev/null || true)
+}
+
+# Clean systemd journal
+cleanup_journal() {
+    if ! command_exists journalctl; then
+        return 0
+    fi
+
+    log_step "Cleaning systemd journal..."
+
+    # Clean journal older than retention period
+    if journalctl --vacuum-time="${JOURNAL_RETENTION_DAYS}d" >/dev/null 2>&1; then
+        log_success "Journal cleaned (older than $JOURNAL_RETENTION_DAYS days)"
+    else
+        log_warning "Journal cleanup failed"
+    fi
+
+    # Limit journal size
+    if journalctl --vacuum-size=100M >/dev/null 2>&1; then
+        log_success "Journal size limited to 100MB"
+    fi
+}
+
+# Clean thumbnail caches
+cleanup_thumbnails() {
+    log_step "Cleaning thumbnail caches..."
+
+    local thumbnail_dirs=(
+        "/root/.thumbnails"
+        "/root/.cache/thumbnails"
+        "/home/*/.thumbnails"
+        "/home/*/.cache/thumbnails"
+    )
+
+    for thumb_pattern in "${thumbnail_dirs[@]}"; do
+        for thumb_dir in $thumb_pattern; do
+            if [[ -d "$thumb_dir" ]]; then
+                clean_directory "$thumb_dir" "Thumbnail cache $thumb_dir"
+            fi
+        done 2>/dev/null || true
+    done
+}
+
+# Optimize memory caches
+optimize_memory() {
+    log_step "Optimizing memory caches..."
+
+    # Sync filesystem
+    if sync; then
+        log_info "Filesystem synced"
+    fi
+
+    # Drop caches (page cache, dentries and inodes)
+    if [[ -w /proc/sys/vm/drop_caches ]]; then
+        echo 3 > /proc/sys/vm/drop_caches 2>/dev/null && log_success "Memory caches dropped" || log_warning "Failed to drop memory caches"
+    fi
+}
+
+#==============================================================================
+# REPORTING FUNCTIONS
+#==============================================================================
+
+# Generate cleanup summary
+generate_summary() {
+    log_step "Generating cleanup summary..."
+
+    local system_info
+    system_info=$(get_system_info)
+    if [[ -n "$system_info" ]]; then
+        log_info "System status: $system_info"
+    fi
+
+    # Show disk usage of important directories
+    local important_dirs=("/" "/var" "/tmp" "/var/log" "/var/cache")
+    for dir in "${important_dirs[@]}"; do
+        if [[ -d "$dir" ]]; then
+            local usage
+            usage=$(df -h "$dir" 2>/dev/null | tail -1 | awk '{print $5 " used (" $4 " available)"}' || echo "unknown")
+            log_info "$dir: $usage"
+        fi
+    done
+}
+
+#==============================================================================
+# MAIN EXECUTION
+#==============================================================================
+
+main() {
+    log_step "Starting System Cleanup and Maintenance"
+    echo
+
+    # Show initial system status
+    local initial_info
+    initial_info=$(get_system_info)
+    if [[ -n "$initial_info" ]]; then
+        log_info "Initial system status: $initial_info"
+        echo
+    fi
+
+    # Docker cleanup
+    cleanup_docker
+
+    # Package manager cleanup
+    cleanup_apk
+    cleanup_apt
+
+    # System cleanup
+    cleanup_temp_dirs
+    cleanup_cache_dirs
+    cleanup_logs
+    cleanup_journal
+    cleanup_thumbnails
+
+    # Memory optimization
+    optimize_memory
+
+    # Generate summary
+    echo
+    generate_summary
+    echo
+
+    log_success "System cleanup and maintenance completed!"
+}
+
+# Execute main function with all arguments
+main "$@"


@@ -1,106 +1,280 @@
 #!/bin/bash
-NC='\033[0m'
-LIGHT_GREEN='\033[1;32m'
-LIGHT_BLUE='\033[1;34m'
-LIGHT_GREEN='\033[1;32m'
-LIGHT_GREY='\033[0;37m'
-
-### AUTO-UPDATER ###
-FILE_NAME="docker-updater.sh"
-SERVER_FILE="https://git.ivanch.me/ivanch/server-scripts/raw/branch/main/$FILE_NAME"
-SERVER_OK=1
-
-# Check if the server file exists
-curl -s --head $SERVER_FILE | head -n 1 | grep -E "HTTP/[12] [23].." > /dev/null
-
-if [ $? -ne 0 ]; then
-    echo -e "${RED}[x] Error: $SERVER_FILE not found.${NC}" >&2
-    SERVER_OK=0
-fi
-
-if [ $SERVER_OK -eq 1 ]; then
-    echo -e "${LIGHT_BLUE}[i] Running auto-update"
-
-    # Compare the local and server files sha256sum to check if an update is needed
-    LOCAL_SHA256=$(sha256sum $FILE_NAME | awk '{print $1}')
-    SERVER_SHA256=$(curl -s $SERVER_FILE | sha256sum | awk '{print $1}')
-
-    if [ "$LOCAL_SHA256" != "$SERVER_SHA256" ]; then
-        echo -e "${LIGHT_GREY}[i] Updating $FILE_NAME${NC}"
-
-        curl -s -o $FILE_NAME $SERVER_FILE
-        chmod +x $FILE_NAME
-        echo -e "${LIGHT_GREEN}[i] $FILE_NAME updated.${NC}"
-
-        echo -e "${LIGHT_BLUE}[i] Running updated $FILE_NAME...${NC}"
-        ./$FILE_NAME
-        exit 0
-    else
-        echo -e "${LIGHT_GREEN}[i] $FILE_NAME is already up to date.${NC}"
-    fi
-fi
-
-####################
-
-# Navigate to docker folder
-DOCKER_FOLDER=/root/docker
-if [ -d "$DOCKER_FOLDER" ]; then
-    cd $DOCKER_FOLDER
-else
-    echo -e "${LIGHT_GREY}[i] Docker folder not found.${NC}"
-    exit 1
-fi
-
-# Updating Docker containers
-
-for folder in */; do
-    cd $DOCKER_FOLDER/$folder
-
-    # Remove trailing slash from folder name if it exists
-    folder=${folder%/}
-    echo -e "${LIGHT_BLUE}[$folder] Checking for updates..."
-
-    # if .ignore file exists, skip the folder
-    if [ -f ".ignore" ]; then
-        echo -e "${LIGHT_BLUE}[$folder] Skipping docker container update"
-        cd ..
-        continue
-    fi
-
-    # Check compose files for obsolete version attribute
-    for compose_file in "docker-compose.yml" "docker-compose.yaml" "compose.yaml" "compose.yml"; do
-        if [ -f "$compose_file" ]; then
-            echo -e "${LIGHT_BLUE}[$folder] Checking $compose_file for obsolete version attribute"
-            sed -i '/^version:/d' "$compose_file"
-        fi
-    done
-
-    DOCKER_RUNNING=$(docker compose ps -q)
-
-    if [ -n "$DOCKER_RUNNING" ]; then
-        echo -e "${LIGHT_BLUE}[$folder] Stopping Docker containers"
-        docker compose down > /dev/null
-    else
-        echo -e "${LIGHT_BLUE}[$folder] No Docker containers running, will skip update"
-        continue
-    fi
-
-    echo -e "${LIGHT_BLUE}[$folder] Updating images"
-    docker compose pull -q > /dev/null
-
-    echo -e "${LIGHT_BLUE}[$folder] Starting Docker containers"
-    docker compose up -d > /dev/null
-
-    echo -e "${LIGHT_GREEN}[$folder] Updated!"
-
-    cd $DOCKER_FOLDER
-done
-
-# Run Docker image prune
-echo -e "${LIGHT_BLUE}Running Docker image prune..."
-docker image prune -af
-
-echo -e "${LIGHT_GREEN} All done!"
+# Docker Container Updater
+#
+# Description: Automatically updates Docker containers and manages Docker images
+# Features:
+# - Updates all Docker Compose projects in /root/docker
+# - Skips containers with .ignore file
+# - Removes obsolete Docker Compose version attributes
+# - Cleans up unused Docker images
+# Author: ivanch
+# Version: 2.0
+
+set -euo pipefail  # Exit on error, undefined vars, and pipe failures
+
+HOSTNAME=$(cat /etc/hostname)
+NOTIFY_URL_ERROR="http://notify.haven/template/notify/error"
+NOTIFY_URL_UPDATE="http://notify.haven/template/notify/update"
+
+send_error_notification() {
+    local message="$1"
+    local critical="$2"
+    curl -s -X POST "$NOTIFY_URL_ERROR" \
+        -H "Content-Type: application/json" \
+        -d "{\"caller\": \"$HOSTNAME\", \"message\": \"$message\", \"critical\": $critical}"
+}
+
+send_update_notification() {
+    local script_time="$1"
+    curl -s -X POST "$NOTIFY_URL_UPDATE" \
+        -H "Content-Type: application/json" \
+        -d "{\"host\": \"$HOSTNAME\", \"asset\": \"Docker containers\", \"time\": $script_time}"
+}
+
+#==============================================================================
+# CONFIGURATION
+#==============================================================================
+
+# Color definitions for output formatting
+readonly NC='\033[0m'
+readonly RED='\033[1;31m'
+readonly GREEN='\033[1;32m'
+readonly LIGHT_GREEN='\033[1;32m'
+readonly LIGHT_BLUE='\033[1;34m'
+readonly LIGHT_GREY='\033[0;37m'
+readonly YELLOW='\033[1;33m'
+
+# Script configuration
+readonly DOCKER_FOLDER="/root/docker"
+readonly COMPOSE_FILES=("docker-compose.yml" "docker-compose.yaml" "compose.yaml" "compose.yml")
+
+# Auto-update configuration
+readonly AUTO_UPDATE_ENABLED=true
+
+#==============================================================================
+# UTILITY FUNCTIONS
+#==============================================================================
+
+# Print formatted log messages
+log_info() { echo -e "${LIGHT_GREY}[i] $1${NC}"; }
+log_success() { echo -e "${LIGHT_GREEN}[✓] $1${NC}"; }
+log_step() { echo -e "${LIGHT_BLUE}[i] $1${NC}"; }
+log_container() { echo -e "${LIGHT_BLUE}[$1] $2${NC}"; }
+
+log_warning() {
+    echo -e "${YELLOW}[!] $1${NC}";
+    send_error_notification "$1" false
+}
+
+log_error() {
+    echo -e "${RED}[x] $1${NC}" >&2;
+    send_error_notification "$1" true
+}
+
+# Exit with error message
+die() {
+    log_error "$1"
+    exit 1
+}
+
+# Check if a command exists
+command_exists() {
+    command -v "$1" >/dev/null 2>&1
+}
+
+# Check if Docker and Docker Compose are available
+check_docker_requirements() {
+    log_info "Checking Docker requirements..."
+
+    if ! command_exists docker; then
+        die "Docker is not installed or not in PATH"
+    fi
+
+    if ! docker compose version >/dev/null 2>&1; then
+        die "Docker Compose is not available"
+    fi
+
+    log_success "Docker requirements satisfied"
+}
+
+# Get SHA256 hash of a file
+get_file_hash() {
+    local file="$1"
+    sha256sum "$file" 2>/dev/null | awk '{print $1}' || echo ""
+}
+
+# Get SHA256 hash from URL content
+get_url_hash() {
+    local url="$1"
+    curl -s "$url" 2>/dev/null | sha256sum | awk '{print $1}' || echo ""
+}
+
+#==============================================================================
+# DOCKER COMPOSE MANAGEMENT
+#==============================================================================
+
+# Find the active Docker Compose file in current directory
+find_compose_file() {
+    for compose_file in "${COMPOSE_FILES[@]}"; do
+        if [[ -f "$compose_file" ]]; then
+            echo "$compose_file"
+            return 0
+        fi
+    done
+    return 1
+}
+
+# Remove obsolete version attribute from Docker Compose files
+clean_compose_files() {
+    local container_name="$1"
+
+    for compose_file in "${COMPOSE_FILES[@]}"; do
+        if [[ -f "$compose_file" ]]; then
+            log_container "$container_name" "Cleaning obsolete version attribute from $compose_file"
+            sed -i '/^version:/d' "$compose_file" || log_warning "Failed to clean $compose_file"
+        fi
+    done
+}
+
+# Check if container should be skipped
+should_skip_container() {
+    [[ -f ".ignore" ]]
+}
+
+# Check if any containers are running in current directory
+has_running_containers() {
+    local running_containers
+    running_containers=$(docker compose ps -q 2>/dev/null || echo "")
+    [[ -n "$running_containers" ]]
+}
+
+# Update a single Docker Compose project
+update_docker_project() {
+    local project_dir="$1"
+    local container_name
+    container_name=$(basename "$project_dir")
+
+    log_container "$container_name" "Checking for updates..."
+
+    # Change to project directory
+    cd "$project_dir" || {
+        log_error "Cannot access directory: $project_dir"
+        return 1
+    }
+
+    # Check if container should be skipped
+    if should_skip_container; then
+        log_container "$container_name" "Skipping (found .ignore file)"
+        return 0
+    fi
+
+    # Verify compose file exists
+    local compose_file
+    if ! compose_file=$(find_compose_file); then
+        log_container "$container_name" "No Docker Compose file found, skipping"
+        return 0
+    fi
+
+    # Clean compose files
+    clean_compose_files "$container_name"
+
+    # Check if containers are running
+    if ! has_running_containers; then
+        log_container "$container_name" "No running containers, skipping update"
+        return 0
+    fi
+
+    # Stop containers
+    log_container "$container_name" "Stopping containers..."
+    if ! docker compose down >/dev/null 2>&1; then
+        log_error "Failed to stop containers in $container_name"
+        return 1
+    fi
+
+    # Pull updated images
+    log_container "$container_name" "Pulling updated images..."
+    if ! docker compose pull -q >/dev/null 2>&1; then
+        log_warning "Failed to pull images for $container_name, attempting to restart anyway"
+    fi
+
+    # Start containers
+    log_container "$container_name" "Starting containers..."
+    if ! docker compose up -d >/dev/null 2>&1; then
+        log_error "Failed to start containers in $container_name"
+        return 1
+    fi
+
+    log_container "$container_name" "Update completed successfully!"
+    return 0
+}
+
+# Update all Docker Compose projects
+update_all_docker_projects() {
+    log_step "Starting Docker container updates..."
+
+    # Check if Docker folder exists
+    if [[ ! -d "$DOCKER_FOLDER" ]]; then
+        die "Docker folder not found: $DOCKER_FOLDER"
+    fi
+
+    # Change to Docker folder
+    cd "$DOCKER_FOLDER" || die "Cannot access Docker folder: $DOCKER_FOLDER"
+
+    # Process each subdirectory
+    for project_dir in */; do
+        if [[ -d "$project_dir" ]]; then
+            local project_path="$DOCKER_FOLDER/$project_dir"
+            update_docker_project "$project_path"
+
+            # Return to Docker folder for next iteration
+            cd "$DOCKER_FOLDER" || die "Cannot return to Docker folder"
+        fi
+    done
+}
+
+#==============================================================================
+# DOCKER CLEANUP
+#==============================================================================
+
+# Clean up unused Docker resources
+cleanup_docker_resources() {
+    log_step "Cleaning up unused Docker resources..."
+
+    # Remove unused images
+    log_info "Removing unused Docker images..."
+    if docker image prune -af >/dev/null 2>&1; then
+        log_success "Docker image cleanup completed"
+    else
+        log_warning "Docker image cleanup failed"
+    fi
+}
+
+#==============================================================================
+# MAIN EXECUTION
+#==============================================================================
+
+main() {
+    START_TIME=$(date +%s)
+    log_step "Starting Docker Container Updater"
+    echo
+
+    # Check requirements
+    check_docker_requirements
+
+    # Update all Docker projects
+    update_all_docker_projects
+
+    # Clean up Docker resources
+    cleanup_docker_resources
+
+    echo
+    log_success "Docker container update process completed!"
+
+    END_TIME=$(date +%s)
+    DURATION=$((END_TIME - START_TIME))
+    log_info "Total duration: $DURATION seconds"
+
+    send_update_notification $DURATION
+}
+
+# Execute main function with all arguments
+main "$@"

haven-notify/Dockerfile

@@ -0,0 +1,20 @@
# Start from the official Golang image for building
FROM --platform=$BUILDPLATFORM golang:1.22-alpine AS builder
ARG TARGETARCH
ARG TARGETOS
WORKDIR /app
COPY . .
# Build statically for Linux
RUN CGO_ENABLED=0 GOOS=${TARGETOS} GOARCH=${TARGETARCH} go build -o haven-notify main.go
# Use Alpine for running, with CA certificates for TLS
FROM alpine:latest
WORKDIR /app
RUN apk --no-cache add ca-certificates
COPY template/ template/
COPY --from=builder /app/haven-notify .
EXPOSE 8080
ENV WEBHOOK_URL=""
ENTRYPOINT ["/app/haven-notify"]
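
The `--platform=$BUILDPLATFORM` / `TARGETARCH` pattern keeps the Go compiler running natively and only cross-compiles the output binary. A local build equivalent to the workflow's build-push-action step might look like this (run from the repository root; `--push` assumes you are logged in to the registry):

```sh
# Cross-build both architectures and push the manifest list,
# mirroring the CI configuration above.
docker buildx build \
    --platform linux/amd64,linux/arm64 \
    -t git.ivanch.me/ivanch/haven-notify:latest \
    --push haven-notify
```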

haven-notify/README.md

@@ -0,0 +1,95 @@
<div align="center">
<img src="./assets/widelogo.png" alt="Haven Notify Logo">
</div>
## Overview
Haven Notify is an internal service designed to send notifications to a specified Discord channel.
It's built in Go and can be deployed as a container or managed service.
## Prerequisites
- Go 1.18 or newer
- Docker
- A Discord Webhook URL
## API Specification
### Send Notification
- **Endpoint**: `/notify`
- **Method**: `POST`
- **Request Body**:
```json
{
  "title": "Notification Title",
  "message": "Notification Message"
}
```
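
For example, assuming the service is reachable on localhost:8080:

```sh
curl -s -X POST "http://localhost:8080/notify" \
    -H "Content-Type: application/json" \
    -d '{"title": "Deploy finished", "message": "web stack is back up"}'
```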
### Send Backup Notification
- **Endpoint**: `/template/notify/backup`
- **Method**: `POST`
- **Request Body**:
```json
{
  "title": "Notification Title",
  "asset": "Notification Asset Name",
  "backupSizeInMB": 500,
  "extra": [
    {
      "name": "Additional Info",
      "value": "Some extra information"
    }
  ]
}
```
### Send Update Notification
- **Endpoint**: `/template/notify/update`
- **Method**: `POST`
- **Request Body**:
```json
{
  "host": "Host that was updated",
  "asset": "Asset that was updated",
  "time": 500 // in seconds
}
```
### Send Error Notification
- **Endpoint**: `/template/notify/error`
- **Method**: `POST`
- **Request Body**:
```json
{
  "caller": "Who triggered the error",
  "message": "Error while moving file",
  "critical": true,
  "extra": [
    {
      "name": "Additional Info",
      "value": "Some extra information"
    }
  ]
}
```
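
As with the other templates, this maps directly onto a `curl` call; a sketch with made-up field values:

```sh
curl -s -X POST "http://localhost:8080/template/notify/error" \
    -H "Content-Type: application/json" \
    -d '{"caller": "backup.sh", "message": "Error while moving file", "critical": true}'
```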
## Setup & Usage
### Docker
1. Build the Docker image:
```sh
docker build -t haven-notify .
```
2. Run the container:
```sh
docker run -e WEBHOOK_URL=your_webhook_url haven-notify
```
### Kubernetes
Deployment manifest is available at `deploy/haven-notify.yaml`.
1. Edit the manifest to set your environment variables.
2. Create a generic secret named `discord-webhook` with the key `HAVEN_WEBHOOK_URL` set to your webhook URL (matching the `secretKeyRef` in the manifest)
3. Apply deployment:
```sh
kubectl apply -f deploy/haven-notify.yaml
```
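
Step 2 corresponds to a command along these lines (add a namespace flag if the Deployment is not in `default`):

```sh
# Create the secret that the Deployment's secretKeyRef expects.
kubectl create secret generic discord-webhook \
    --from-literal=HAVEN_WEBHOOK_URL=your_webhook_url
```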

haven-notify/assets/widelogo.png (binary image, 334 KiB, not shown)

haven-notify/deploy/haven-notify.yaml

@@ -0,0 +1,73 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: haven-notify
  labels:
    app: haven-notify
spec:
  replicas: 1
  selector:
    matchLabels:
      app: haven-notify
  template:
    metadata:
      labels:
        app: haven-notify
    spec:
      containers:
        - name: haven-notify
          image: git.ivanch.me/ivanch/haven-notify:latest
          imagePullPolicy: Always
          ports:
            - containerPort: 8080
          env:
            - name: WEBHOOK_URL
              valueFrom:
                secretKeyRef:
                  name: discord-webhook
                  key: HAVEN_WEBHOOK_URL
          readinessProbe:
            httpGet:
              path: /ready
              port: 8080
            initialDelaySeconds: 5
            periodSeconds: 10
          livenessProbe:
            httpGet:
              path: /live
              port: 8080
            initialDelaySeconds: 5
            periodSeconds: 10
---
apiVersion: v1
kind: Service
metadata:
  name: haven-notify
spec:
  selector:
    app: haven-notify
  ports:
    - protocol: TCP
      port: 8080
      targetPort: 8080
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: haven-notify
  namespace: default
  annotations:
    traefik.ingress.kubernetes.io/router.entrypoints: web
spec:
  ingressClassName: nginx
  rules:
    - host: notify.haven
      http:
        paths:
          - path: /
            pathType: Prefix
            backend:
              service:
                name: haven-notify
                port:
                  number: 8080
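
Once applied, the health endpoints give a quick rollout check from outside the cluster; a sketch using a port-forward:

```sh
# Tunnel to the Service and hit the probes served by main.go.
kubectl port-forward svc/haven-notify 8080:8080 &
curl -s http://localhost:8080/live   # expect "Alive"
curl -s http://localhost:8080/ready  # expect "Ready"
```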

haven-notify/main.go

@@ -0,0 +1,214 @@
package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "html/template"
    "io/ioutil"
    "log"
    "net/http"
    "os"
    "strings"
    "time"
)

// Notification payload
type Notification struct {
    Title   string `json:"title"`
    Message string `json:"message"`
}

func main() {
    http.HandleFunc("/notify", notifyHandler)
    http.HandleFunc("/ready", readinessHandler)
    http.HandleFunc("/live", livenessHandler)
    http.HandleFunc("/template/notify/", templateNotifyHandler)
    log.Println("Starting server on :8080...")
    log.Fatal(http.ListenAndServe(":8080", nil))
}

func notifyHandler(w http.ResponseWriter, r *http.Request) {
    log.Printf("Incoming %s request from %s to %s", r.Method, r.RemoteAddr, r.URL.Path)
    if r.Method != http.MethodPost {
        log.Printf("Method not allowed: %s", r.Method)
        w.WriteHeader(http.StatusMethodNotAllowed)
        w.Write([]byte("Method not allowed"))
        return
    }
    var notif Notification
    if err := json.NewDecoder(r.Body).Decode(&notif); err != nil {
        log.Printf("Invalid payload: %v", err)
        w.WriteHeader(http.StatusBadRequest)
        w.Write([]byte("Invalid payload"))
        return
    }
    log.Printf("Received notification payload: Title='%s', Message='%s'", notif.Title, notif.Message)
    // Call Discord notification function
    if err := sendDiscordNotification(notif.Title, notif.Message); err != nil {
        log.Printf("Failed to send Discord notification: %v", err)
        w.WriteHeader(http.StatusInternalServerError)
        w.Write([]byte("Failed to send Discord notification"))
        return
    }
    log.Printf("Notification sent successfully for Title='%s'", notif.Title)
    w.WriteHeader(http.StatusOK)
    w.Write([]byte("Notification sent"))
}

// Readiness handler
func readinessHandler(w http.ResponseWriter, r *http.Request) {
    w.WriteHeader(http.StatusOK)
    w.Write([]byte("Ready"))
}

// Liveness handler
func livenessHandler(w http.ResponseWriter, r *http.Request) {
    w.WriteHeader(http.StatusOK)
    w.Write([]byte("Alive"))
}

func sendDiscordNotification(title, message string) error {
    webhookURL := os.Getenv("WEBHOOK_URL")
    if webhookURL == "" {
        log.Printf("WEBHOOK_URL environment variable not set")
        return fmt.Errorf("WEBHOOK_URL environment variable not set")
    }
    // Discord webhook payload
    type discordPayload struct {
        Content string `json:"content"`
    }
    content := "**" + title + "**\n" + message
    payload := discordPayload{Content: content}
    jsonData, err := json.Marshal(payload)
    if err != nil {
        log.Printf("Failed to marshal Discord payload: %v", err)
        return err
    }
    log.Printf("Sending Discord notification: Title='%s', Message='%s'", title, message)
    resp, err := http.Post(webhookURL, "application/json", bytes.NewBuffer(jsonData))
    if err != nil {
        log.Printf("Error posting to Discord webhook: %v", err)
        return err
    }
    defer resp.Body.Close()
    if resp.StatusCode < 200 || resp.StatusCode >= 300 {
        log.Printf("Discord webhook returned status: %s", resp.Status)
        return fmt.Errorf("Discord webhook returned status: %s", resp.Status)
    }
    log.Printf("Discord notification sent successfully: Title='%s'", title)
    return nil
}

func templateNotifyHandler(w http.ResponseWriter, r *http.Request) {
    log.Printf("Incoming %s request from %s to %s", r.Method, r.RemoteAddr, r.URL.Path)
    if r.Method != http.MethodPost {
        log.Printf("Method not allowed: %s", r.Method)
        w.WriteHeader(http.StatusMethodNotAllowed)
        w.Write([]byte("Method not allowed"))
        return
    }
    templateName := r.URL.Path[len("/template/notify/"):] // Extract template name
    if templateName == "" {
        log.Printf("Template name not provided")
        w.WriteHeader(http.StatusBadRequest)
        w.Write([]byte("Template name not provided"))
        return
    }
    templatePath := "template/" + templateName + ".tmpl"
    templateData, err := ioutil.ReadFile(templatePath)
    if err != nil {
        log.Printf("Failed to read template: %v", err)
        w.WriteHeader(http.StatusInternalServerError)
        w.Write([]byte("Failed to read template"))
        return
    }
    tmpl, err := template.New(templateName).Funcs(template.FuncMap{
        "formatSize": func(size float64) string {
            if size > 1024 {
                return fmt.Sprintf("%.2f GiB", size/1024)
            }
            return fmt.Sprintf("%.2f MiB", size)
        },
        "upper": strings.ToUpper,
        "lower": strings.ToLower,
        "title": strings.Title,
        "now": func() string {
            return fmt.Sprintf("%d", time.Now().Unix())
        },
        "formatTime": func(timestamp string) string {
            if timestamp == "" {
                return time.Now().Format("2006-01-02T15:04:05Z")
            }
            return timestamp
        },
    }).Parse(string(templateData))
    if err != nil {
        log.Printf("Failed to parse template: %v", err)
        w.WriteHeader(http.StatusInternalServerError)
        w.Write([]byte("Failed to parse template"))
        return
    }
    var rawPayload map[string]interface{}
    if err := json.NewDecoder(r.Body).Decode(&rawPayload); err != nil {
        log.Printf("Invalid payload: %v", err)
        w.WriteHeader(http.StatusBadRequest)
        w.Write([]byte("Invalid payload"))
        return
    }
    // Normalize keys to lowercase for case-insensitive parsing
    payload := make(map[string]interface{})
    for key, value := range rawPayload {
        payload[strings.ToLower(key)] = value
    }
    var filledTemplate bytes.Buffer
    if err := tmpl.Execute(&filledTemplate, payload); err != nil {
        log.Printf("Failed to execute template: %v", err)
        w.WriteHeader(http.StatusInternalServerError)
        w.Write([]byte("Failed to execute template"))
        return
    }
    webhookURL := os.Getenv("WEBHOOK_URL")
    if webhookURL == "" {
        log.Printf("WEBHOOK_URL environment variable not set")
        w.WriteHeader(http.StatusInternalServerError)
        w.Write([]byte("WEBHOOK_URL environment variable not set"))
        return
    }
    resp, err := http.Post(webhookURL, "application/json", &filledTemplate)
    if err != nil {
        log.Printf("Error posting to Discord webhook: %v", err)
        w.WriteHeader(http.StatusInternalServerError)
        w.Write([]byte("Failed to send notification"))
        return
    }
    defer resp.Body.Close()
    if resp.StatusCode < 200 || resp.StatusCode >= 300 {
        log.Printf("Discord webhook returned status: %s", resp.Status)
        w.WriteHeader(http.StatusInternalServerError)
        w.Write([]byte("Failed to send notification"))
        return
    }
    log.Printf("Notification sent successfully using template '%s'", templateName)
    w.WriteHeader(http.StatusOK)
    w.Write([]byte("Notification sent"))
}
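
At runtime the handler only needs the `WEBHOOK_URL` variable and the `template/` directory relative to the working directory, so the service can be exercised locally without Docker; a sketch (the webhook URL is a placeholder):

```sh
cd haven-notify
WEBHOOK_URL="https://discord.com/api/webhooks/your-id/your-token" go run main.go &
curl -s -X POST "http://localhost:8080/template/notify/update" \
    -H "Content-Type: application/json" \
    -d '{"host": "server-01", "asset": "Docker containers", "time": 93}'
```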

haven-notify/template/backup.tmpl

@@ -0,0 +1,39 @@
{{/*
    Docker Backup Notification Template
    Variables expected:
    - .title: The backup title/name
    - .asset: The asset being backed up
    - .backupsizeinmb: The backup size in MB (will be formatted automatically)
    - .extra: Optional array of additional fields with .name and .value
    Template Functions Available:
    - formatSize: Formats size in MB/GB automatically
*/}}
{
    "embeds": [
        {
            "title": "📦 Backup - {{.title}}",
            "description": "**{{.asset}}** has been backup-ed successfully! ✅🫡\n",
            "color": 3066993,
            "fields": [
                {
                    "name": "💾 Backup Size",
                    "value": "{{if .backupsizeinmb}}{{formatSize .backupsizeinmb}}{{else}}Unknown{{end}}",
                    "inline": true
                }
                {{- if .extra}}
                {{- range $index, $field := .extra}},
                {
                    "name": "{{$field.name}}",
                    "value": "{{$field.value}}",
                    "inline": true
                }
                {{- end}}
                {{- end}}
            ],
            "footer": {
                "text": "✨ Haven Notify ✨"
            }
        }
    ]
}
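
Note that `formatSize` treats the number as MiB and switches to GiB above 1024, and that the handler lowercases payload keys, which is why the template reads `.backupsizeinmb` while callers may send `backupSizeInMB`. For instance, a payload of 2048 renders the size field as `2.00 GiB`:

```sh
# 2048 MiB crosses the 1024 threshold, so the embed shows "2.00 GiB".
curl -s -X POST "http://localhost:8080/template/notify/backup" \
    -H "Content-Type: application/json" \
    -d '{"title": "NAS", "asset": "Docker volumes", "backupSizeInMB": 2048}'
```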

haven-notify/template/error.tmpl

@@ -0,0 +1,36 @@
{{/*
    Error Notification Template
    Variables expected:
    - .caller: The caller of the error
    - .message: The error message
    - .critical: Boolean indicating if the error is critical
    - .extra: Optional array of additional fields with .name and .value
*/}}
{
    "embeds": [
        {
            "title": "❌ Error",
            "description": "**{{.caller}}** encountered an error!",
            "color": {{if .critical}}15158332{{else}}15844367{{end}},
            "fields": [
                {
                    "name": "📄 Message",
                    "value": "{{.message}}",
                    "inline": false
                }
                {{- if .extra}}
                {{- range $index, $field := .extra}},
                {
                    "name": "{{$field.name}}",
                    "value": "{{$field.value}}",
                    "inline": true
                }
                {{- end}}
                {{- end}}
            ],
            "footer": {
                "text": "✨ Haven Notify ✨"
            }
        }
    ]
}

haven-notify/template/update.tmpl

@@ -0,0 +1,29 @@
{{/*
    Update Notification Template
    Variables expected:
    - .host: The host where the update occurred
    - .asset: The asset being updated (Docker or k8s)
    - .time: The time in seconds that the script took to run
    Template Functions Available:
    - formatTime: Formats time in seconds to a human-readable format
*/}}
{
    "embeds": [
        {
            "title": "🔄 Update - {{.asset}}",
            "description": "**{{.host}}** has successfully updated **{{.asset}}**! ✅",
            "color": 3447003,
            "fields": [
                {
                    "name": "⏱️ Time Taken",
                    "value": "{{if .time}}{{.time}}{{else}}Unknown{{end}} seconds",
                    "inline": true
                }
            ],
            "footer": {
                "text": "✨ Haven Notify ✨"
            }
        }
    ]
}

nas-gdrive-backup.sh

@@ -0,0 +1,292 @@
#!/bin/bash
# NAS Backup Script to Google Drive using rclone and 7zip
# For each folder on BACKUP_SOURCE, it gets the sha256 checksum of it, checks the checksum against the previous backup, and if it has changed, it creates a 7zip archive of the folder with encryption.
# It then uploads the archive to Google Drive using rclone.
# Install: curl -sSL https://git.ivanch.me/ivanch/server-scripts/raw/branch/main/nas-gdrive-backup.sh | bash -s -- --install
# Run manually: /usr/local/bin/nas-gdrive-backup.sh
# Configuration
BACKUP_SOURCE="/export/Backup"
META_DIR="/export/Backup/.gdrive"
TMP_DIR="/export/Backup/.gdrive/tmp"
ZIP_PASSWORD="password"
GDRIVE_REMOTE="gdrive"
GDRIVE_PATH="/NAS-Backups"
ARCHIVE_NAME="backup.7z"
LOG_FILE="/var/log/nas-gdrive-backup.log"
# Function for logging
log() {
echo "$(date '+%Y-%m-%d %H:%M:%S') - $1" | tee -a "$LOG_FILE"
}
clean_up() {
log "Cleaning up temporary files"
rm -rf "$TMP_DIR"
}
trap clean_up EXIT
create_7z() {
local folder="$1"
local archive_name="$2"
log "Creating 7zip archive of $folder"
7z a -p"$ZIP_PASSWORD" -mhe=on -mx=3 "$archive_name" "$folder"
if [ $? -ne 0 ]; then
log "ERROR: Failed to create 7zip archive of $folder"
fi
}
upload_to_gdrive() {
local archive_name="$1"
log "Uploading $archive_name to Google Drive"
# Should replace existing file if it exists
rclone copy "$archive_name" "$GDRIVE_REMOTE:$GDRIVE_PATH" \
--progress \
--check-first \
--transfers 1 \
--checkers 1 \
--retries 1 \
--low-level-retries 10
if [ $? -ne 0 ]; then
log "ERROR: Failed to upload $archive_name to Google Drive"
fi
}
main() {
# Check if 7z is installed
if ! which 7z > /dev/null; then
log "ERROR: 7z is not installed"
exit 1
fi
# Check if rclone is installed
if ! which rclone > /dev/null; then
log "ERROR: rclone is not installed"
exit 1
fi
# Create meta directory if it doesn't exist
if [ ! -d "$META_DIR" ]; then
log "Creating meta directory: $META_DIR"
mkdir -p "$META_DIR"
fi
# Fix permissions for the meta directory (777 recursively)
chmod -R 777 "$META_DIR"
# Loop through each folder in the backup source
for folder in "$BACKUP_SOURCE"/*; do
if [ -d "$folder" ]; then
log "Processing folder: $folder"
# Get the sha256 checksum of the folder
CHECKSUM=$(find "$folder" -type f -exec sha256sum {} + | sha256sum | awk '{print $1}')
META_FILE="$META_DIR/$(basename "$folder").sha256"
# Check if the checksum file exists
if [ -f "$META_FILE" ]; then
# Read the previous checksum from the file
PREV_CHECKSUM=$(cat "$META_FILE")
# Compare the checksums
if [ "$CHECKSUM" != "$PREV_CHECKSUM" ]; then
log "Changes detected in $folder - creating new archive"
create_7z "$folder" "$TMP_DIR/$(basename "$folder")_$ARCHIVE_NAME"
upload_to_gdrive "$TMP_DIR/$(basename "$folder")_$ARCHIVE_NAME"
# Update the checksum file
echo "$CHECKSUM" > "$META_FILE"
# Remove the temporary archive file
log "Removing temporary archive file"
rm "$TMP_DIR/$(basename "$folder")_$ARCHIVE_NAME"
else
log "No changes detected in $folder"
fi
else
log "No previous checksum found for $folder - creating new archive"
create_7z "$folder" "$TMP_DIR/$(basename "$folder")_$ARCHIVE_NAME"
upload_to_gdrive "$TMP_DIR/$(basename "$folder")_$ARCHIVE_NAME"
# Create a checksum file for the folder
echo "$CHECKSUM" > "$META_FILE"
# Remove the temporary archive file
log "Removing temporary archive file"
rm "$TMP_DIR/$(basename "$folder")_$ARCHIVE_NAME"
fi
else
log "Skipping $folder, not a directory"
fi
log ""
done
# Fix permissions for the meta directory (777 recursively)
chmod -R 777 "$META_DIR"
log "Backup process completed successfully"
# Exit with success
exit 0
}
###########################
### Installation script ###
###########################
# Function to install a dependency if not already installed
install_dependency() {
local package="$1"
if ! dpkg -l | grep -q "$package"; then
install_log_info "Installing $package"
apt-get update && apt-get install -y "$package"
if [ $? -ne 0 ]; then
install_log_error "ERROR: Failed to install $package"
exit 1
fi
install_log_ok "$package installed successfully"
else
install_log_ok "$package is already installed"
fi
}
install_log_ok() {
echo -e "\e[32m[✓]\e[0m $1"
}
install_log_error() {
echo -e "\e[31m[✗]\e[0m $1"
}
install_log_info() {
echo -e "\e[34m[!]\e[0m $1"
}
install_log_separator() {
echo -e "\e[36m========================================\e[0m"
}
install_script() {
echo -e ""
install_log_separator
install_log_info "Starting installation of NAS to Google Drive backup script"
install_log_separator
echo -e ""
install_log_separator
# Check if running as root
install_log_info "Checking if the script is running as root"
if [ "$(id -u)" -ne 0 ]; then
install_log_error "ERROR: This script must be run as root"
exit 1
else
install_log_ok "Running as root"
fi
install_log_separator
# Check for dependencies
install_log_info "Checking for required dependencies"
install_dependency "rclone"
install_dependency "p7zip-full"
install_log_separator
# Check if crontab exists
install_log_info "Checking if crontab is installed"
if ! command -v crontab &>/dev/null; then
install_log_error "crontab is not installed"
exit 1
else
install_log_ok "crontab is installed"
fi
install_log_separator
install_log_info "Installing script to /usr/local/bin/nas-gdrive-backup.sh"
curl -sSL https://git.ivanch.me/ivanch/server-scripts/raw/branch/main/nas-gdrive-backup.sh -o /usr/local/bin/nas-gdrive-backup.sh || { install_log_error "ERROR: Failed to download the script"; exit 1; }
chmod +x /usr/local/bin/nas-gdrive-backup.sh
install_log_info "Setting up ZIP_PASSWORD in $0"
if [ -z "$ZIP_PASSWORD" ]; then
install_log_error "ERROR: ZIP_PASSWORD is not set"
exit 1
fi
read -p "Enter ZIP_PASSWORD: " ZIP_PASSWORD </dev/tty
if [ -z "$ZIP_PASSWORD" ]; then
install_log_error "ERROR: ZIP_PASSWORD cannot be empty"
exit 1
fi
# Update the ZIP_PASSWORD in the script
sed -i "s/^ZIP_PASSWORD=.*/ZIP_PASSWORD=\"$ZIP_PASSWORD\"/" /usr/local/bin/nas-gdrive-backup.sh
log "ZIP_PASSWORD updated in /usr/local/bin/nas-gdrive-backup.sh"
install_log_separator
# Check for existence of source directories
install_log_info "Checking if BACKUP_SOURCE, META_DIR, and TMP_DIR exists"
if ! [ -d "$BACKUP_SOURCE" ]; then
install_log_error "ERROR: BACKUP_SOURCE directory does not exist"
exit 1
else
install_log_ok "BACKUP_SOURCE directory exists: $BACKUP_SOURCE"
fi
if ! [ -d "$META_DIR" ]; then
install_log_info "Creating META_DIR: $META_DIR"
mkdir -p "$META_DIR"
fi
if ! [ -d "$TMP_DIR" ]; then
install_log_info "Creating TMP_DIR: $TMP_DIR"
mkdir -p "$TMP_DIR"
fi
install_log_info "Setting permissions for $META_DIR and $TMP_DIR to 777"
chmod -R 777 "$META_DIR" "$TMP_DIR"
install_log_ok "Directories checked and are ok"
# Check for existing .sha256 files, if there are any, prompt to remove them
install_log_info "Verifying existing .sha256 files in $META_DIR"
for file in "$META_DIR"/*; do
if [ -f "$file" ] && [[ "$file" == *.sha256 ]]; then
install_log_info "Found .sha256 file: \e[96m\e[4m$file\e[0m"
read -p "Do you want to remove this file? [y/N]: " choice </dev/tty
if [[ "$choice" == "y" || "$choice" == "Y" ]]; then
install_log_info "Removing $file"
rm "$file"
else
install_log_info "Skipping $file"
fi
fi
done
install_log_ok "Existing .sha256 files checked"
install_log_separator
install_log_info "Setting up rclone configuration"
if ! rclone config show "$GDRIVE_REMOTE" &>/dev/null; then
install_log_error "ERROR: rclone '$GDRIVE_REMOTE' remote is not configured"
install_log_error "Please run 'rclone config' to set up your Google Drive remote"
exit 1
fi
install_log_ok "rclone gdrive remote is configured"
install_log_separator
install_log_info "Setting up cron job for backup script"
(crontab -l 2>/dev/null; echo "55 23 * * 1 /usr/local/bin/nas-gdrive-backup.sh > /tmp/nas-gdrive-backup.log") | crontab -
install_log_ok "Cron job set up to run /usr/local/bin/nas-gdrive-backup.sh every Monday at 23:55"
install_log_separator
echo -e ""
install_log_separator
install_log_ok "Installation completed successfully!"
install_log_separator
echo -e ""
echo -e "You can now run the script manually with: \e[32mnas-gdrive-backup.sh\e[0m"
echo -e "Or it will run automatically according to the cron schedule."
# Exit with success
exit 0
}
# Check for install flag
if [[ "$1" == "--install" ]]; then
install_script
exit 0
fi
main "$@"
exit 0

View File

@@ -1,193 +1,330 @@
#!/bin/bash
# Usage: curl -sSL https://git.ivanch.me/ivanch/server-scripts/raw/branch/main/scripts-download.sh | bash

set -euo pipefail

#==============================================================================
# CONFIGURATION
#==============================================================================

# Color definitions for output formatting
readonly RED='\033[1;31m'
readonly GREEN='\033[1;32m'
readonly NC='\033[0m'
readonly LIGHT_BLUE='\033[1;34m'
readonly LIGHT_RED='\033[1;31m'
readonly LIGHT_GREEN='\033[1;32m'
readonly GREY='\033[1;30m'
readonly YELLOW='\033[1;33m'

# Configuration
readonly FILES_URL="https://git.ivanch.me/ivanch/server-scripts/raw/branch/main"
readonly REQUIRED_PACKAGES=("zip" "unzip" "curl")
readonly REQUIRED_COMMANDS=("zip" "unzip" "sha256sum" "curl" "crontab")
readonly AVAILABLE_SCRIPTS=("clean.sh" "backup.sh" "docker-updater.sh")

# Format: [script_name]="cron_schedule"
declare -A CRONTAB_SCHEDULES=(
    ["clean.sh"]="0 3 * * *"          # Daily at 3 AM
    ["backup.sh"]="0 23 * * 1,5"      # Monday and Friday at 11 PM
    ["docker-updater.sh"]="0 3 * * 6" # Every Saturday at 3 AM
)

#==============================================================================
# UTILITY FUNCTIONS
#==============================================================================

# Print formatted log messages
log_info()    { echo -e "${GREY}[i] $1${NC}"; }
log_success() { echo -e "${GREEN}[✓] $1${NC}"; }
log_warning() { echo -e "${YELLOW}[!] $1${NC}"; }
log_error()   { echo -e "${RED}[x] $1${NC}" >&2; }
log_step()    { echo -e "${LIGHT_BLUE}[i] $1${NC}"; }

# Exit with error message
die() {
    log_error "$1"
    exit 1
}

# Check if a command exists
command_exists() {
    command -v "$1" >/dev/null 2>&1
}

# Check if a process is running
process_running() {
    pgrep "$1" >/dev/null 2>&1
}

#==============================================================================
# MAIN FUNCTIONS
#==============================================================================

# Detect the operating system
detect_operating_system() {
    if command_exists apk; then
        echo "Alpine"
    elif command_exists apt; then
        echo "Debian"
    else
        die "Unsupported operating system. This script supports Alpine and Debian-based systems only."
    fi
}

# Check for missing packages
get_missing_packages() {
    local missing=()

    # Check each required command and map to package names
    if ! command_exists "zip"; then
        missing+=("zip")
    fi
    if ! command_exists "unzip"; then
        missing+=("unzip")
    fi
    if ! command_exists "curl"; then
        missing+=("curl")
    fi
    # sha256sum is part of coreutils (usually pre-installed)
    # crontab is part of cron package, but we'll check for cron service later

    # Only print if there are missing packages
    if [[ ${#missing[@]} -gt 0 ]]; then
        printf '%s\n' "${missing[@]}"
    fi
}

# Install packages based on the detected OS
install_packages() {
    local os="$1"
    shift
    local packages=("$@")

    if [[ ${#packages[@]} -eq 0 ]]; then
        log_info "No packages to install"
        return 0
    fi

    log_info "Installing required packages: ${packages[*]}"
    log_info "Debug: Installing ${#packages[@]} packages on $os"

    case "$os" in
        "Alpine")
            log_info "Updating APK package index..."
            apk update >/dev/null || die "Failed to update APK package index"
            log_info "Installing packages via APK..."
            apk add --no-cache "${packages[@]}" >/dev/null || die "Failed to install packages via APK"
            ;;
        "Debian")
            log_info "Ensuring /var/cache/apt/archives/partial exists..."
            mkdir -p /var/cache/apt/archives/partial || die "Failed to create /var/cache/apt/archives/partial"
            log_info "Updating APT package index..."
            apt-get update -y >/dev/null || die "Failed to update APT package index"
            log_info "Installing packages via APT..."
            apt-get install -y "${packages[@]}" >/dev/null || die "Failed to install packages via APT"
            ;;
        *)
            log_error "Debug info - OS variable content: '$os'"
            log_error "Debug info - OS variable length: ${#os}"
            die "Unknown operating system: '$os'"
            ;;
    esac
}

# Verify all required packages are available
verify_packages() {
    log_info "Verifying package installation..."
    local missing_packages
    readarray -t missing_packages < <(get_missing_packages)

    if [[ ${#missing_packages[@]} -gt 0 ]]; then
        log_error "Failed to install required packages: ${missing_packages[*]}"
        die "Please install the missing packages manually and try again"
    fi

    log_success "All required packages are available"
}

# Check if crontab service is running
check_crontab_service() {
    log_info "Checking crontab service status..."
    if ! process_running "cron"; then
        die "Crontab service is not running. Please start the cron service first."
    fi
    log_success "Crontab service is running"
}

# Prompt user to select scripts for installation
select_scripts() {
    local selected=()

    echo >&2 # Send to stderr so it doesn't get captured
    echo -e "${GREY}[i] Available scripts for download and installation:${NC}" >&2
    echo >&2
    for script in "${AVAILABLE_SCRIPTS[@]}"; do
        local schedule="${CRONTAB_SCHEDULES[$script]:-"0 0 * * *"}"
        echo -e "  ${LIGHT_BLUE}$script${NC} - Schedule: ${GREY}$schedule${NC}" >&2
    done
    echo >&2

    echo -e "${GREY}[i] Select scripts to download and install:${NC}" >&2
    for script in "${AVAILABLE_SCRIPTS[@]}"; do
        read -p "Install $script? [Y/n]: " choice </dev/tty
        if [[ "$choice" =~ ^[Yy]?$ ]]; then
            selected+=("$script")
        fi
    done

    if [[ ${#selected[@]} -eq 0 ]]; then
        echo -e "${RED}[x] No scripts selected. Exiting...${NC}" >&2
        exit 1
    fi

    # Only output the selected scripts to stdout
    printf '%s\n' "${selected[@]}"
}

# Verify server connectivity for selected scripts
verify_server_connectivity() {
    local scripts=("$@")

    log_info "Verifying server connectivity..."

    for script in "${scripts[@]}"; do
        local url="$FILES_URL/$script"
        if ! curl -s --head "$url" | head -n 1 | grep -E "HTTP/[12] [23].." >/dev/null; then
            die "Script '$script' not found on server: $url"
        fi
    done

    log_success "Server connectivity verified"
}

# Download selected scripts
download_scripts() {
    local scripts=("$@")

    log_info "Downloading ${#scripts[@]} script(s)..."

    for script in "${scripts[@]}"; do
        local url="$FILES_URL/$script"
        log_step "Downloading $script..."
        if ! curl -s -o "./$script" "$url"; then
            die "Failed to download $script from $url"
        fi

        # Set executable permissions
        chmod +x "./$script" || die "Failed to set executable permissions for $script"
    done

    log_success "All scripts downloaded and configured"
}

# Setup crontab entries for selected scripts
setup_crontab() {
    local scripts=("$@")
    local current_workdir
    current_workdir=$(pwd)

    log_info "Setting up crontab entries..."

    for script in "${scripts[@]}"; do
        local schedule="${CRONTAB_SCHEDULES[$script]:-"0 0 * * *"}"
        local log_file="/tmp/${script%.*}.log"
        local cron_entry="$schedule $current_workdir/$script > $log_file 2>&1"

        log_step "Configuring crontab for $script (Schedule: $schedule)..."

        # Remove existing crontab entry for this script
        if crontab -l 2>/dev/null | grep -q "$script"; then
            log_step "Removing existing crontab entry for $script..."
            crontab -l 2>/dev/null | grep -v "$script" | crontab - || die "Failed to remove existing crontab entry"
        fi

        # Add new crontab entry
        (crontab -l 2>/dev/null; echo "$cron_entry") | crontab - || die "Failed to add crontab entry for $script"

        # Verify the entry was added
        if ! crontab -l 2>/dev/null | grep -q "$script"; then
            die "Failed to verify crontab entry for $script"
        fi

        log_success "Crontab configured for $script"
    done

    log_success "All crontab entries configured successfully"
}

#==============================================================================
# MAIN EXECUTION
#==============================================================================

main() {
    log_step "Starting Server Scripts Downloader"
    echo

    # System detection and validation
    log_info "Detecting operating system..."
    local detected_os
    detected_os=$(detect_operating_system)
    detected_os=$(echo "$detected_os" | tr -d '\n\r' | xargs) # Clean any whitespace/newlines
    log_success "Detected $detected_os Linux"

    # Package management
    local missing_packages
    readarray -t missing_packages < <(get_missing_packages)

    # Filter out empty strings
    local filtered_packages=()
    for pkg in "${missing_packages[@]}"; do
        if [[ -n "$pkg" ]]; then
            filtered_packages+=("$pkg")
        fi
    done
    missing_packages=("${filtered_packages[@]}")

    # Debug output
    log_info "Debug: Found ${#missing_packages[@]} missing packages"
    for i in "${!missing_packages[@]}"; do
        log_info "Debug: Missing package $((i+1)): '${missing_packages[i]}'"
    done

    if [[ ${#missing_packages[@]} -gt 0 ]]; then
        log_warning "Missing packages detected: ${missing_packages[*]}"
        install_packages "$detected_os" "${missing_packages[@]}"
    else
        log_success "All required packages are already installed"
    fi

    verify_packages
    check_crontab_service

    # Script selection and installation
    local selected_scripts
    readarray -t selected_scripts < <(select_scripts)

    log_info "Selected scripts: ${selected_scripts[*]}"

    verify_server_connectivity "${selected_scripts[@]}"
    download_scripts "${selected_scripts[@]}"
    setup_crontab "${selected_scripts[@]}"

    echo
    log_success "Installation completed successfully!"
    log_info "Scripts have been downloaded to: $(pwd)"
    log_info "Crontab entries have been configured. Use 'crontab -l' to view them."
    log_info "Log files will be created in /tmp/ directory."
}

# Execute main function
main "$@"

84
windows-backup.ps1 Normal file
View File

@@ -0,0 +1,84 @@
$7zipPath = "$env:ProgramFiles\7-Zip\7z.exe"
# (7-Zip availability is checked below, after Send-Notify is defined, so the failure can be reported)
$BackupSource = @(
"$env:USERPROFILE\Documents",
"$env:USERPROFILE\Desktop",
"$env:USERPROFILE\Pictures",
"$env:USERPROFILE\.ssh",
"$env:USERPROFILE\.kube"
)
$NASDestination = "\\OMV\Backup\$env:COMPUTERNAME"
$TempDir = "$env:TEMP\BackupTemp"
$Date = Get-Date -Format "yyyy-MM-dd"
$NotifyUrl = "http://notify.haven/notify"
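# $NotifyUrl points at the internal notify service used by Send-Notify below; adjust it for your environment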
function Send-Notify {
param (
[string]$Message
)
if (-not $NotifyUrl) {
Write-Host "NotifyUrl is not set. Notification not sent."
return
}
$Title = "Backup - $env:COMPUTERNAME"
$Body = @{ title = $Title; message = $Message } | ConvertTo-Json
try {
Invoke-RestMethod -Uri $NotifyUrl -Method Post -ContentType 'application/json' -Body $Body | Out-Null
Write-Host "Notification sent: $Title - $Message"
} catch {
Write-Host "Failed to send notification: $_"
}
}
# Check that 7-Zip is installed (done here, after Send-Notify is defined, so the failure can be reported)
if (!(Test-Path $7zipPath)) {
Write-Host "7-Zip is not installed. Please install it to use this script."
Send-Notify "❌ 7-Zip is not installed. Backup aborted."
exit 1
}
# Create temp directory
New-Item -ItemType Directory -Path $TempDir -Force | Out-Null
# Create NAS destination if it doesn't exist
if (!(Test-Path $NASDestination)) {
New-Item -ItemType Directory -Path $NASDestination -Force | Out-Null
}
foreach ($Folder in $BackupSource) {
if (Test-Path $Folder) {
$FolderName = Split-Path $Folder -Leaf
$ZipFile = "$TempDir\$FolderName-$Date.zip"
Write-Host "Compressing $Folder..."
# -mx=9 selects maximum compression; $LASTEXITCODE is checked below, so the output needn't be captured
& "$7zipPath" a -tzip "$ZipFile" "$Folder\*" -mx=9
if ($LASTEXITCODE -ne 0) {
Write-Host "Compression failed for $Folder."
Send-Notify "❌ Compression failed for $Folder."
continue
}
Write-Host "Copying $ZipFile to NAS..."
Copy-Item $ZipFile $NASDestination -Force
Write-Host "Removing $ZipFile..."
Remove-Item $ZipFile
} else {
Write-Host "Source folder not found: $Folder"
Send-Notify "⚠️ Source folder not found: $Folder"
}
}
Write-Host "Removing Files older than 7 days from $NASDestination..."
$OldFiles = Get-ChildItem -Path $NASDestination -File | Where-Object { $_.LastWriteTime -lt (Get-Date).AddDays(-7) }
foreach ($OldFile in $OldFiles) {
Remove-Item $OldFile.FullName -Force
Write-Host "Removed: $($OldFile.FullName)"
}
# Cleanup
Remove-Item $TempDir -Recurse -Force
Write-Host "Backup completed!"
Send-Notify "✅ Backup completed successfully."