Compare commits: 100262513b...main (16 commits)

Commits (SHA1):
96548f4773
c34ee5185d
1489062943
eb8ca78f4f
45567b2242
b0324ac9d8
06cf78a4a6
cd57837696
cedc435df0
aa7a9b8548
11e2a28bd4
8b52bd2c45
442ff12039
12920c10d4
99e110afb3
8108ca7e7b
CI workflow (Haven Notify build & deploy):

@@ -18,7 +18,7 @@ env:
 jobs:
   build_haven_notify:
     name: Build Haven Notify Image
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
 
     steps:
       - name: Check out repository
@@ -31,28 +31,25 @@ jobs:
            -u "${{ env.REGISTRY_USERNAME }}" \
            --password-stdin
 
-      - name: Display Directory Structure
-        run: |
-          echo "Current directory structure:"
-          ls -lR .
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
 
-      - name: Build and Push Image
-        run: |
-          TAG=latest
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
 
-          cd haven-notify
-
-          docker buildx build \
-            -t "${{ env.IMAGE_NOTIFY }}:${TAG}" \
-            --platform linux/amd64,linux/arm64 \
-            -f Dockerfile .
-
-          docker push "${{ env.IMAGE_NOTIFY }}:${TAG}"
+      - name: Build and Push Multi-Arch Image
+        uses: docker/build-push-action@v6
+        with:
+          push: true
+          context: haven-notify
+          platforms: linux/amd64,linux/arm64
+          tags: |
+            ${{ env.IMAGE_NOTIFY }}:latest
 
   deploy_haven_notify:
     name: Deploy Haven Notify (internal)
     runs-on: ubuntu-amd64
-    needs: build_haven_notify_amd64
+    needs: build_haven_notify
     steps:
       - name: Check KUBE_CONFIG validity
        run: |
@@ -79,7 +76,7 @@ jobs:
        env:
          KUBE_CONFIG: ${{ env.KUBE_CONFIG }}
 
-      - name: Test connection to cluster
+      - name: Check connection to cluster
        run: |
          cd haven-notify/deploy
          kubectl --kubeconfig=kubeconfig.yaml cluster-info
@@ -92,4 +89,4 @@ jobs:
       - name: Rollout restart haven-notify
        run: |
          cd haven-notify/deploy
          kubectl --kubeconfig=kubeconfig.yaml rollout restart deployment/haven-notify
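The build job now delegates cross-building to docker/setup-qemu-action, docker/setup-buildx-action, and docker/build-push-action instead of a hand-rolled buildx and push sequence, and the deploy job's needs reference is updated to the renamed build job. A quick way to confirm the pushed tag really carries both platforms is to inspect its manifest list; a minimal sketch, assuming registry credentials are available locally and <image> stands in for the value of env.IMAGE_NOTIFY:

  # List the platforms included in the pushed manifest; expect linux/amd64 and linux/arm64.
  docker buildx imagetools inspect "<image>:latest"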
backup.sh (71 changed lines)

@@ -1,53 +1,23 @@
 #!/bin/bash
 
-### AUTO-UPDATER ###
-# Variables
-SERVER_FILE="https://git.ivanch.me/ivanch/server-scripts/raw/branch/main/backup.sh"
-SERVER_OK=1
-HOSTNAME=$(cat /etc/hostname)
-
-NOTIFY_URL="http://notify.haven/notify"
-
 # Function to send notification
-send_notify() {
-    local title="$1"
-    local message="$2"
-    curl -s -X POST "$NOTIFY_URL" \
+HOSTNAME=$(cat /etc/hostname)
+NOTIFY_URL_ERROR="http://notify.haven/template/notify/error"
+NOTIFY_URL_BACKUP="http://notify.haven/template/notify/backup"
+send_error_notification() {
+    local message="$1"
+    local critical="$2"
+    curl -s -X POST "$NOTIFY_URL_ERROR" \
         -H "Content-Type: application/json" \
-        -d "{\"title\": \"Docker Backup - $HOSTNAME\", \"message\": \"$message\"}"
+        -d "{\"caller\": \"Docker Backup - $HOSTNAME\", \"message\": \"$message\", \"critical\": $critical}"
+}
+send_backup_notification() {
+    local message="$1"
+    local backup_size="$2"
+    curl -s -X POST "$NOTIFY_URL_BACKUP" \
+        -H "Content-Type: application/json" \
+        -d "{\"title\": \"Docker Backup - $HOSTNAME\", \"message\": \"$message\", \"backupSizeInMB\": $backup_size}"
 }
 
-# Check if the server file exists
-curl -s --head $SERVER_FILE | head -n 1 | grep -E "HTTP/[12] [23].." > /dev/null
-
-if [ $? -ne 0 ]; then
-    echo "Error: $SERVER_FILE not found." >&2
-    send_notify "" "❌ Server file not found: $SERVER_FILE"
-    SERVER_OK=0
-fi
-
-if [ $SERVER_OK -eq 1 ]; then
-    echo "Running auto-update..."
-
-    # Compare the local and server files sha256sum to check if an update is needed
-    LOCAL_SHA256=$(sha256sum backup.sh | awk '{print $1}')
-    SERVER_SHA256=$(curl -s $SERVER_FILE | sha256sum | awk '{print $1}')
-
-    if [ "$LOCAL_SHA256" != "$SERVER_SHA256" ]; then
-        echo "Updating backup.sh..."
-        curl -s -o backup.sh $SERVER_FILE
-        echo "backup.sh updated."
-
-        chmod +x backup.sh
-        echo "Permissions set up."
-
-        echo "Running updated backup.sh..."
-        ./backup.sh
-        exit 0
-    else
-        echo "backup.sh is up to date.."
-    fi
-fi
-
 ####################
 
@@ -61,31 +31,32 @@ REMOTE_DIR="/export/Backup/Docker/$(cat /etc/hostname)"
 # Create a compressed backup file
 zip -q -r $BACKUP_FILE $SOURCE_DIR || true
 if [ $? -ne 0 ]; then
-    send_notify "" "⚠️ Some files or folders in $SOURCE_DIR could not be backed up (possibly in use or locked). Backup archive created with available files."
+    send_error_notification "⚠️ Some files or folders in $SOURCE_DIR could not be backed up (possibly in use or locked). Backup archive created with available files." false
 fi
 
 # Check if remote path exists
 if ! ssh $REMOTE_USER@$REMOTE_HOST "mkdir -p $REMOTE_DIR"; then
-    send_notify "" "❌ Failed to create remote directory: $REMOTE_DIR on $REMOTE_HOST"
+    send_error_notification "❌ Failed to create remote directory: $REMOTE_DIR on $REMOTE_HOST" true
     exit 1
 fi
 
 # Transfer the backup file to the remote server
 if ! scp $BACKUP_FILE $REMOTE_USER@$REMOTE_HOST:$REMOTE_DIR; then
-    send_notify "" "❌ Failed to transfer backup file to remote server: $REMOTE_HOST:$REMOTE_DIR"
+    send_error_notification "❌ Failed to transfer backup file to remote server: $REMOTE_HOST:$REMOTE_DIR" true
     exit 1
 fi
 
 # Remove the backup file
+BACKUP_SIZE=$(du -m $BACKUP_FILE | cut -f1)
 rm $BACKUP_FILE
 
 # Erase last 7 days backups from remote server
 if ! ssh $REMOTE_USER@$REMOTE_HOST "find $REMOTE_DIR -type f -name 'docker_backup_*' -mtime +7 -exec rm {} \;"; then
-    send_notify "" "⚠️ Failed to clean old backups on remote server: $REMOTE_HOST:$REMOTE_DIR"
+    send_error_notification "⚠️ Failed to clean old backups on remote server: $REMOTE_HOST:$REMOTE_DIR" false
 fi
 
 # Success notification
-send_notify "" "✅ Backup completed successfully for: $SOURCE_DIR to $REMOTE_HOST:$REMOTE_DIR"
+send_backup_notification "✅ Backup completed successfully for: $SOURCE_DIR to $REMOTE_HOST:$REMOTE_DIR" $BACKUP_SIZE
 echo "Backup completed successfully"
 exit 0
 
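backup.sh drops its self-update block and now reports through the templated haven-notify endpoints instead of the old generic /notify route, measuring the archive size with du before deleting it. The new success path can be exercised by hand with the same payload shape; this is a sketch only, assuming notify.haven resolves from the host and using hypothetical values:

  # Mirrors send_backup_notification for a hypothetical 42 MB archive.
  curl -s -X POST "http://notify.haven/template/notify/backup" \
      -H "Content-Type: application/json" \
      -d '{"title": "Docker Backup - test-host", "message": "dry run", "backupSizeInMB": 42}'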
clean.sh (81 changed lines)

@@ -4,7 +4,6 @@
 #
 # Description: Comprehensive system cleanup for Docker containers and Linux systems
 # Features:
-#   - Self-updating capability
 #   - Docker resource cleanup (images, containers, volumes, networks)
 #   - Package manager cache cleanup (APK/APT)
 #   - System cache and temporary file cleanup
@@ -30,10 +29,6 @@ readonly LIGHT_BLUE='\033[1;34m'
 readonly LIGHT_GREY='\033[0;37m'
 readonly YELLOW='\033[1;33m'
 
-# Script configuration
-readonly SCRIPT_NAME="clean.sh"
-readonly SERVER_BASE_URL="https://git.ivanch.me/ivanch/server-scripts/raw/branch/main"
-
 # Cleanup configuration
 readonly LOG_RETENTION_DAYS=30
 readonly JOURNAL_RETENTION_DAYS=7
@@ -133,79 +128,6 @@ get_system_info() {
     echo "$info"
 }
 
-#==============================================================================
-# AUTO-UPDATE FUNCTIONALITY
-#==============================================================================
-
-# Check server connectivity
-check_server_connectivity() {
-    local url="$1"
-    curl -s --head "$url" | head -n 1 | grep -E "HTTP/[12] [23].." >/dev/null 2>&1
-}
-
-# Get SHA256 hash of a file
-get_file_hash() {
-    local file="$1"
-    sha256sum "$file" 2>/dev/null | awk '{print $1}' || echo ""
-}
-
-# Get SHA256 hash from URL content
-get_url_hash() {
-    local url="$1"
-    curl -s "$url" 2>/dev/null | sha256sum | awk '{print $1}' || echo ""
-}
-
-# Perform self-update if newer version is available
-perform_self_update() {
-    if [[ "$AUTO_UPDATE_ENABLED" != "true" ]]; then
-        log_info "Auto-update is disabled"
-        return 0
-    fi
-
-    local server_url="$SERVER_BASE_URL/$SCRIPT_NAME"
-
-    log_step "Checking for script updates..."
-
-    # Check if server file is accessible
-    if ! check_server_connectivity "$server_url"; then
-        log_warning "Cannot connect to update server, continuing with current version"
-        return 0
-    fi
-
-    # Compare local and server file hashes
-    local local_hash server_hash
-    local_hash=$(get_file_hash "$SCRIPT_NAME")
-    server_hash=$(get_url_hash "$server_url")
-
-    if [[ -z "$local_hash" || -z "$server_hash" ]]; then
-        log_warning "Cannot determine file hashes, skipping update"
-        return 0
-    fi
-
-    if [[ "$local_hash" != "$server_hash" ]]; then
-        log_info "Update available, downloading new version..."
-
-        # Create backup of current script
-        local backup_file="${SCRIPT_NAME}.backup.$(date +%s)"
-        cp "$SCRIPT_NAME" "$backup_file" || die "Failed to create backup"
-
-        # Download updated script
-        if curl -s -o "$SCRIPT_NAME" "$server_url"; then
-            chmod +x "$SCRIPT_NAME" || die "Failed to set executable permissions"
-            log_success "Script updated successfully"
-
-            log_step "Running updated script..."
-            exec ./"$SCRIPT_NAME" "$@"
-        else
-            # Restore backup on failure
-            mv "$backup_file" "$SCRIPT_NAME"
-            die "Failed to download updated script"
-        fi
-    else
-        log_success "Script is already up to date"
-    fi
-}
-
 #==============================================================================
 # DOCKER CLEANUP FUNCTIONS
 #==============================================================================
@@ -526,9 +448,6 @@ main() {
         echo
     fi
 
-    # Perform self-update if enabled
-    perform_self_update "$@"
-
     # Docker cleanup
     cleanup_docker
 
docker-updater.sh

@@ -4,7 +4,6 @@
 #
 # Description: Automatically updates Docker containers and manages Docker images
 # Features:
-#   - Self-updating capability
 #   - Updates all Docker Compose projects in /root/docker
 #   - Skips containers with .ignore file
 #   - Removes obsolete Docker Compose version attributes
@@ -13,6 +12,22 @@
 # Version: 2.0
 
 set -euo pipefail  # Exit on error, undefined vars, and pipe failures
+HOSTNAME=$(cat /etc/hostname)
+NOTIFY_URL_ERROR="http://notify.haven/template/notify/error"
+NOTIFY_URL_UPDATE="http://notify.haven/template/notify/update"
+send_error_notification() {
+    local message="$1"
+    local critical="$2"
+    curl -s -X POST "$NOTIFY_URL_ERROR" \
+        -H "Content-Type: application/json" \
+        -d "{\"caller\": \"$HOSTNAME\", \"message\": \"$message\", \"critical\": $critical}"
+}
+send_update_notification() {
+    local script_time="$1"
+    curl -s -X POST "$NOTIFY_URL_UPDATE" \
+        -H "Content-Type: application/json" \
+        -d "{\"host\": \"$HOSTNAME\", \"asset\": \"Docker containers\", \"time\": $script_time}"
+}
 
 #==============================================================================
 # CONFIGURATION
@@ -28,8 +43,6 @@ readonly LIGHT_GREY='\033[0;37m'
 readonly YELLOW='\033[1;33m'
 
 # Script configuration
-readonly SCRIPT_NAME="docker-updater.sh"
-readonly SERVER_BASE_URL="https://git.ivanch.me/ivanch/server-scripts/raw/branch/main"
 readonly DOCKER_FOLDER="/root/docker"
 readonly COMPOSE_FILES=("docker-compose.yml" "docker-compose.yaml" "compose.yaml" "compose.yml")
 
@@ -43,11 +56,18 @@ readonly AUTO_UPDATE_ENABLED=true
 # Print formatted log messages
 log_info() { echo -e "${LIGHT_GREY}[i] $1${NC}"; }
 log_success() { echo -e "${LIGHT_GREEN}[✓] $1${NC}"; }
-log_warning() { echo -e "${YELLOW}[!] $1${NC}"; }
-log_error() { echo -e "${RED}[x] $1${NC}" >&2; }
 log_step() { echo -e "${LIGHT_BLUE}[i] $1${NC}"; }
 log_container() { echo -e "${LIGHT_BLUE}[$1] $2${NC}"; }
 
+log_warning() {
+    echo -e "${YELLOW}[!] $1${NC}";
+    send_error_notification "$1" false
+}
+log_error() {
+    echo -e "${RED}[x] $1${NC}" >&2;
+    send_error_notification "$1" true
+}
+
 # Exit with error message
 die() {
     log_error "$1"
@@ -86,69 +106,6 @@ get_url_hash() {
     curl -s "$url" 2>/dev/null | sha256sum | awk '{print $1}' || echo ""
 }
 
-# Check if server file is accessible
-check_server_connectivity() {
-    local url="$1"
-    curl -s --head "$url" | head -n 1 | grep -E "HTTP/[12] [23].." >/dev/null 2>&1
-}
-
-#==============================================================================
-# AUTO-UPDATE FUNCTIONALITY
-#==============================================================================
-
-# Perform self-update if newer version is available
-perform_self_update() {
-    if [[ "$AUTO_UPDATE_ENABLED" != "true" ]]; then
-        log_info "Auto-update is disabled"
-        return 0
-    fi
-
-    local server_url="$SERVER_BASE_URL/$SCRIPT_NAME"
-
-    log_step "Checking for script updates..."
-
-    # Check if server file is accessible
-    if ! check_server_connectivity "$server_url"; then
-        log_warning "Cannot connect to update server, continuing with current version"
-        return 0
-    fi
-
-    # Compare local and server file hashes
-    local local_hash
-    local server_hash
-
-    local_hash=$(get_file_hash "$SCRIPT_NAME")
-    server_hash=$(get_url_hash "$server_url")
-
-    if [[ -z "$local_hash" || -z "$server_hash" ]]; then
-        log_warning "Cannot determine file hashes, skipping update"
-        return 0
-    fi
-
-    if [[ "$local_hash" != "$server_hash" ]]; then
-        log_info "Update available, downloading new version..."
-
-        # Create backup of current script
-        local backup_file="${SCRIPT_NAME}.backup.$(date +%s)"
-        cp "$SCRIPT_NAME" "$backup_file" || die "Failed to create backup"
-
-        # Download updated script
-        if curl -s -o "$SCRIPT_NAME" "$server_url"; then
-            chmod +x "$SCRIPT_NAME" || die "Failed to set executable permissions"
-            log_success "Script updated successfully"
-
-            log_step "Running updated script..."
-            exec ./"$SCRIPT_NAME" "$@"
-        else
-            # Restore backup on failure
-            mv "$backup_file" "$SCRIPT_NAME"
-            die "Failed to download updated script"
-        fi
-    else
-        log_success "Script is already up to date"
-    fi
-}
-
 #==============================================================================
 # DOCKER COMPOSE MANAGEMENT
 #==============================================================================
@@ -260,37 +217,17 @@ update_all_docker_projects() {
     # Change to Docker folder
     cd "$DOCKER_FOLDER" || die "Cannot access Docker folder: $DOCKER_FOLDER"
 
-    local updated_count=0
-    local failed_count=0
-    local skipped_count=0
-
     # Process each subdirectory
     for project_dir in */; do
         if [[ -d "$project_dir" ]]; then
             local project_path="$DOCKER_FOLDER/$project_dir"
 
-            if update_docker_project "$project_path"; then
-                if should_skip_container; then
-                    ((skipped_count++))
-                else
-                    ((updated_count++))
-                fi
-            else
-                ((failed_count++))
-            fi
-
+            update_docker_project "$project_path"
             # Return to Docker folder for next iteration
             cd "$DOCKER_FOLDER" || die "Cannot return to Docker folder"
         fi
     done
 
-    # Report results
-    log_success "Docker update summary:"
-    log_info "  Updated: $updated_count projects"
-    log_info "  Skipped: $skipped_count projects"
-    if [[ $failed_count -gt 0 ]]; then
-        log_warning "  Failed: $failed_count projects"
-    fi
 }
 
 #==============================================================================
@@ -315,15 +252,14 @@ cleanup_docker_resources() {
 #==============================================================================
 
 main() {
+    START_TIME=$(date +%s)
+
     log_step "Starting Docker Container Updater"
     echo
 
     # Check requirements
     check_docker_requirements
 
-    # Perform self-update if enabled
-    perform_self_update "$@"
-
     # Update all Docker projects
     update_all_docker_projects
 
@@ -332,6 +268,12 @@ main() {
 
     echo
     log_success "Docker container update process completed!"
 
+    END_TIME=$(date +%s)
+    DURATION=$((END_TIME - START_TIME))
+    log_info "Total duration: $DURATION seconds"
+
+    send_update_notification $DURATION
 }
 
 # Execute main function with all arguments
|
|||||||
# Start from the official Golang image for building
|
# Start from the official Golang image for building
|
||||||
ARG TARGETARCH=amd64
|
FROM --platform=$BUILDPLATFORM golang:1.22-alpine AS builder
|
||||||
FROM golang:1.25 AS builder
|
|
||||||
|
ARG TARGETARCH
|
||||||
|
ARG TARGETOS
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
COPY . .
|
COPY . .
|
||||||
# Build statically for Linux and selected architecture
|
# Build statically for Linux
|
||||||
RUN GOOS=linux GOARCH=${TARGETARCH} CGO_ENABLED=0 go build -o haven-notify main.go
|
RUN CGO_ENABLED=0 GOOS=${TARGETOS} GOARCH=${TARGETARCH} go build -o haven-notify main.go
|
||||||
|
|
||||||
# Use Alpine for running, with CA certificates for TLS
|
# Use Alpine for running, with CA certificates for TLS
|
||||||
FROM alpine:latest
|
FROM alpine:latest
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
RUN apk --no-cache add ca-certificates
|
RUN apk --no-cache add ca-certificates
|
||||||
|
COPY template/ template/
|
||||||
COPY --from=builder /app/haven-notify .
|
COPY --from=builder /app/haven-notify .
|
||||||
EXPOSE 8080
|
EXPOSE 8080
|
||||||
ENV WEBHOOK_URL=""
|
ENV WEBHOOK_URL=""
|
||||||
|
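Because the builder stage now reads TARGETOS/TARGETARCH (populated automatically by Buildx per platform) and the templates are copied into the runtime image, the same Dockerfile can be cross-built outside CI as well. A local sketch, assuming a Buildx builder is configured and <image> is a placeholder tag:

  cd haven-notify
  docker buildx build \
      --platform linux/amd64,linux/arm64 \
      -t "<image>:latest" \
      --push .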
Haven Notify README:

@@ -1,4 +1,6 @@
-# Haven Notify
+<div align="center">
+    <img src="./assets/widelogo.png" alt="Haven Notify Logo">
+</div>
 
 ## Overview
 Haven Notify is an internal service designed to send notifications to a specified Discord channel.
@@ -23,13 +25,14 @@ It's built in Go and can be deployed as a container or managed service.
   }
   ```
 
-- **Endpoint**: `/template/notify/backup_template`
+### Send Backup Notification
+- **Endpoint**: `/template/notify/backup`
 - **Method**: `POST`
 - **Request Body**:
   ```json
   {
     "title": "Notification Title",
-    "message": "Notification Message",
+    "asset": "Notification Asset Name",
     "backupSizeInMB": 500,
     "extra": [
       {
@@ -40,6 +43,36 @@ It's built in Go and can be deployed as a container or managed service.
   }
   ```
 
+### Send Update Notification
+- **Endpoint**: `/template/notify/update`
+- **Method**: `POST`
+- **Request Body**:
+  ```json
+  {
+    "host": "Notification Title",
+    "asset": "Notification Message",
+    "time": 500 // in seconds
+  }
+  ```
+
+### Send Error Notification
+- **Endpoint**: `/template/notify/error`
+- **Method**: `POST`
+- **Request Body**:
+  ```json
+  {
+    "caller": "Who triggered the error",
+    "message": "Error while moving file",
+    "critical": true,
+    "extra": [
+      {
+        "name": "Additional Info",
+        "value": "Some extra information"
+      }
+    ]
+  }
+  ```
+
 ## Setup & Usage
 
 ### Docker
haven-notify/assets/widelogo.png (new binary file, 334 KiB; content not shown)
Kubernetes manifest (haven-notify deploy):

@@ -38,8 +38,6 @@ spec:
             port: 8080
           initialDelaySeconds: 5
           periodSeconds: 10
-      nodeSelector:
-        kubernetes.io/arch: amd64
 ---
 apiVersion: v1
 kind: Service
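Removing the amd64 nodeSelector lets the scheduler place haven-notify on any node now that the image is multi-arch. One way to check where the pods land and which architectures the nodes expose, assuming the same kubeconfig.yaml the deploy job uses:

  kubectl --kubeconfig=kubeconfig.yaml get pods -o wide
  kubectl --kubeconfig=kubeconfig.yaml get nodes -L kubernetes.io/arch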
Backup notification template (Discord embed):

@@ -7,12 +7,12 @@ Variables expected:
 - .extra: Optional array of additional fields with .name and .value
 
 Template Functions Available:
-- formatSize: Formats size in MB/GiB automatically
+- formatSize: Formats size in MB/GB automatically
 */}}
 {
   "embeds": [
     {
-      "title": "📦 Docker Backup - {{.title}}",
+      "title": "📦 Backup - {{.title}}",
       "description": "**{{.asset}}** has been backup-ed successfully! ✅🫡\n",
       "color": 3066993,
       "fields": [
@@ -33,8 +33,7 @@ Template Functions Available:
       ],
       "footer": {
         "text": "✨ Haven Notify ✨"
-      }{{if .timestamp}},
-      "timestamp": "{{formatTime .timestamp}}"{{end}}
+      }
     }
   ]
 }
haven-notify/template/error.tmpl (new file, 36 lines)

@@ -0,0 +1,36 @@
+{{/*
+Error Notification Template
+Variables expected:
+- .caller: The caller of the error
+- .message: The error message
+- .critical: Boolean indicating if the error is critical
+- .extra: Optional array of additional fields with .name and .value
+*/}}
+{
+  "embeds": [
+    {
+      "title": "❌ Error",
+      "description": "**{{.caller}}** encountered an error!",
+      "color": {{if .critical}}15158332{{else}}15844367{{end}},
+      "fields": [
+        {
+          "name": "📄 Message",
+          "value": "{{.message}}",
+          "inline": false
+        }
+        {{- if .extra}}
+        {{- range $index, $field := .extra}},
+        {
+          "name": "{{$field.name}}",
+          "value": "{{$field.value}}",
+          "inline": true
+        }
+        {{- end}}
+        {{- end}}
+      ],
+      "footer": {
+        "text": "✨ Haven Notify ✨"
+      }
+    }
+  ]
+}
haven-notify/template/update.tmpl (new file, 29 lines)

@@ -0,0 +1,29 @@
+{{/*
+Update Notification Template
+Variables expected:
+- .host: The host where the update occurred
+- .asset: The asset being updated (Docker or k8s)
+- .time: The time in seconds that the script took to run
+
+Template Functions Available:
+- formatTime: Formats time in seconds to a human-readable format
+*/}}
+{
+  "embeds": [
+    {
+      "title": "🔄 Update - {{.asset}}",
+      "description": "**{{.host}}** has successfully updated **{{.asset}}**! ✅",
+      "color": 3447003,
+      "fields": [
+        {
+          "name": "⏱️ Time Taken",
+          "value": "{{if .time}}{{.time}}{{else}}Unknown{{end}} seconds",
+          "inline": true
+        }
+      ],
+      "footer": {
+        "text": "✨ Haven Notify ✨"
+      }
+    }
+  ]
+}
Crontab schedule configuration (installer script):

@@ -26,9 +26,9 @@ readonly AVAILABLE_SCRIPTS=("clean.sh" "backup.sh" "docker-updater.sh")
 
 # Format: [script_name]="cron_schedule"
 declare -A CRONTAB_SCHEDULES=(
-    ["clean.sh"]="0 23 * * *"           # Daily at 11 PM
-    ["backup.sh"]="30 23 * * 1,5"       # Monday and Friday at 11:30 PM
-    ["docker-updater.sh"]="0 3 */4 * *" # Every 4 days at 3 AM
+    ["clean.sh"]="0 3 * * *"            # Daily at 3 AM
+    ["backup.sh"]="0 23 * * 1,5"        # Monday and Friday at 11 PM
+    ["docker-updater.sh"]="0 3 * * 6"   # Every Saturday at 3 AM
 )
 
 #==============================================================================
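The reworked schedules stagger the three scripts: daily cleanup at 3 AM, backups on Monday and Friday at 11 PM, and the updater on Saturdays at 3 AM. Installed into a crontab they would look roughly like the lines below; the /root/scripts path is an assumption for illustration, not something this diff shows:

  # Hypothetical install path; adjust to wherever the scripts actually live.
  0 3 * * *     /root/scripts/clean.sh
  0 23 * * 1,5  /root/scripts/backup.sh
  0 3 * * 6     /root/scripts/docker-updater.sh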