Compare commits: 7016ced89e ... main
18 Commits

SHA1
96548f4773
c34ee5185d
1489062943
eb8ca78f4f
45567b2242
b0324ac9d8
06cf78a4a6
cd57837696
cedc435df0
aa7a9b8548
11e2a28bd4
8b52bd2c45
442ff12039
12920c10d4
99e110afb3
8108ca7e7b
100262513b
7520d70ce9
@@ -16,41 +16,9 @@ env:
KUBE_CONFIG: ${{ secrets.KUBE_CONFIG }}

jobs:
# build_haven_notify:
# name: Build Haven Notify Image (arm64)
# runs-on: ubuntu-arm64

# steps:
# - name: Check out repository
# uses: actions/checkout@v2

# - name: Install Docker
# run: |
# apt-get update
# apt-get install -y docker.io

# - name: Log in to Container Registry
# run: |
# echo "${{ secrets.REGISTRY_PASSWORD }}" \
# | docker login "${{ env.REGISTRY_HOST }}" \
# -u "${{ env.REGISTRY_USERNAME }}" \
# --password-stdin

# - name: Build and Push Image
# run: |
# TAG=latest

# cd haven-notify

# docker build \
# -t "${{ env.IMAGE_NOTIFY }}:${TAG}" \
# -f Dockerfile .

# docker push "${{ env.IMAGE_NOTIFY }}:${TAG}"

build_haven_notify_amd64:
name: Build Haven Notify Image (amd64)
runs-on: ubuntu-latest
build_haven_notify:
name: Build Haven Notify Image
runs-on: ubuntu-22.04

steps:
- name: Check out repository
@@ -63,27 +31,25 @@ jobs:
-u "${{ env.REGISTRY_USERNAME }}" \
--password-stdin

- name: Display Directory Structure
run: |
echo "Current directory structure:"
ls -lR .
- name: Set up QEMU
uses: docker/setup-qemu-action@v3

- name: Build and Push Image
run: |
TAG=latest
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

cd haven-notify

docker build \
-t "${{ env.IMAGE_NOTIFY }}:${TAG}" \
-f Dockerfile .

docker push "${{ env.IMAGE_NOTIFY }}:${TAG}"
- name: Build and Push Multi-Arch Image
uses: docker/build-push-action@v6
with:
push: true
context: haven-notify
platforms: linux/amd64,linux/arm64
tags: |
${{ env.IMAGE_NOTIFY }}:latest

deploy_haven_notify:
name: Deploy Haven Notify
name: Deploy Haven Notify (internal)
runs-on: ubuntu-amd64
needs: build_haven_notify_amd64
needs: build_haven_notify
steps:
- name: Check KUBE_CONFIG validity
run: |
@@ -110,7 +76,7 @@ jobs:
env:
KUBE_CONFIG: ${{ env.KUBE_CONFIG }}

- name: Test connection to cluster
- name: Check connection to cluster
run: |
cd haven-notify/deploy
kubectl --kubeconfig=kubeconfig.yaml cluster-info
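Note: the workflow change above drops the separate per-architecture build jobs in favour of a single job that uses QEMU emulation plus Buildx to publish one multi-arch image. A rough local equivalent of those steps might look like the sketch below; the image name is a placeholder, not the workflow's `IMAGE_NOTIFY` value.

```sh
# Local sketch of the setup-qemu / setup-buildx / build-push-action steps.
# IMAGE is a placeholder; the workflow uses "${{ env.IMAGE_NOTIFY }}".
IMAGE=registry.example.com/haven-notify

# Register QEMU emulators so arm64 layers can be built on an amd64 host
docker run --privileged --rm tonistiigi/binfmt --install all

# Create and select a Buildx builder, then build and push one multi-arch manifest
docker buildx create --name multiarch --use
docker buildx build \
  --platform linux/amd64,linux/arm64 \
  -t "$IMAGE:latest" \
  --push haven-notify
```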
backup.sh (71 changed lines)
@@ -1,53 +1,23 @@
#!/bin/bash

### AUTO-UPDATER ###
# Variables
SERVER_FILE="https://git.ivanch.me/ivanch/server-scripts/raw/branch/main/backup.sh"
SERVER_OK=1
HOSTNAME=$(cat /etc/hostname)

NOTIFY_URL="http://notify.haven/notify"

# Function to send notification
send_notify() {
local title="$1"
local message="$2"
curl -s -X POST "$NOTIFY_URL" \
HOSTNAME=$(cat /etc/hostname)
NOTIFY_URL_ERROR="http://notify.haven/template/notify/error"
NOTIFY_URL_BACKUP="http://notify.haven/template/notify/backup"
send_error_notification() {
local message="$1"
local critical="$2"
curl -s -X POST "$NOTIFY_URL_ERROR" \
-H "Content-Type: application/json" \
-d "{\"title\": \"Docker Backup - $HOSTNAME\", \"message\": \"$message\"}"
-d "{\"caller\": \"Docker Backup - $HOSTNAME\", \"message\": \"$message\", \"critical\": $critical}"
}
send_backup_notification() {
local message="$1"
local backup_size="$2"
curl -s -X POST "$NOTIFY_URL_BACKUP" \
-H "Content-Type: application/json" \
-d "{\"title\": \"Docker Backup - $HOSTNAME\", \"message\": \"$message\", \"backupSizeInMB\": $backup_size}"
}

# Check if the server file exists
curl -s --head $SERVER_FILE | head -n 1 | grep -E "HTTP/[12] [23].." > /dev/null

if [ $? -ne 0 ]; then
echo "Error: $SERVER_FILE not found." >&2
send_notify "" "❌ Server file not found: $SERVER_FILE"
SERVER_OK=0
fi

if [ $SERVER_OK -eq 1 ]; then
echo "Running auto-update..."

# Compare the local and server files sha256sum to check if an update is needed
LOCAL_SHA256=$(sha256sum backup.sh | awk '{print $1}')
SERVER_SHA256=$(curl -s $SERVER_FILE | sha256sum | awk '{print $1}')

if [ "$LOCAL_SHA256" != "$SERVER_SHA256" ]; then
echo "Updating backup.sh..."
curl -s -o backup.sh $SERVER_FILE
echo "backup.sh updated."

chmod +x backup.sh
echo "Permissions set up."

echo "Running updated backup.sh..."
./backup.sh
exit 0
else
echo "backup.sh is up to date.."
fi
fi

####################

@@ -61,31 +31,32 @@ REMOTE_DIR="/export/Backup/Docker/$(cat /etc/hostname)"
# Create a compressed backup file
zip -q -r $BACKUP_FILE $SOURCE_DIR || true
if [ $? -ne 0 ]; then
send_notify "" "⚠️ Some files or folders in $SOURCE_DIR could not be backed up (possibly in use or locked). Backup archive created with available files."
send_error_notification "⚠️ Some files or folders in $SOURCE_DIR could not be backed up (possibly in use or locked). Backup archive created with available files." false
fi

# Check if remote path exists
if ! ssh $REMOTE_USER@$REMOTE_HOST "mkdir -p $REMOTE_DIR"; then
send_notify "" "❌ Failed to create remote directory: $REMOTE_DIR on $REMOTE_HOST"
send_error_notification "❌ Failed to create remote directory: $REMOTE_DIR on $REMOTE_HOST" true
exit 1
fi

# Transfer the backup file to the remote server
if ! scp $BACKUP_FILE $REMOTE_USER@$REMOTE_HOST:$REMOTE_DIR; then
send_notify "" "❌ Failed to transfer backup file to remote server: $REMOTE_HOST:$REMOTE_DIR"
send_error_notification "❌ Failed to transfer backup file to remote server: $REMOTE_HOST:$REMOTE_DIR" true
exit 1
fi

# Remove the backup file
BACKUP_SIZE=$(du -m $BACKUP_FILE | cut -f1)
rm $BACKUP_FILE

# Erase last 7 days backups from remote server
if ! ssh $REMOTE_USER@$REMOTE_HOST "find $REMOTE_DIR -type f -name 'docker_backup_*' -mtime +7 -exec rm {} \;"; then
send_notify "" "⚠️ Failed to clean old backups on remote server: $REMOTE_HOST:$REMOTE_DIR"
send_error_notification "⚠️ Failed to clean old backups on remote server: $REMOTE_HOST:$REMOTE_DIR" false
fi

# Success notification
send_notify "" "✅ Backup completed successfully for: $SOURCE_DIR to $REMOTE_HOST:$REMOTE_DIR"
send_backup_notification "✅ Backup completed successfully for: $SOURCE_DIR to $REMOTE_HOST:$REMOTE_DIR" $BACKUP_SIZE
echo "Backup completed successfully"
exit 0
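The rewritten backup.sh no longer posts to the generic `/notify` endpoint; it calls the templated `/template/notify/error` and `/template/notify/backup` endpoints instead. The curl sketch below mimics those calls by hand for testing; the hostname and backup size are made-up values.

```sh
# Hand-rolled versions of send_error_notification and send_backup_notification
# ("myhost" and the 512 MB size are placeholders).
curl -s -X POST "http://notify.haven/template/notify/error" \
  -H "Content-Type: application/json" \
  -d '{"caller": "Docker Backup - myhost", "message": "⚠️ test warning", "critical": false}'

curl -s -X POST "http://notify.haven/template/notify/backup" \
  -H "Content-Type: application/json" \
  -d '{"title": "Docker Backup - myhost", "message": "✅ test backup", "backupSizeInMB": 512}'
```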
clean.sh (81 changed lines)
@@ -4,7 +4,6 @@
#
# Description: Comprehensive system cleanup for Docker containers and Linux systems
# Features:
# - Self-updating capability
# - Docker resource cleanup (images, containers, volumes, networks)
# - Package manager cache cleanup (APK/APT)
# - System cache and temporary file cleanup
@@ -30,10 +29,6 @@ readonly LIGHT_BLUE='\033[1;34m'
readonly LIGHT_GREY='\033[0;37m'
readonly YELLOW='\033[1;33m'

# Script configuration
readonly SCRIPT_NAME="clean.sh"
readonly SERVER_BASE_URL="https://git.ivanch.me/ivanch/server-scripts/raw/branch/main"

# Cleanup configuration
readonly LOG_RETENTION_DAYS=30
readonly JOURNAL_RETENTION_DAYS=7
@@ -133,79 +128,6 @@ get_system_info() {
echo "$info"
}

#==============================================================================
# AUTO-UPDATE FUNCTIONALITY
#==============================================================================

# Check server connectivity
check_server_connectivity() {
local url="$1"
curl -s --head "$url" | head -n 1 | grep -E "HTTP/[12] [23].." >/dev/null 2>&1
}

# Get SHA256 hash of a file
get_file_hash() {
local file="$1"
sha256sum "$file" 2>/dev/null | awk '{print $1}' || echo ""
}

# Get SHA256 hash from URL content
get_url_hash() {
local url="$1"
curl -s "$url" 2>/dev/null | sha256sum | awk '{print $1}' || echo ""
}

# Perform self-update if newer version is available
perform_self_update() {
if [[ "$AUTO_UPDATE_ENABLED" != "true" ]]; then
log_info "Auto-update is disabled"
return 0
fi

local server_url="$SERVER_BASE_URL/$SCRIPT_NAME"

log_step "Checking for script updates..."

# Check if server file is accessible
if ! check_server_connectivity "$server_url"; then
log_warning "Cannot connect to update server, continuing with current version"
return 0
fi

# Compare local and server file hashes
local local_hash server_hash
local_hash=$(get_file_hash "$SCRIPT_NAME")
server_hash=$(get_url_hash "$server_url")

if [[ -z "$local_hash" || -z "$server_hash" ]]; then
log_warning "Cannot determine file hashes, skipping update"
return 0
fi

if [[ "$local_hash" != "$server_hash" ]]; then
log_info "Update available, downloading new version..."

# Create backup of current script
local backup_file="${SCRIPT_NAME}.backup.$(date +%s)"
cp "$SCRIPT_NAME" "$backup_file" || die "Failed to create backup"

# Download updated script
if curl -s -o "$SCRIPT_NAME" "$server_url"; then
chmod +x "$SCRIPT_NAME" || die "Failed to set executable permissions"
log_success "Script updated successfully"

log_step "Running updated script..."
exec ./"$SCRIPT_NAME" "$@"
else
# Restore backup on failure
mv "$backup_file" "$SCRIPT_NAME"
die "Failed to download updated script"
fi
else
log_success "Script is already up to date"
fi
}

#==============================================================================
# DOCKER CLEANUP FUNCTIONS
#==============================================================================
@@ -526,9 +448,6 @@ main() {
echo
fi

# Perform self-update if enabled
perform_self_update "$@"

# Docker cleanup
cleanup_docker
@@ -4,7 +4,6 @@
#
# Description: Automatically updates Docker containers and manages Docker images
# Features:
# - Self-updating capability
# - Updates all Docker Compose projects in /root/docker
# - Skips containers with .ignore file
# - Removes obsolete Docker Compose version attributes
@@ -13,6 +12,22 @@
# Version: 2.0

set -euo pipefail  # Exit on error, undefined vars, and pipe failures
HOSTNAME=$(cat /etc/hostname)
NOTIFY_URL_ERROR="http://notify.haven/template/notify/error"
NOTIFY_URL_UPDATE="http://notify.haven/template/notify/update"
send_error_notification() {
local message="$1"
local critical="$2"
curl -s -X POST "$NOTIFY_URL_ERROR" \
-H "Content-Type: application/json" \
-d "{\"caller\": \"$HOSTNAME\", \"message\": \"$message\", \"critical\": $critical}"
}
send_update_notification() {
local script_time="$1"
curl -s -X POST "$NOTIFY_URL_UPDATE" \
-H "Content-Type: application/json" \
-d "{\"host\": \"$HOSTNAME\", \"asset\": \"Docker containers\", \"time\": $script_time}"
}

#==============================================================================
# CONFIGURATION
@@ -28,8 +43,6 @@ readonly LIGHT_GREY='\033[0;37m'
readonly YELLOW='\033[1;33m'

# Script configuration
readonly SCRIPT_NAME="docker-updater.sh"
readonly SERVER_BASE_URL="https://git.ivanch.me/ivanch/server-scripts/raw/branch/main"
readonly DOCKER_FOLDER="/root/docker"
readonly COMPOSE_FILES=("docker-compose.yml" "docker-compose.yaml" "compose.yaml" "compose.yml")

@@ -43,11 +56,18 @@ readonly AUTO_UPDATE_ENABLED=true
# Print formatted log messages
log_info() { echo -e "${LIGHT_GREY}[i] $1${NC}"; }
log_success() { echo -e "${LIGHT_GREEN}[✓] $1${NC}"; }
log_warning() { echo -e "${YELLOW}[!] $1${NC}"; }
log_error() { echo -e "${RED}[x] $1${NC}" >&2; }
log_step() { echo -e "${LIGHT_BLUE}[i] $1${NC}"; }
log_container() { echo -e "${LIGHT_BLUE}[$1] $2${NC}"; }

log_warning() {
echo -e "${YELLOW}[!] $1${NC}";
send_error_notification "$1" false
}
log_error() {
echo -e "${RED}[x] $1${NC}" >&2;
send_error_notification "$1" true
}

# Exit with error message
die() {
log_error "$1"
@@ -86,69 +106,6 @@ get_url_hash() {
curl -s "$url" 2>/dev/null | sha256sum | awk '{print $1}' || echo ""
}

# Check if server file is accessible
check_server_connectivity() {
local url="$1"
curl -s --head "$url" | head -n 1 | grep -E "HTTP/[12] [23].." >/dev/null 2>&1
}

#==============================================================================
# AUTO-UPDATE FUNCTIONALITY
#==============================================================================

# Perform self-update if newer version is available
perform_self_update() {
if [[ "$AUTO_UPDATE_ENABLED" != "true" ]]; then
log_info "Auto-update is disabled"
return 0
fi

local server_url="$SERVER_BASE_URL/$SCRIPT_NAME"

log_step "Checking for script updates..."

# Check if server file is accessible
if ! check_server_connectivity "$server_url"; then
log_warning "Cannot connect to update server, continuing with current version"
return 0
fi

# Compare local and server file hashes
local local_hash
local server_hash

local_hash=$(get_file_hash "$SCRIPT_NAME")
server_hash=$(get_url_hash "$server_url")

if [[ -z "$local_hash" || -z "$server_hash" ]]; then
log_warning "Cannot determine file hashes, skipping update"
return 0
fi

if [[ "$local_hash" != "$server_hash" ]]; then
log_info "Update available, downloading new version..."

# Create backup of current script
local backup_file="${SCRIPT_NAME}.backup.$(date +%s)"
cp "$SCRIPT_NAME" "$backup_file" || die "Failed to create backup"

# Download updated script
if curl -s -o "$SCRIPT_NAME" "$server_url"; then
chmod +x "$SCRIPT_NAME" || die "Failed to set executable permissions"
log_success "Script updated successfully"

log_step "Running updated script..."
exec ./"$SCRIPT_NAME" "$@"
else
# Restore backup on failure
mv "$backup_file" "$SCRIPT_NAME"
die "Failed to download updated script"
fi
else
log_success "Script is already up to date"
fi
}

#==============================================================================
# DOCKER COMPOSE MANAGEMENT
#==============================================================================
@@ -260,37 +217,17 @@ update_all_docker_projects() {
# Change to Docker folder
cd "$DOCKER_FOLDER" || die "Cannot access Docker folder: $DOCKER_FOLDER"

local updated_count=0
local failed_count=0
local skipped_count=0

# Process each subdirectory
for project_dir in */; do
if [[ -d "$project_dir" ]]; then
local project_path="$DOCKER_FOLDER/$project_dir"

if update_docker_project "$project_path"; then
if should_skip_container; then
((skipped_count++))
else
((updated_count++))
fi
else
((failed_count++))
fi
update_docker_project "$project_path"

# Return to Docker folder for next iteration
cd "$DOCKER_FOLDER" || die "Cannot return to Docker folder"
fi
done

# Report results
log_success "Docker update summary:"
log_info " Updated: $updated_count projects"
log_info " Skipped: $skipped_count projects"
if [[ $failed_count -gt 0 ]]; then
log_warning " Failed: $failed_count projects"
fi
}

#==============================================================================
@@ -315,15 +252,14 @@ cleanup_docker_resources() {
#==============================================================================

main() {
START_TIME=$(date +%s)

log_step "Starting Docker Container Updater"
echo

# Check requirements
check_docker_requirements

# Perform self-update if enabled
perform_self_update "$@"

# Update all Docker projects
update_all_docker_projects

@@ -332,6 +268,12 @@ main() {

echo
log_success "Docker container update process completed!"

END_TIME=$(date +%s)
DURATION=$((END_TIME - START_TIME))
log_info "Total duration: $DURATION seconds"

send_update_notification $DURATION
}

# Execute main function with all arguments
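docker-updater.sh keeps its `.ignore` opt-out ("Skips containers with .ignore file") and now reports its run duration to the update endpoint. A sketch of how that is presumably used; the project name is a placeholder, and the exact location of the `.ignore` marker is an assumption based on the feature list.

```sh
# Opt a Compose project out of automatic updates (assumed: the marker file
# lives in the project directory under /root/docker; "myproject" is a placeholder)
touch /root/docker/myproject/.ignore

# Run the updater by hand; on completion it POSTs the duration to
# http://notify.haven/template/notify/update
./docker-updater.sh
```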
@@ -1,15 +1,19 @@
# Start from the official Golang image for building
ARG TARGETARCH=amd64
FROM golang:1.25 AS builder
FROM --platform=$BUILDPLATFORM golang:1.22-alpine AS builder

ARG TARGETARCH
ARG TARGETOS

WORKDIR /app
COPY . .
# Build statically for Linux and selected architecture
RUN GOOS=linux GOARCH=${TARGETARCH} CGO_ENABLED=0 go build -o haven-notify main.go
# Build statically for Linux
RUN CGO_ENABLED=0 GOOS=${TARGETOS} GOARCH=${TARGETARCH} go build -o haven-notify main.go

# Use Alpine for running, with CA certificates for TLS
FROM alpine:latest
WORKDIR /app
RUN apk --no-cache add ca-certificates
COPY template/ template/
COPY --from=builder /app/haven-notify .
EXPOSE 8080
ENV WEBHOOK_URL=""
haven-notify/README.md (new file, 95 lines)
@@ -0,0 +1,95 @@
<div align="center">
  <img src="./assets/widelogo.png" alt="Haven Notify Logo">
</div>

## Overview
Haven Notify is an internal service designed to send notifications to a specified Discord channel.

It's built in Go and can be deployed as a container or managed service.

## Prerequisites
- Go 1.18 or newer
- Docker
- A Discord Webhook URL

## API Specification

### Send Notification
- **Endpoint**: `/notify`
- **Method**: `POST`
- **Request Body**:
  ```json
  {
    "title": "Notification Title",
    "message": "Notification Message"
  }
  ```

### Send Backup Notification
- **Endpoint**: `/template/notify/backup`
- **Method**: `POST`
- **Request Body**:
  ```json
  {
    "title": "Notification Title",
    "asset": "Notification Asset Name",
    "backupSizeInMB": 500,
    "extra": [
      {
        "name": "Additional Info",
        "value": "Some extra information"
      }
    ]
  }
  ```

### Send Update Notification
- **Endpoint**: `/template/notify/update`
- **Method**: `POST`
- **Request Body**:
  ```json
  {
    "host": "Host that performed the update",
    "asset": "Asset that was updated",
    "time": 500 // in seconds
  }
  ```

### Send Error Notification
- **Endpoint**: `/template/notify/error`
- **Method**: `POST`
- **Request Body**:
  ```json
  {
    "caller": "Who triggered the error",
    "message": "Error while moving file",
    "critical": true,
    "extra": [
      {
        "name": "Additional Info",
        "value": "Some extra information"
      }
    ]
  }
  ```

## Setup & Usage

### Docker
1. Build the Docker image:
   ```sh
   docker build -t haven-notify .
   ```
2. Run the container:
   ```sh
   docker run -e WEBHOOK_URL=your_webhook_url haven-notify
   ```

### Kubernetes
Deployment manifest is available at `deploy/haven-notify.yaml`.
1. Edit the manifest to set your environment variables.
2. Create a generic secret named `discord-webhook` with the key `HAVEN_WEBHOOK_URL` set to your webhook URL.
3. Apply deployment:
   ```sh
   kubectl apply -f deploy/haven-notify.yaml
   ```
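With the service running, the `/notify` endpoint documented above can be smoke-tested with curl. The sketch assumes the Docker example is started with the container port published (`-p 8080:8080`), which the README's run command does not include.

```sh
# Smoke-test the plain notification endpoint on a locally running container
# (assumes: docker run -p 8080:8080 -e WEBHOOK_URL=... haven-notify)
curl -s -X POST "http://localhost:8080/notify" \
  -H "Content-Type: application/json" \
  -d '{"title": "Test", "message": "Hello from curl"}'
```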
haven-notify/assets/widelogo.png (new binary file, 334 KiB; binary content not shown)
@@ -26,8 +26,18 @@ spec:
secretKeyRef:
name: discord-webhook
key: HAVEN_WEBHOOK_URL
nodeSelector:
kubernetes.io/arch: amd64
readinessProbe:
httpGet:
path: /ready
port: 8080
initialDelaySeconds: 5
periodSeconds: 10
livenessProbe:
httpGet:
path: /live
port: 8080
initialDelaySeconds: 5
periodSeconds: 10
---
apiVersion: v1
kind: Service
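The manifest above reads the webhook from a secret named `discord-webhook` with the key `HAVEN_WEBHOOK_URL`, and the new probes hit `/ready` and `/live` on port 8080. Below is a sketch of creating that secret and spot-checking the probes; the service name `haven-notify` and the current namespace are assumptions, since the Service definition is cut off in this hunk.

```sh
# Create the secret the Deployment's secretKeyRef expects
kubectl create secret generic discord-webhook \
  --from-literal=HAVEN_WEBHOOK_URL="$WEBHOOK_URL"

# Spot-check the probe endpoints from inside the cluster
# (assumes the Service is named "haven-notify" in the current namespace)
kubectl run probe-check --rm -it --restart=Never --image=curlimages/curl -- \
  sh -c 'curl -s http://haven-notify:8080/ready; curl -s http://haven-notify:8080/live'
```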
@@ -1,13 +1,16 @@

package main

import (
"bytes"
"encoding/json"
"fmt"
"html/template"
"io/ioutil"
"log"
"net/http"
"os"
"strings"
"time"
)

// Notification payload
@@ -18,6 +21,9 @@ type Notification struct {

func main() {
http.HandleFunc("/notify", notifyHandler)
http.HandleFunc("/ready", readinessHandler)
http.HandleFunc("/live", livenessHandler)
http.HandleFunc("/template/notify/", templateNotifyHandler)
log.Println("Starting server on :8080...")
log.Fatal(http.ListenAndServe(":8080", nil))
}
@@ -53,6 +59,18 @@ func notifyHandler(w http.ResponseWriter, r *http.Request) {
w.Write([]byte("Notification sent"))
}

// Readiness handler
func readinessHandler(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusOK)
w.Write([]byte("Ready"))
}

// Liveness handler
func livenessHandler(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusOK)
w.Write([]byte("Alive"))
}

func sendDiscordNotification(title, message string) error {
webhookURL := os.Getenv("WEBHOOK_URL")
if webhookURL == "" {
@@ -90,3 +108,107 @@ func sendDiscordNotification(title, message string) error {
log.Printf("Discord notification sent successfully: Title='%s'", title)
return nil
}

func templateNotifyHandler(w http.ResponseWriter, r *http.Request) {
log.Printf("Incoming %s request from %s to %s", r.Method, r.RemoteAddr, r.URL.Path)
if r.Method != http.MethodPost {
log.Printf("Method not allowed: %s", r.Method)
w.WriteHeader(http.StatusMethodNotAllowed)
w.Write([]byte("Method not allowed"))
return
}

templateName := r.URL.Path[len("/template/notify/"):] // Extract template name
if templateName == "" {
log.Printf("Template name not provided")
w.WriteHeader(http.StatusBadRequest)
w.Write([]byte("Template name not provided"))
return
}

templatePath := "template/" + templateName + ".tmpl"
templateData, err := ioutil.ReadFile(templatePath)
if err != nil {
log.Printf("Failed to read template: %v", err)
w.WriteHeader(http.StatusInternalServerError)
w.Write([]byte("Failed to read template"))
return
}

tmpl, err := template.New(templateName).Funcs(template.FuncMap{
"formatSize": func(size float64) string {
if size > 1024 {
return fmt.Sprintf("%.2f GiB", size/1024)
}
return fmt.Sprintf("%.2f MiB", size)
},
"upper": strings.ToUpper,
"lower": strings.ToLower,
"title": strings.Title,
"now": func() string {
return fmt.Sprintf("%d", time.Now().Unix())
},
"formatTime": func(timestamp string) string {
if timestamp == "" {
return time.Now().Format("2006-01-02T15:04:05Z")
}
return timestamp
},
}).Parse(string(templateData))
if err != nil {
log.Printf("Failed to parse template: %v", err)
w.WriteHeader(http.StatusInternalServerError)
w.Write([]byte("Failed to parse template"))
return
}

var rawPayload map[string]interface{}
if err := json.NewDecoder(r.Body).Decode(&rawPayload); err != nil {
log.Printf("Invalid payload: %v", err)
w.WriteHeader(http.StatusBadRequest)
w.Write([]byte("Invalid payload"))
return
}

// Normalize keys to lowercase for case-insensitive parsing
payload := make(map[string]interface{})
for key, value := range rawPayload {
payload[strings.ToLower(key)] = value
}

var filledTemplate bytes.Buffer
if err := tmpl.Execute(&filledTemplate, payload); err != nil {
log.Printf("Failed to execute template: %v", err)
w.WriteHeader(http.StatusInternalServerError)
w.Write([]byte("Failed to execute template"))
return
}

webhookURL := os.Getenv("WEBHOOK_URL")
if webhookURL == "" {
log.Printf("WEBHOOK_URL environment variable not set")
w.WriteHeader(http.StatusInternalServerError)
w.Write([]byte("WEBHOOK_URL environment variable not set"))
return
}

resp, err := http.Post(webhookURL, "application/json", &filledTemplate)
if err != nil {
log.Printf("Error posting to Discord webhook: %v", err)
w.WriteHeader(http.StatusInternalServerError)
w.Write([]byte("Failed to send notification"))
return
}
defer resp.Body.Close()

if resp.StatusCode < 200 || resp.StatusCode >= 300 {
log.Printf("Discord webhook returned status: %s", resp.Status)
w.WriteHeader(http.StatusInternalServerError)
w.Write([]byte("Failed to send notification"))
return
}

log.Printf("Notification sent successfully using template '%s'", templateName)
w.WriteHeader(http.StatusOK)
w.Write([]byte("Notification sent"))
}
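templateNotifyHandler lower-cases the incoming JSON keys before executing the template, so `backupSizeInMB` from the README maps onto `.backupsizeinmb` in backup.tmpl, and `formatSize` switches from MiB to GiB above 1024. A quick check of that path against a local instance; the published port is the same assumption as in the earlier curl example.

```sh
# Mixed-case keys are accepted, and 2048 MB should render as "2.00 GiB"
curl -s -X POST "http://localhost:8080/template/notify/backup" \
  -H "Content-Type: application/json" \
  -d '{"Title": "Nightly", "Asset": "Docker volumes", "backupSizeInMB": 2048}'
```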
haven-notify/template/backup.tmpl (new file, 39 lines)
@@ -0,0 +1,39 @@
{{/*
Docker Backup Notification Template
Variables expected:
- .title: The backup title/name
- .asset: The asset being backed up
- .backupsizeinmb: The backup size in MB (will be formatted automatically)
- .extra: Optional array of additional fields with .name and .value

Template Functions Available:
- formatSize: Formats size in MB/GB automatically
*/}}
{
  "embeds": [
    {
      "title": "📦 Backup - {{.title}}",
      "description": "**{{.asset}}** has been backup-ed successfully! ✅🫡\n",
      "color": 3066993,
      "fields": [
        {
          "name": "💾 Backup Size",
          "value": "{{if .backupsizeinmb}}{{formatSize .backupsizeinmb}}{{else}}Unknown{{end}}",
          "inline": true
        }
        {{- if .extra}}
        {{- range $index, $field := .extra}},
        {
          "name": "{{$field.name}}",
          "value": "{{$field.value}}",
          "inline": true
        }
        {{- end}}
        {{- end}}
      ],
      "footer": {
        "text": "✨ Haven Notify ✨"
      }
    }
  ]
}
haven-notify/template/error.tmpl (new file, 36 lines)
@@ -0,0 +1,36 @@
{{/*
Error Notification Template
Variables expected:
- .caller: The caller of the error
- .message: The error message
- .critical: Boolean indicating if the error is critical
- .extra: Optional array of additional fields with .name and .value
*/}}
{
  "embeds": [
    {
      "title": "❌ Error",
      "description": "**{{.caller}}** encountered an error!",
      "color": {{if .critical}}15158332{{else}}15844367{{end}},
      "fields": [
        {
          "name": "📄 Message",
          "value": "{{.message}}",
          "inline": false
        }
        {{- if .extra}}
        {{- range $index, $field := .extra}},
        {
          "name": "{{$field.name}}",
          "value": "{{$field.value}}",
          "inline": true
        }
        {{- end}}
        {{- end}}
      ],
      "footer": {
        "text": "✨ Haven Notify ✨"
      }
    }
  ]
}
haven-notify/template/update.tmpl (new file, 29 lines)
@@ -0,0 +1,29 @@
{{/*
Update Notification Template
Variables expected:
- .host: The host where the update occurred
- .asset: The asset being updated (Docker or k8s)
- .time: The time in seconds that the script took to run

Template Functions Available:
- formatTime: Formats time in seconds to a human-readable format
*/}}
{
  "embeds": [
    {
      "title": "🔄 Update - {{.asset}}",
      "description": "**{{.host}}** has successfully updated **{{.asset}}**! ✅",
      "color": 3447003,
      "fields": [
        {
          "name": "⏱️ Time Taken",
          "value": "{{if .time}}{{.time}}{{else}}Unknown{{end}} seconds",
          "inline": true
        }
      ],
      "footer": {
        "text": "✨ Haven Notify ✨"
      }
    }
  ]
}
@@ -26,9 +26,9 @@ readonly AVAILABLE_SCRIPTS=("clean.sh" "backup.sh" "docker-updater.sh")

# Format: [script_name]="cron_schedule"
declare -A CRONTAB_SCHEDULES=(
["clean.sh"]="0 23 * * *" # Daily at 11 PM
["backup.sh"]="30 23 * * 1,5" # Monday and Friday at 11:30 PM
["docker-updater.sh"]="0 3 */4 * *" # Every 4 days at 3 AM
["clean.sh"]="0 3 * * *" # Daily at 3 AM
["backup.sh"]="0 23 * * 1,5" # Monday and Friday at 11 PM
["docker-updater.sh"]="0 3 * * 6" # Every Saturday at 3 AM
)

#==============================================================================
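For reference, the updated `CRONTAB_SCHEDULES` would translate into crontab entries along these lines; the `/root/scripts` install path is an assumption, since the installer that writes the crontab is not shown in this diff.

```sh
# Approximate resulting crontab entries (install path assumed)
0 3 * * *    /root/scripts/clean.sh            # daily at 3 AM
0 23 * * 1,5 /root/scripts/backup.sh           # Monday and Friday at 11 PM
0 3 * * 6    /root/scripts/docker-updater.sh   # Saturday at 3 AM
```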
@@ -4,18 +4,41 @@ $7zipPath = "$env:ProgramFiles\7-Zip\7z.exe"
if (!(Test-Path "$env:ProgramFiles\7-Zip\7z.exe")) {
Write-Host "7-Zip is not installed. Please install it to use this script."
exit 1
Send-Notify "❌ 7-Zip is not installed. Backup aborted."
}

$BackupSource = @(
"$env:USERPROFILE\Documents",
"$env:USERPROFILE\Desktop",
"$env:USERPROFILE\Pictures"
"$env:USERPROFILE\Pictures",
"$env:USERPROFILE\.ssh",
"$env:USERPROFILE\.kube"
)

$NASDestination = "\\OMV\Backup\$env:COMPUTERNAME"
$TempDir = "$env:TEMP\BackupTemp"
$Date = Get-Date -Format "yyyy-MM-dd"

$NotifyUrl = "http://notify.haven/notify"

function Send-Notify {
param (
[string]$Message
)
if (-not $NotifyUrl) {
Write-Host "NOTIFY_URL environment variable is not set. Notification not sent."
return
}
$Title = "Backup - $env:COMPUTERNAME"
$Body = @{ title = $Title; message = $Message } | ConvertTo-Json
try {
Invoke-RestMethod -Uri $NotifyUrl -Method Post -ContentType 'application/json' -Body $Body | Out-Null
Write-Host "Notification sent: $Title - $Message"
} catch {
Write-Host "Failed to send notification: $_"
}
}

# Create temp directory
New-Item -ItemType Directory -Path $TempDir -Force

@@ -30,18 +53,26 @@ foreach ($Folder in $BackupSource) {
$ZipFile = "$TempDir\$FolderName-$Date.zip"

Write-Host "Compressing $Folder..."
& "$7zipPath" a -tzip "$ZipFile" "$Folder\*" -mx=9
$compressResult = & "$7zipPath" a -tzip "$ZipFile" "$Folder\*" -mx=9
if ($LASTEXITCODE -ne 0) {
Write-Host "Compression failed for $Folder."
Send-Notify "❌ Compression failed for $Folder."
continue
}

Write-Host "Copying $ZipFile to NAS..."
Copy-Item $ZipFile $NASDestination -Force

Write-Host "Removing $ZipFile..."
Remove-Item $ZipFile
} else {
Write-Host "Source folder not found: $Folder"
Send-Notify "⚠️ Source folder not found: $Folder"
}
}

Write-Host "Removing Files older than 15 days from $NASDestination..."
$OldFiles = Get-ChildItem -Path $NASDestination -File | Where-Object { $_.LastWriteTime -lt (Get-Date).AddDays(-15) }
Write-Host "Removing Files older than 7 days from $NASDestination..."
$OldFiles = Get-ChildItem -Path $NASDestination -File | Where-Object { $_.LastWriteTime -lt (Get-Date).AddDays(-7) }
foreach ($OldFile in $OldFiles) {
Remove-Item $OldFile.FullName -Force
Write-Host "Removed: $($OldFile.FullName)"
@@ -50,3 +81,4 @@ foreach ($OldFile in $OldFiles) {
# Cleanup
Remove-Item $TempDir -Recurse -Force
Write-Host "Backup completed!"
Send-Notify "✅ Backup completed successfully."