setup cron local mail reporting and longhorn recurring backup job

This commit is contained in:
2025-09-01 19:38:50 +02:00
parent 2d4cb5d8a5
commit 7fb534d737
11 changed files with 275 additions and 25 deletions

View File

@@ -1,6 +1,6 @@
---
# template source: https://github.com/bretfisher/docker-build-workflow/blob/main/templates/call-docker-build.yaml
name: Postgres
name: IAC
on: #[push,pull_request]
workflow_dispatch: {}
@@ -28,7 +28,7 @@ concurrency:
method: jwt
path: gitea_jwt
secrets: |
kvv1/google/credentials credentials | GOOGLE_BACKEND_CREDENTIALS ;
kvv1/google/credentials credentials | GOOGLE_CREDENTIALS ;
kvv1/admin/gitea token | GITEA_TOKEN
jobs:

View File

@@ -0,0 +1,13 @@
# Backups
Écris les scripts de backup (pour écrire des archives dans /mnt/backups)
Écris dans la crontab pour les exécuter périodiquement
Aller sur la machine et utiliser
```sh
sudo su
mails
```
pour lire les erreurs
Longhorn se charge de snapshot le contenu de /mnt/backups, de le répliquer et d'en envoyer une version dans le cloud.

View File

@@ -1,9 +1,12 @@
---
# - name: postgres
# ansible.builtin.import_playbook: postgres.yml
# vars:
# backup_root_dir: "/mnt/backups"
# backup_dirname: "postgres"
- name: setup cron report
ansible.builtin.import_playbook: cron_report.yml
- name: postgres
ansible.builtin.import_playbook: postgres.yml
vars:
backup_root_dir: "/mnt/backups"
backup_dirname: "postgres"
- name: gitea
ansible.builtin.import_playbook: gitea.yml

View File

@@ -0,0 +1,127 @@
---
# Playbook: set up a LOCAL-ONLY mail stack (Postfix + msmtp) on the Raspberry
# Pis so that cron job output is delivered to root's local mailbox, with a
# daily rotation of the msmtp log. Finishes by importing the mail-reader
# (neomutt) playbook.
- name: MTA local complet pour Raspberry Pi avec livraison automatique de cron à root
  hosts: raspberries:&local
  become: true
  vars:
    msmtp_log_dir: "/var/log/msmtp"
    msmtp_log_file: "{{ msmtp_log_dir }}/msmtp.log"
    # Rotated logs older than this many days are deleted.
    msmtp_log_retention_days: 7
    rotate_script: "/usr/local/bin/rotate_msmtp_logs.sh"

  pre_tasks:
    # The rotation script doubles as an "already provisioned" marker: when it
    # exists, the whole play is skipped (cheap run-once guard).
    - name: Vérifier si le script de rotation existe
      ansible.builtin.stat:
        path: "{{ rotate_script }}"
      register: rotate_script_stat

    - name: Ignorer le reste du playbook si le script existe
      ansible.builtin.meta: end_play
      when: rotate_script_stat.stat.exists

  tasks:
    - name: Installer Postfix, msmtp et mailutils
      ansible.builtin.apt:
        name:
          - postfix
          - msmtp
          - msmtp-mta
          - mailutils
        state: present
        update_cache: true

    - name: Configurer Postfix en mode local only
      ansible.builtin.debconf:
        name: postfix
        question: "postfix/main_mailer_type"
        value: "Local only"
        vtype: "string"

    - name: Redémarrer Postfix
      ansible.builtin.service:
        name: postfix
        state: restarted
        enabled: true
        # NOTE(review): forces the sysvinit service backend; confirm this is
        # intentional on systemd-based Raspberry Pi OS.
        use: init
      # Best effort: first restart may fail before postfix is fully configured.
      ignore_errors: true

    - name: Créer répertoire de logs msmtp
      ansible.builtin.file:
        path: "{{ msmtp_log_dir }}"
        state: directory
        owner: root
        group: root
        mode: "0755"

    - name: Créer fichier de log msmtp sécurisé
      ansible.builtin.file:
        path: "{{ msmtp_log_file }}"
        state: touch
        owner: root
        group: root
        mode: "0600"
        # Keep the task idempotent: do not bump timestamps on an existing file.
        access_time: preserve
        modification_time: preserve

    - name: Configurer msmtp pour envoyer via Postfix local
      ansible.builtin.copy:
        dest: /etc/msmtprc
        owner: root
        group: root
        mode: "0600"
        content: |
          defaults
          logfile {{ msmtp_log_file }}
          auth off
          tls off
          account default
          host localhost
          port 25
          from root

    - name: Créer script de rotation quotidienne des logs msmtp
      ansible.builtin.copy:
        dest: "{{ rotate_script }}"
        owner: root
        group: root
        mode: "0755"
        content: |
          #!/bin/bash
          TODAY=$(date +%Y%m%d)
          if [ -f "{{ msmtp_log_file }}" ]; then
            mv "{{ msmtp_log_file }}" "{{ msmtp_log_dir }}/msmtp.log.$TODAY"
            touch "{{ msmtp_log_file }}"
            chmod 600 "{{ msmtp_log_file }}"
          fi
          find "{{ msmtp_log_dir }}" -type f -name 'msmtp.log.*' -mtime +{{ msmtp_log_retention_days }} -exec rm -f {} \;

    - name: Créer cron pour rotation quotidienne des logs msmtp
      ansible.builtin.cron:
        name: "Rotation quotidienne msmtp logs"
        user: root
        minute: "0"
        hour: "3"
        job: "{{ rotate_script }}"

    - name: S'assurer que tous les mails root arrivent dans la boîte locale
      ansible.builtin.lineinfile:
        path: /etc/aliases
        regexp: '^root:'
        line: "root: root"
        create: true
      register: root_alias

    # Rebuild the alias database only when /etc/aliases actually changed,
    # instead of reporting "changed" on every run.
    - name: Mettre à jour les alias
      ansible.builtin.command: newaliases
      when: root_alias is changed

    - name: Tester l'envoi de mail local
      ansible.builtin.shell: |
        echo "Test mail MTA local Pi" | mail -s "Test msmtp/Postfix Pi" root
      register: mail_test
      ignore_errors: true

    - name: Alerter si test mail échoue
      ansible.builtin.debug:
        msg: "⚠️ Envoi de mail via msmtp/Postfix sur Raspberry Pi a échoué !"
      when: mail_test.rc != 0

# Chain the mail-reading utility (neomutt) setup after the MTA play.
- name: mail utility
  ansible.builtin.import_playbook: cron_report_mailutility.yml

View File

@@ -0,0 +1,48 @@
---
# Playbook: install neomutt for root and configure it to read the local
# mailbox (/var/mail/root), plus a `mails` shell alias to open it quickly.
- name: Installer et configurer neomutt pour root
  hosts: raspberries:&local
  become: true
  vars:
    neomutt_config_file: "/root/.muttrc"

  tasks:
    - name: Installer neomutt
      ansible.builtin.apt:
        name: neomutt
        state: present
        update_cache: true

    - name: Créer fichier de configuration neomutt pour root
      ansible.builtin.copy:
        # Fix: use the declared variable instead of hard-coding the path,
        # so the vars entry is the single source of truth.
        dest: "{{ neomutt_config_file }}"
        owner: root
        group: root
        mode: "0600"
        # {% raw %} keeps Jinja from interpreting the muttrc's % sequences.
        content: |
          {% raw %}
          # Fichier de configuration neomutt pour root
          set spoolfile="/var/mail/root"
          set folder="/var/mail"
          # Affichage
          set index_format="%4C %Z %{%b %d} %-15.15F (%4l) %s"
          # Navigation rapide
          set pager_index_lines=20
          set markers=yes
          set sort=reverse-date
          # Sauvegarde des mails lus
          set record="/var/mail/root"
          # Confirmation avant suppression
          set confirmappend=yes
          {% endraw %}

    - name: Créer alias pratique pour ouvrir neomutt
      ansible.builtin.lineinfile:
        path: /root/.bashrc
        line: 'alias mails="neomutt -f /var/mail/root"'
        create: true
        state: present

View File

@@ -27,6 +27,16 @@
- name: test backup_cmd
ansible.builtin.shell: |
{{ backup_cmd }} > /dev/null
- name: Créer le script de backup
copy:
dest: "{{ scripts_dir }}/backup.sh"
mode: '0755'
content: |
#!/bin/bash
set -e
{{ backup_cmd }} > {{ backup_dir }}/backup_$(date +\%Y\%m\%d).gitea.gz
find {{ backup_dir }} -type f -name 'backup_*.gitea.gz' -mtime +{{ keep_days }} -delete
- name: Ajouter une tâche cron pour backup Gitea tous les jours à 4h
cron:
@@ -34,9 +44,7 @@
minute: "0"
hour: "4"
user: root
job: >-
{{ backup_cmd }} > {{ backup_dir }}/backup_$(date +\\%Y\\%m\\%d).gitea.gz
&& find {{ backup_dir }} -type f -name 'backup_*.gitea.gz' -mtime +{{ keep_days }} -delete
job: "{{ scripts_dir }}/backup.sh"
- name: Créer le script de restauration
copy:

View File

@@ -25,6 +25,16 @@
- name: test backup_cmd
ansible.builtin.shell: |
{{ backup_cmd }} > /dev/null
- name: Créer le script de backup
copy:
dest: "{{ scripts_dir }}/backup.sh"
mode: '0755'
content: |
#!/bin/bash
set -e
{{ backup_cmd }} | gzip > {{ backup_dir }}/backup_$(date +\%Y\%m\%d).sql.gz
find {{ backup_dir }} -type f -name 'backup_*.sql.gz' -mtime +{{ keep_days }} -delete
- name: Ajouter une tâche cron pour dump PostgreSQL tous les jours à 4h avec compression
cron:
@@ -32,9 +42,7 @@
minute: "0"
hour: "4"
user: root
job: >-
{{ backup_cmd }} | gzip > {{ backup_dir }}/backup_$(date +\\%Y\\%m\\%d).sql.gz
&& find {{ backup_dir }} -type f -name 'backup_*.sql.gz' -mtime +{{ keep_days }} -delete
job: "{{ scripts_dir }}/backup.sh"
- name: Créer le script de restauration
copy:

View File

@@ -9,6 +9,7 @@
backup_size: 50Gi
access_mode: ReadWriteMany
storage_class: longhorn
recurring_job: daily-backup
tasks:
- name: Créer PVC RWX dans longhorn-system
@@ -40,6 +41,36 @@
- name: Extraire le nom du volume
set_fact:
pvc_internal_name: "{{ pvc_info.resources[0].spec.volumeName }}"
- name: Créer un RecurringJob pour backup quotidien à 5h du matin
kubernetes.core.k8s:
state: present
definition:
apiVersion: longhorn.io/v1beta2
kind: RecurringJob
metadata:
name: "{{ recurring_job }}"
namespace: "{{ namespace_longhorn }}"
spec:
name: "{{ recurring_job }}"
groups: []
task: backup
cron: "0 5 * * *"
retain: 7
concurrency: 1
- name: Attacher le volume au recurring job
kubernetes.core.k8s_json_patch:
api_version: longhorn.io/v1beta2
kind: Volume
namespace: "{{ namespace_longhorn }}"
name: "{{ pvc_internal_name }}"
patch:
- op: replace
path: /spec/recurringJobSelector
value:
- name: "{{ recurring_job }}"
isGroup: false
- name: Lancer un pod temporaire pour déclencher NFS
tags: never

View File

@@ -35,4 +35,15 @@ spec:
refreshAfter: 1h
vaultAuthRef: longhorn-vault-secret-reader
vaultAuthRef: longhorn-vault-secret-reader
---
apiVersion: v1
kind: ConfigMap
metadata:
name: longhorn-default-resource
namespace: longhorn-system
data:
default-resource.yaml: |
"backup-target": "s3://arcodange-backup@us-east-1/"
"backup-target-credential-secret": "longhorn-gcs-backup-credentials"
"backupstore-poll-interval": "180"

View File

@@ -1,7 +1,7 @@
# https://longhorn.io/docs/1.9.1/snapshots-and-backups/backup-and-restore/set-backup-target/#set-up-gcp-cloud-storage-backupstore
resource "google_storage_bucket" "longhorn_backup" {
name = "arcodange-backup"
location = "US-EAST1"
location = "NAM4" # https://cloud.google.com/storage/docs/locations#location-dr
force_destroy = true
public_access_prevention = "enforced"
@@ -17,8 +17,8 @@ resource "google_storage_bucket_iam_member" "longhorn_backup" {
member = "serviceAccount:${google_service_account.longhorn_backup.email}"
}
resource "google_service_account_key" "longhorn_backup" {
service_account_id = google_service_account.longhorn_backup.account_id
resource "google_storage_hmac_key" "longhorn_backup" {
service_account_email = google_service_account.longhorn_backup.email
}
@@ -34,9 +34,11 @@ resource "vault_kv_secret_v2" "longhorn_gcs_backup" {
name = "longhorn/gcs-backup"
cas = 1
delete_all_versions = true
data_json = base64decode(
google_service_account_key.longhorn_backup.private_key
)
data_json = jsonencode({
AWS_ACCESS_KEY_ID = google_storage_hmac_key.longhorn_backup.access_id
AWS_SECRET_ACCESS_KEY = google_storage_hmac_key.longhorn_backup.secret
AWS_ENDPOINTS: "https://storage.googleapis.com"
})
}
data "vault_policy_document" "longhorn_gcs_backup" {

View File

@@ -22,11 +22,10 @@ provider "gitea" { # https://registry.terraform.io/providers/go-gitea/gitea/late
provider "vault" {
address = "https://vault.arcodange.duckdns.org"
token = "hvs.CAESINCaMZanSRV-JM2rhHijIcFjT3mNE63jNpy_LInw-qy_Gh4KHGh2cy5PcndCWVhRUWpORmdyZzJISFNZYzlLVGk"
# auth_login_jwt { # TERRAFORM_VAULT_AUTH_JWT environment variable
# mount = "gitea_jwt"
# role = "gitea_cicd"
# }
auth_login_jwt { # TERRAFORM_VAULT_AUTH_JWT environment variable
mount = "gitea_jwt"
role = "gitea_cicd"
}
}
provider "google" {