ansible: HD setup

This commit is contained in:
2024-07-10 11:08:29 +02:00
parent 397a5d3b7f
commit b97af4010f
17 changed files with 272 additions and 1 deletions

1
.gitignore vendored
View File

@@ -0,0 +1 @@
.terraform

View File

@@ -67,4 +67,6 @@ flowchart
ansible == deploy ==> Gitea
ansible --- ansible_scripts
```
🏹💻🪽

23
ansible/Dockerfile Normal file
View File

@@ -0,0 +1,23 @@
# docker build -f ansible/Dockerfile -t arcodange-ansible:0.0.0 ansible/
FROM python:slim
# openssh-client: ansible's SSH transport; socat: relay the agent socket; gosu: drop root privileges
RUN apt update && apt install openssh-client socat gosu -y
COPY nonroot_ssh_proxy_setup.sh /usr/local/bin/nonroot_ssh_proxy_setup.sh
COPY docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh
RUN chmod +x /usr/local/bin/*.sh
ENV SSH_AUTH_SOCK=/home/arcodange/.ssh/socket
RUN useradd -rm -d /home/arcodange -s /bin/bash -g root -G sudo,root -u 1000 arcodange
USER 1000
WORKDIR /home/arcodange/code
# FIX: /usr/local/bin appeared twice in the original PATH; deduplicated.
ENV PATH=/home/arcodange/.local/bin:/usr/local/bin:/usr/local/sbin:/usr/sbin:/usr/bin:/sbin:/bin
RUN pip install ansible-core jmespath
ENV GALAXY_SERVER=https://beta-galaxy.ansible.com/api/
# SECURITY FIX: a Galaxy API token was hard-coded here and committed to VCS.
# Pass it at build time instead (docker build --build-arg GALAXY_TOKEN=...)
# and rotate the leaked token. --token is only added when the arg is set.
ARG GALAXY_TOKEN
RUN ansible-galaxy collection install ${GALAXY_TOKEN:+--token "$GALAXY_TOKEN"} \
    community.general ansible.posix
ENV ANSIBLE_HOST_KEY_CHECKING=False
# BUG FIX: was "ANSIBLE_FORCE_COLOR=True=True", which sets the variable to the
# literal string "True=True" instead of "True".
ENV ANSIBLE_FORCE_COLOR=True
ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]

38
ansible/README.md Normal file
View File

@@ -0,0 +1,38 @@
# Use Ansible
## Run with docker ssh agent side proxy
### build docker images
```sh
git clone -q --depth 1 --branch master https://github.com/arcodange/ssh-agent.git /tmp/ssh-agent
(cd /tmp/ssh-agent ; docker build -t docker-ssh-agent:latest -f Dockerfile . ; rm -rf /tmp/ssh-agent)
(cd ansible; docker build -t arcodange-ansible:0.0.0 .)
```
### run in container
```sh
# git clone -q --depth 1 --branch master https://github.com/arcodange/ssh-agent.git /tmp/ssh-agent
# (cd /tmp/ssh-agent ; docker build -t docker-ssh-agent:latest -f Dockerfile . ; rm -rf /tmp/ssh-agent)
# (cd ansible; docker build -t arcodange-ansible:0.0.0 .)
docker run -d --name=ssh-agent docker-ssh-agent:latest
docker run --rm --volumes-from=ssh-agent -v ~/.ssh:/.ssh -it docker-ssh-agent:latest ssh-add /root/.ssh/id_rsa
docker run --rm -u root --name test --volumes-from=ssh-agent -v $PWD:/home/arcodange/code arcodange-ansible:0.0.0 \
ansible-playbook ansible/arcodange/factory/playbooks/setup/setup.yml -i pi1,pi2 -u pi -vv
```
### a tool to reuse a ssh agent (not required)
```sh
FIND_SSH_AGENT=$HOME/.local/bin/ssh-find-agent
curl -s https://raw.githubusercontent.com/wwalker/ssh-find-agent/master/ssh-find-agent.sh > $FIND_SSH_AGENT
chmod +x $FIND_SSH_AGENT
echo 'ssh_find_agent "$@"' >> $FIND_SSH_AGENT
which brew && brew install coreutils # if on macos
```
```sh
eval "$(ssh-agent -s)"
ssh-add ~/.ssh/id_rsa
```

View File

@@ -0,0 +1,35 @@
# awesome commands:
# sudo fdisk -l
# sudo parted -l
# sudo gparted -- graphical partitioning <-- used to create the partition and set the name+label 'gitea_data'
# sudo testdisk -- recovery
# sudo blkid -- partition UUIDs to configure fstab (auto mount)
# lsblk -fe7 -- partition UUIDs
---
- name: Setup Hard Disk
  hosts: pi2
  gather_facts: true
  become: true
  vars:
    # Partition labels; each one is mounted at /arcodange/<label> by the
    # setup_partition role below.
    mount_points:
      - gitea_data
      - pg_data
    verify_partitions: true  # Set to false to skip partition-existence checks
  tasks:
    - name: Setup partitions
      ansible.builtin.include_role:
        name: setup_partition
      loop: "{{ mount_points }}"
      loop_control:
        loop_var: mount_point

    - name: Set ACL for group docker on /arcodange
      ansible.posix.acl:
        path: "/arcodange"
        entity: "docker"
        etype: "group"
        permissions: "rwx"
        recursive: true
        state: present

View File

@@ -0,0 +1,38 @@
Role Name
=========
A brief description of the role goes here.
Requirements
------------
Any pre-requisites that may not be covered by Ansible itself or the role should be mentioned here. For instance, if the role uses the EC2 module, it may be a good idea to mention in this section that the boto package is required.
Role Variables
--------------
A description of the settable variables for this role should go here, including any variables that are in defaults/main.yml, vars/main.yml, and any variables that can/should be set via parameters to the role. Any variables that are read from other roles and/or the global scope (ie. hostvars, group vars, etc.) should be mentioned here as well.
Dependencies
------------
A list of other roles hosted on Galaxy should go here, plus any details in regards to parameters that may need to be set for other roles, or variables that are used from other roles.
Example Playbook
----------------
Including an example of how to use your role (for instance, with variables passed in as parameters) is always nice for users too:
- hosts: servers
roles:
- { role: username.rolename, x: 42 }
License
-------
BSD
Author Information
------------------
An optional section for the role authors to include contact information, or a website (HTML is not allowed).

View File

@@ -0,0 +1,4 @@
---
# defaults file for roles/setup_partition
# Partition labels to mount under /arcodange/<label>; normally supplied by the
# calling playbook (see hard_disk.yml).
mount_points: []
# When true, the role inspects the disk with parted and fails early if no
# partition carries the requested label before touching /etc/fstab.
verify_partitions: false

View File

@@ -0,0 +1,2 @@
---
# handlers file for roles/setup_partition

View File

@@ -0,0 +1,34 @@
galaxy_info:
  author: your name
  description: your role description
  company: your company (optional)

  # If the issue tracker for your role is not on github, uncomment the
  # next line and provide a value
  # issue_tracker_url: http://example.com/issue/tracker

  # Choose a valid license ID from https://spdx.org - some suggested licenses:
  # - BSD-3-Clause (default)
  # - MIT
  # - GPL-2.0-or-later
  # - GPL-3.0-only
  # - Apache-2.0
  # - CC-BY-4.0
  license: license (GPL-2.0-or-later, MIT, etc)

  # Quoted so YAML keeps this a string ("2.1") instead of the float 2.1 —
  # unquoted version numbers lose trailing zeros (e.g. 2.10 -> 2.1).
  min_ansible_version: "2.1"

  # If this a Container Enabled role, provide the minimum Ansible Container version.
  # min_ansible_container_version:

  galaxy_tags: []
  # List tags for your role here, one per line. A tag is a keyword that describes
  # and categorizes the role. Users find roles by searching for tags. Be sure to
  # remove the '[]' above, if you add tags to this list.
  #
  # NOTE: A tag is limited to a single word comprised of alphanumeric characters.
  # Maximum 20 tags per role.

dependencies: []
# List your role dependencies here, one per line. Be sure to remove the '[]' above,
# if you add dependencies to this list.

View File

@@ -0,0 +1,44 @@
---
# Tasks for roles/setup_partition: for the partition labelled {{ mount_point }},
# optionally verify it exists on the device, create its mount directory,
# declare it in /etc/fstab and mount everything.
- name: Optionally verify partition existence
  ansible.builtin.set_fact:
    device: "/dev/sda"
  when: verify_partitions | default(false)

- name: Read device information
  community.general.parted:
    device: "{{ device }}"
    unit: GiB
  register: device_info
  when: verify_partitions | default(false)

- name: Select partition
  # Builds e.g. "/dev/sda1" from the partition whose name matches mount_point.
  # When no partition carries that label the JMESPath query yields None, the
  # fact ends with the literal "None" and the task fails.
  ansible.builtin.set_fact:
    disk: |-
      {{ device + (
           device_info | to_json | from_json
           | community.general.json_query(jmes_path) | string
         )
      }}
  vars:
    jmes_path: partitions[?name == '{{ mount_point }}'].num | [0]
  failed_when: disk.endswith('None')
  when: verify_partitions | default(false)

- name: Create the mount directory
  # file is idempotent — it only creates the directory when missing, so the
  # original stat + "when: not exists" pair was redundant and has been dropped.
  ansible.builtin.file:
    path: "/arcodange/{{ mount_point }}"
    state: directory

- name: Declare mount point
  ansible.builtin.lineinfile:
    path: /etc/fstab
    line: "LABEL={{ mount_point }} /arcodange/{{ mount_point }} ext4 defaults 0 0"

- name: Use updated mount list
  ansible.builtin.command: mount -a
  # mount -a is idempotent but the command module would otherwise always
  # report "changed".
  changed_when: false

View File

@@ -0,0 +1,2 @@
localhost

View File

@@ -0,0 +1,5 @@
---
# Smoke test: apply the setup_partition role against localhost.
- hosts: localhost
  remote_user: root
  roles:
    - roles/setup_partition

View File

@@ -0,0 +1,2 @@
---
# vars file for roles/setup_partition

View File

@@ -0,0 +1,11 @@
---
- name: setup
  hosts: all
  tasks:
    - name: hello world
      ansible.builtin.debug:
        msg: Hello world!

# BUG FIX: import_playbook is a playbook-level keyword and is NOT a valid
# task action — the original listed it under tasks, which fails at parse
# time ("'import_playbook' is not a valid attribute for a Task"). It must
# be a top-level play entry.
- name: setup hard disk
  ansible.builtin.import_playbook: hard_disk.yml

View File

@@ -0,0 +1,10 @@
#!/bin/sh
# When started as root: run the ssh-agent proxy setup, then re-exec this
# same script as the unprivileged "arcodange" user via gosu, forwarding the
# agent socket path.
if [ "$(id -u)" = '0' ]; then
    . /usr/local/bin/nonroot_ssh_proxy_setup.sh
    # FIX: quote the expansion so an empty/odd SSH_AUTH_SOCK cannot break
    # the env invocation through word splitting.
    exec env SSH_AUTH_SOCK="$SSH_AUTH_SOCK" gosu arcodange "$0" "$@"
fi
# Already non-root: run the requested command.
exec "$@"

View File

@@ -0,0 +1,20 @@
#!/bin/sh
set -e

# Path of the ssh-agent socket shared by the docker-ssh-agent container
# (mounted with --volumes-from=ssh-agent).
SOCKET_FILE="/.ssh-agent/socket"

if [ -S "$SOCKET_FILE" ]; then
    echo "Le fichier de socket $SOCKET_FILE est présent. Exécution du script..."
else
    echo "Erreur : Le fichier de socket $SOCKET_FILE est absent. Arrêt du script."
    # FIX: corrected garbled wording ("Avez vous bien utiliiser").
    echo "Avez-vous bien utilisé --volumes-from=ssh-agent du conteneur docker-ssh-agent:latest ?"
    exit 1
fi

export SSH_AUTH_SOCK=/home/arcodange/.ssh/socket
# BUG FIX: plain mkdir aborts under `set -e` when the directory already
# exists (e.g. container restart); -p makes it idempotent. Also use $(...)
# and quoting instead of unquoted backticks.
mkdir -p "$(dirname "$SSH_AUTH_SOCK")"
# Relay the non-root user's socket to the agent socket owned by root.
socat UNIX-LISTEN:"$SSH_AUTH_SOCK",fork UNIX-CONNECT:"$SOCKET_FILE" &
chown -R 1000 "$(dirname "$SSH_AUTH_SOCK")"
echo "export SSH_AUTH_SOCK=$SSH_AUTH_SOCK" >> ~/.profile