Functional base playbook

master
Quentin Duchemin 2021-02-12 22:13:37 +01:00
commit 1d649fee18
Signed by: Chosto
GPG Key ID: 0547178FEEDE7D6B
25 changed files with 385 additions and 0 deletions

1
.gitignore vendored 100644
@@ -0,0 +1 @@
~/.vault_password

42
README.md 100644
@@ -0,0 +1,42 @@
### Install Ansible
```
pip install -r requirements.txt
```
### Ansible Vault
To manage secrets, this repository uses Ansible Vault.
Create a secret
```
ansible-vault create inv/host_vars/new.chosto.me/secrets.yml
```
Edit a secret
```
ansible-vault edit inv/host_vars/new.chosto.me/secrets.yml
```
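Encrypt a single value inline (optional alternative to a whole secrets file; the variable name is only an example)
```
ansible-vault encrypt_string 'mysecretpassword' --name 'ansible_become_pass'
```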
### Server
All managed servers should have this base configuration in order to work:
- a user `chosto`
- SSH access allowed for this user
- a root access (`sudo`) for user `chosto` with a password specified as `ansible_become_pass` in inventory
*e.g.*
```
# adduser chosto
# adduser chosto sudo
```
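A minimal host_vars sketch for the become password (the file name and `vault_become_pass` variable are illustrative; the real value lives in the encrypted `secrets.yml`):
```
# inv/host_vars/new.chosto.me/vars.yml (illustrative sketch)
ansible_become_pass: "{{ vault_become_pass }}"
```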
### Usage
```
ansible-playbook [-t tag1,tag2,...] all.yml
```
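For instance, to apply only the firewall and fail2ban roles:
```
ansible-playbook -t ufw,fail2ban all.yml
```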

18
all.yml 100644
@@ -0,0 +1,18 @@
---
- hosts: all
become: yes
roles:
- role: base
tags: ["base"]
- role: cron
tags: ["cron"]
- role: ufw
tags: ["ufw"]
- role: fail2ban
tags: ["fail2ban"]
- hosts: all
become: yes
roles:
- role: "node-exporter"
tags: ["node-exporter"]

35
ansible.cfg 100644
@@ -0,0 +1,35 @@
[defaults]
# No cows because I am not a funny person
nocows = 1
force_color = True
stdout_callback = unixy
# Default inventory file, override with -i
inventory = ./inv/static.yml
# Where to load roles
roles_path = ./roles
# Smart fact gathering: https://docs.ansible.com/ansible/latest/reference_appendices/config.html#default-gathering
gathering = smart
# Local file with vault password
vault_password_file = ~/.vault_password
# Do not create .retry files
retry_files_enabled = False
# Use Python 3 on managed hosts (skips interpreter auto-discovery)
interpreter_python = /usr/bin/python3
# Fail on undefined variables
error_on_undefined_vars = True
[privilege_escalation]
# Default sudo user
become_user = root
[ssh_connection]
pipelining = True
scp_if_ssh = True

@@ -0,0 +1,10 @@
ihl_base_users:
- name: chosto
group: chosto
groups:
- sudo
ssh_keys:
- ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQC6fs07sdt9PbzzQ6IA2tkbPLqspaWXAMLzUK/LtXQBhN/+q4Bxz4I4eUyF7regsWgvOObjKoMilGfOXi2Q8ZZIXZ3DBqSMJpYkwVC0qqU3YJfVLhNa1NU5m+QYqhmREu8HIIFty4jZRTmkAxjy5Zv8af4l3q0VEWUxGV0MoDsJYroao5UcOPKo/Qv75ZIy7x4KWLcXMM1jYLczjF0E6XR99aOWex1XuAvW5bDS9wC+fAN/nMRS7A6Wsvt2tyth9lR40fM0IaUjG+TjQ2CCbmp7zFBpa0LQcCktksSXpfd4Vj6eczHz7N+l1V+inbAd99s84K7LKoTH0tXv02c42XSK52pVBUzyif2EO85dgHJ3NFmTcNdRJ1v5bSRu3vm0Gcq0kgWjYZq2TCHUYOkm5WWdJuDLIbPLydaSr8LGfv+QqL6RHJnKBt5CZ5Nrrei/ZPTjZCm/OgCrmbMQQeoSpE8ZBEMSlN5An5+ZuaR7BPfCNFoQK/mcbA2cqs6ZiNUQdoeb93DAbsQMqDtCnevDf7hbiy282rRMkRYX5Cj6vt3jpUKS7vwoDMIKWYAOKASPZ2IgvC6Buj8aE4p24qR3xbiTHF4iJyxdo2B/x1pwT+kRmCdmCK5XnlkrQfrwP1Kkaiz88XOxkqfsvpRWd1JUUcipYbO+7zTvVQZkLnKZ2AzciQ== quentinduchemin@tuta.io
ihl_base_ssh_users:
- chosto

@@ -0,0 +1,7 @@
$ANSIBLE_VAULT;1.1;AES256
33343337653333343366613634323137303835383230363964333832666562633230656233636530
3462643333626535363437656337363239653533633830320a633762323565393235616431626361
65313861666266313336626537383636666566383634363234613532373631343061323837633636
3762333033636462330a323362323034336365326432373030626634396265343365393162326538
33663862303831646239383735353766616464386532346665316664376434666363633034396363
3763316133353034653439316538383563353739323132626164

@@ -0,0 +1,10 @@
firewall_in_ports:
- "80"
- "443"
- "{{ ssh_port }}"
hostname: chosto
ssh_port: "2220"
prometheus_server_ip: "51.178.182.35"

6
inv/static.yml 100644
@@ -0,0 +1,6 @@
all:
hosts:
new.chosto.me:
ansible_port: 2220
ansible_user: chosto
ansible_ssh_private_key_file: ~/.ssh/scaleway

1
requirements.txt 100644
@@ -0,0 +1 @@
ansible==2.9.6

@@ -0,0 +1,6 @@
ihl_base_apt_packages_addons: []
ihl_base_apt_cache_time: 3600
ihl_base_additional_groups: []
ihl_base_users: []
ihl_base_ssh_users: []

@@ -0,0 +1,4 @@
- name: Restarts sshd
service:
name: ssh
state: restarted

@@ -0,0 +1,15 @@
- name: Update apt cache
apt:
update_cache: yes
cache_valid_time: "{{ ihl_base_apt_cache_time }}"
tags: ["update-apt-cache"]
- name: Install base packages
apt:
name: "{{ ihl_base_apt_packages }}"
state: present
- name: Install additional packages
apt:
name: "{{ ihl_base_apt_packages_addons }}"
state: present

@@ -0,0 +1,11 @@
---
- name: Set hostname
hostname:
name: "{{ hostname }}"
- name: Add myself to /etc/hosts
lineinfile:
dest: /etc/hosts
regexp: '^127\.0\.0\.1[ \t]+localhost'
line: '127.0.0.1 localhost {{ hostname }}'
state: present

@@ -0,0 +1,15 @@
- include: apt.yml
tags:
- apt
- include: users.yml
tags:
- users
- include: hostname.yml
tags:
- hostname
- include: ssh.yml
tags:
- ssh

@@ -0,0 +1,17 @@
- name: Deploys sshd config
template:
src: sshd_config.j2
dest: /etc/ssh/sshd_config
owner: root
group: root
mode: 0644
notify:
- Restarts sshd
- name: Configure authorized SSH keys
authorized_key:
user: "{{ item.name }}"
key: "{{ item.ssh_keys | default([]) | join('\n') }}"
state: present
exclusive: True
loop: "{{ ihl_base_users }}"

@@ -0,0 +1,32 @@
- name: Creates primary groups
group:
name: "{{ item.group | default(item.name) }}"
state: present
loop: "{{ ihl_base_users }}"
- name: Creates additional groups
group:
name: "{{ item.name }}"
gid: "{{ item.gid | default(omit) }}"
state: present
loop: "{{ ihl_base_additional_groups }}"
- name: Creates users
user:
name: "{{ item.name }}"
uid: "{{ item.uid | default(omit) }}"
group: "{{ item.group | default(item.name) }}"
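    # make sure the primary group is always part of the supplementary groups list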
groups: "{{ item.groups | default([]) | union([item.group | default(item.name)]) | unique }}"
home: "{{ item.home | default('/home/' ~ item.name ) }}"
shell: /bin/bash
password: "{{ item.password | default(omit) }}"
update_password: on_create
system: "{{ item.system | default(omit) }}"
    append: "{{ item.append | default('yes') }}"
loop: "{{ ihl_base_users }}"
- name: Remove "debian" user
user:
name: debian
state: absent
remove: yes

@@ -0,0 +1,56 @@
Port {{ ssh_port }}
# Necessary so X11 still works without IPv6
AddressFamily inet
Protocol 2
# HostKeys for protocol version 2
HostKey /etc/ssh/ssh_host_rsa_key
HostKey /etc/ssh/ssh_host_ecdsa_key
HostKey /etc/ssh/ssh_host_ed25519_key
# Logging
SyslogFacility AUTH
LogLevel INFO
# Authentication
LoginGraceTime 2m
PermitRootLogin prohibit-password
StrictModes yes
PubkeyAuthentication yes
# Don't read the user's ~/.rhosts and ~/.shosts files
IgnoreRhosts yes
# similar for protocol version 2
HostbasedAuthentication no
PermitEmptyPasswords no
ChallengeResponseAuthentication no
PasswordAuthentication no
# Disable several features
KerberosAuthentication no
GSSAPIAuthentication no
UseDNS no
X11Forwarding no
PrintMotd no
PrintLastLog yes
TCPKeepAlive yes
# Allow client to pass locale environment variables
AcceptEnv LANG LC_*
Subsystem sftp /usr/lib/openssh/sftp-server
# Set this to 'yes' to enable PAM authentication, account processing,
# and session processing. If this is enabled, PAM authentication will
# be allowed through the ChallengeResponseAuthentication and
# PasswordAuthentication. Depending on your PAM configuration,
# PAM authentication via ChallengeResponseAuthentication may bypass
# the setting of "PermitRootLogin without-password".
# If you just want the PAM account and session checks to run without
# PAM authentication, then enable this but set PasswordAuthentication
# and ChallengeResponseAuthentication to 'no'.
UsePAM yes
AllowUsers {{ ihl_base_ssh_users | join(' ') }}

@@ -0,0 +1,16 @@
ihl_base_apt_packages:
- apt-transport-https
- ca-certificates
- curl
- dnsutils
- git
- htop
- jq
- less
- lm-sensors
- python3
- python3-pip
- python3-setuptools
- smartmontools
- sudo
- nano

@@ -0,0 +1,5 @@
---
- name: Restart rsyslog
service:
name: rsyslog
state: restarted

@@ -0,0 +1,8 @@
---
- name: Enable logging for cron
lineinfile:
path: /etc/rsyslog.conf
regexp: "^cron.* /var/log/cron.log"
insertafter: "^#cron.*"
line: "cron.* /var/log/cron.log"
notify: Restart rsyslog

@@ -0,0 +1,4 @@
- name: Restarts fail2ban
service:
name: fail2ban
state: restarted

@@ -0,0 +1,21 @@
---
- name: Install fail2ban
apt:
name: fail2ban
state: present
- name: Enable fail2ban for SSH
template:
src: jail.local.j2
    # jail.local overrides jail.conf without replacing it,
    # so we can just put our SSH conf (port and log file) in it
dest: /etc/fail2ban/jail.local
owner: root
group: root
mode: 0644
notify: Restarts fail2ban
- name: Enable fail2ban
service:
name: fail2ban
enabled: true

@@ -0,0 +1,5 @@
[sshd]
mode = normal
port = {{ ssh_port }}
logpath = {{ fail2ban_log_path | default('/var/log/fail2ban.log') }}
backend = %(sshd_backend)s

@@ -0,0 +1,14 @@
---
- name: Prometheus node exporter
apt:
name:
- prometheus-node-exporter
state: present
- name: Allow queries from prometheus server
ufw:
rule: allow
port: "9100"
direction: in
proto: tcp
from: "{{ prometheus_server_ip }}"

@@ -0,0 +1,26 @@
---
- name: Install ufw
apt:
name:
- ufw
state: present
- name: Configure UFW rules
ufw:
rule: allow
port: "{{ item }}"
direction: in
proto: any
loop: "{{ firewall_in_ports }}"
- name: Set firewall default in policy
ufw:
state: enabled
direction: incoming
policy: deny
- name: Set firewall default out policy
ufw:
state: enabled
direction: outgoing
policy: allow