Compare commits

4 Commits: 0712ca3e76...c930ccc44a

Author | SHA1 | Date
---|---|---
Jef Roosens | c930ccc44a |
Jef Roosens | 592992f031 |
Jef Roosens | 5ad30d3343 |
Jef Roosens | f2f99a932d |
README.md (22 changed lines)
@@ -1,10 +1,20 @@
 # Raspberry Pi NAS

-This repository contains all configuration I use to set up a Raspberry Pi 4B as
-a NAS and media storage server. This repository will most likely evolve a lot.
+Ansible configuration repository for my Raspberry Pi's.

-The Pi is used to host the following:
+## Initial setup for new systems

-* Samba network share for access to family pictures
-* [Photoview](https://photoview.github.io/) instance for accessing images over the internet
-* [Jellyfin](https://jellyfin.org/) for accessing the media library as well
+1. Flash [Debian Raspberry Pi](https://raspi.debian.net/) on the SD card.
+2. Configure `/boot/firmware/sysconf.txt`:
+   1. Set `hostname` to the desired name
+   2. Set `root_authorized_key` to the contents of the current computer's
+      public SSH key
+3. Put the SD card in the Pi and boot
+4. Connect the Pi to wired internet
+5. Determine the IP address given to the new Pi
+6. Add this IP address to the `initial-hosts.ini` file, along with the
+   `static_ip` host var set to the desired static IP address
+7. Comment out `ansible_ssh_user` in `group_vars/all/vars.yml`, as this
+   overwrites the one set in the hosts file
+8. Run `ansible-playbook -i initial-hosts.ini first_run.yml`. This command will
+   hang at the `restart networking` step; at this point you can Ctrl-C.
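As a concrete illustration of the last two steps, assuming the freshly booted Pi was handed 192.168.0.216 over DHCP (the address used in the `initial-hosts.ini` added later in this compare), the first run would look roughly like:

    # confirm key-based root SSH access from sysconf.txt works
    ssh root@192.168.0.216 true
    # bootstrap the host; Ctrl-C once it hangs at the "restart networking" step
    ansible-playbook -i initial-hosts.ini first_run.yml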
@@ -0,0 +1,12 @@
---
- name: Configure base system.
  hosts: all
  gather_facts: no
  # Debian installs don't have Python by default
  pre_tasks:
    - name: Update package database
      raw: apt update
    - name: Install Python
      raw: apt install -y python3
  roles:
    - base
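Because the fresh Debian image ships without Python, this playbook bootstraps it through the raw module before any regular modules can run; a pre-flight check against such a host has to use raw as well, for example:

    ansible all -i initial-hosts.ini -m raw -a "cat /etc/os-release"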
@@ -1,30 +0,0 @@
---
ansible_ssh_user: !vault |
$ANSIBLE_VAULT;1.1;AES256
63363333613666383765643633393735396439386662346364643730373138333931303833393339
6430306434646562396134373433343439316133303931640a343536383539666132633636303031
65643735333839613636613963303433623933366532623037323963646462366464623739393461
3233613932616531360a326235346464333566633466633864343261663831336166313230306635
3265
ansible_ssh_port: 22

ansible_become_pass: !vault |
$ANSIBLE_VAULT;1.1;AES256
38306565386265383731653936353766396630626163316335643261613561363030653561346239
3764363033386531623331616132643637353536326439620a356232626635626266656331643466
36343435646561643662373138613237626461373330346566356132636366623731643838383633
3765666163656264340a663138623535626161376666323862373131383637356231323737313564
6430
lambroek_password: !vault |
$ANSIBLE_VAULT;1.1;AES256
33373365393262643630646336323731376232646138613061363933366265393863636536303430
3738363338653035623039383463643761343261336366620a353532613132343439333563663664
61643135323936323362326365663366333864363735363438636361643734333930616566356563
3038373639646338380a393061376135353564373062353139366461383939333161333936613430
33383233336531663261373631363733323839353235613131363966643838373033373437613764
37306137366666663938616465393464653961643732636236636438396165623165653363623135
36386632303939646632393362373838663337663063326338623534326561656561633131376138
64376237373133333761313635346266306638383038663333366139303437323562303733373764
63316564393763643834643232663462333633373639633938663035633063356530
@@ -0,0 +1,17 @@
---
ansible_ssh_user: debian
ansible_ssh_port: 22

# Values for static IP configuration
broadcast_ip: 192.168.0.255
router_ip: 192.168.0.1

debian_pass: "{{ vault_debian_pass }}"

ansible_become_pass: !vault |
$ANSIBLE_VAULT;1.1;AES256
38306565386265383731653936353766396630626163316335643261613561363030653561346239
3764363033386531623331616132643637353536326439620a356232626635626266656331643466
36343435646561643662373138613237626461373330346566356132636366623731643838383633
3765666163656264340a663138623535626161376666323862373131383637356231323737313564
6430
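The plain-text variables here only reference their `vault_*` counterparts; the encrypted blobs themselves (like the `!vault` value above and the vault files below) are typically produced and maintained with ansible-vault. A sketch, with the vault file path assumed since it is not named in this diff:

    # encrypt a single value for pasting into a vars file
    ansible-vault encrypt_string 'some-password' --name 'vault_debian_pass'
    # edit an encrypted vault file in place
    ansible-vault edit group_vars/all/vault.yml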
@@ -0,0 +1,6 @@
$ANSIBLE_VAULT;1.1;AES256
37303338366435366664333235623930303461666537326463613536303263353233303631653061
3365613139333035616434376464386436653863366338650a366363336438313364646432626335
32396334643064326531393930666263643163636163316430616434363139316665323262616538
3665633530616432350a326439636231383765666365386433313432373432373938656638373636
34323166343965616330366265353462626132356565316637313430343462363163
@@ -0,0 +1,6 @@
raid_uuid: '4d184875-19eb-4923-9b79-bf669c1f7978'
lambroek_password: "{{ vault_lambroek_password }}"
s3_access_key_id: "{{ vault_s3_access_key_id }}"
s3_secret_access_key: "{{ vault_s3_secret_access_key }}"
rclone_photos_obf_pass: "{{ vault_rclone_photos_obf_pass }}"
rclone_photos_obf_pass2: "{{ vault_rclone_photos_obf_pass2 }}"
@@ -0,0 +1,30 @@
$ANSIBLE_VAULT;1.1;AES256
37323336656133626339366437393062613937366232613334643034363635623832333136313063
3738353666646636323431663339623234306439323138650a373063376634333161666366303831
34373963366334636238623134613863303464663133653262333064613863643362633531653061
3934363435316636390a626531333463396137303132313363636163306464386138653538353633
32663836316665376233346364383461323065383461623762323933316635336661363032333637
30323831316239386365323266376439623761316330663063326539306339666362646138653537
33663964353632613232653130653164303963386233626233313037653737663436373934303832
66336466363465613839306662623631646531303865366536383030616139356539623730633033
66353666396162316132663364373137646637353333623738393464366234643264303030393465
38643734636336336563646361336165363133323738646531633835646262663637363964336134
31346432643336613534396436353064373938386233353435386631373434633766363135653962
35393230363735336436633033303465616362653734356235343261313464316138316539386238
65346531613566643365336538663538353961333632623465636265613764373637333035623133
62313564356534393338346631386365643736336138346532643638333737653835303862383863
37336534646364643366376339656636613762323632613836323936326234663261386339333836
36326334306136663638323738396337653663326539623936646437393537653538313439356636
34366133353165346534366339306564323861386237333262633535646166343463663435396233
35306533326234633133653336346161343735633364303662303637303534376337383539353165
34313434323433363936623531393464303762616632666661323834303137383535303961333462
64363635653039396464366663343661353665643534636464373333643438646536323330626366
35636336626532663732393064626139306261306530653433633365326438396535333665616234
33323566653634623364323663623833313063326438343766376436386430323834663031643135
31326561353761396364343232386530356631636637643838376562346330303334626162646532
35666132363939356263313834653836313033346439323765633364636236366234666333323663
61323633666661316437643732383231303138666536313665373833383334383263613065666365
32373536376461303762396535353733373630313966343431616337633334346565386263376666
37633536363336373465383432656465373535393837623962303066366631643730326562626266
38333337353062343562303534656166366136646232346364343134363436633436656165616538
3236
@@ -0,0 +1,5 @@
192.168.0.216 static_ip=192.168.0.3

[all:vars]
# These are expected to be the same
ansible_ssh_user=root
main.yml (2 changed lines)
@@ -3,7 +3,7 @@
  hosts: all
  become: yes
  roles:
    - packages
    - base
    - net-security
  tags: base
@@ -0,0 +1,43 @@
---
- name: Perform common tasks
  hosts: nas
  become: yes
  roles:
    - base
    - common
  tags: base

- name: Configure BTRFS RAID
  hosts: nas
  become: yes
  roles:
    - raid
  tags: raid

- name: Set up Samba
  hosts: nas
  become: yes
  roles:
    - samba
  tags: samba

- name: Set up Jellyfin
  hosts: nas
  become: yes
  roles:
    - jellyfin
  tags: jellyfin

- name: Set up Caddy
  hosts: nas
  become: yes
  roles:
    - caddy
  tags: caddy

- name: Set up photo sync using Rclone
  hosts: nas
  become: yes
  roles:
    - rclone
  tags: rclone
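Each play carries its own tag, so a single role can be re-applied without running the whole playbook. A sketch of the invocation, assuming this playbook and its inventory are saved as nas.yml and hosts.ini (neither name appears in this hunk):

    ansible-playbook -i hosts.ini nas.yml --tags samba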
@@ -0,0 +1,5 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCkDjXuZn+blanbJAhte8KttrpeCPeT5CGcZ5mlAZv724wTa4qebpwCnf4SK4aFuDQEuCusnia3+X7YWAyCDReNURznAWCtq+b8LGxyIm2hTBbLA1m8sj0xidR/djlUtOwDp9VpSNamUWyiPWJ+WNsPd9xLJ6BK3qRsoFiMN87sO12L7DHHDaMze628Oc+IxFd+VZnH0dPVgitis31f+lXCr8w5qSiEepDJ8Nde8M+Ev1RrPQbR5Q5C+wYxlbY0oPNlGqSrs5i1jJl0BVMI4DlibxatTfuteU5IwcDMQObJr3xJGKNTPswSdzpfJFrLfUBZvsDs94BXEHR2CtxZ4aLQPeLfosWe4zuGvX22p7TzSPx1LkuqIF85Tw1PvK3f7u3l9sozHORAoEA8sFHG+DolqldgjuUgCGpfF/QOY1jkGpbEhq57kKFH+VlFI2XePGQ6299R9RN/Y4S88v14ChLwoLSNWgxK+CgYgB4lbquAIKTKsRla3gkEeziz+qoHPQkD5RcajrWOfSKU4alORpgQerSFZ9zMoz9N2rfTVEzCsVUj0Jiwtd5O7pCX9PWBhz1Nl1ItrRPuFiTSKB05dqsQ1CDZAZMDPJNqotd6QRS5+cKzFLgvU6k/gk08/qV00VM+BxlXkh8PwAhaxNPjMxjzqHx0+xC38FtacuhJiOV91Q== roosensjef@gmail.com
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCgHqW7mLuaW8XEFJrg031ES7v7y6Uk5QUp++axTd0wzvt5qfqTox9Hg1Xk5C9hdEfYzS5NCU+uoiInR0aHZ3Cl+yxqi3VqDfO20j6Irrt2SOBB86Gsyu9Brj62xtS0rY/e9rmyULJGUtJEz3UmFvn8fE5hUpGjDg7NByFs8f054pzifWw8F/wOvF5rKo9GqkWeXEUZ456FmowXCQLl5SypQliOsHJDs89NiTVvOxiKQXULBhj8o4c0MyCeFfPWqOutSSAetmbnegEjOTy7f/0IiqB+5713KOh1Bm1/u+3J2IVbRgeG1iTJdDVeIxBGmA1wMLvrBtBRIS0MaKa1Xabo3QTgYPHNGrf2w+GMnuoQ6/tdD6omPWGTHXqtHKEeIW1JrlDyhOo86oCl+l2aveMwhFFGW4nQmW7sfrowyLHdU3BpGl4m7pGa+5sTsHiOGEqEN/a7xikztXkuKacQ8E/y1C8gDXgaX8zFl6VOwR5EfMEMX390tz+R+ErDU81h47tSkwbY3KhunSKwPT8jSMldBttnCIexd+QuQgOlSwXkYVPPmXtPUkfp+4VzWSWeGKAa9k3HtVMIvKdVk9eXDVNnVdaAL+EkHyXOyFVVGa9gJ3ZOWhHMNi2/kHAwWMI9CwRxj7AVk30KGBhPN0wdS9Dt8/0Aa33hWuY2p9DxtNaiNw== roosensjef@gmail.com
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAINkcCTP0IE/ANIXJJIMWEg4f5riS8uv3KuypkzQC47XN roosensjef@gmail.com
@@ -0,0 +1,5 @@
---
- name: restart networking
  service:
    name: networking
    state: restarted
@@ -0,0 +1,44 @@
---
- name: Install sudo.
  apt:
    name: sudo
    state: present

- name: Create data group.
  group:
    name: data
    gid: 1002

- name: Create debian user.
  user:
    name: debian
    groups:
      - sudo
      - data
    append: true
    create_home: yes
    shell: /bin/bash
    password: "{{ vault_debian_pass | password_hash('sha512') }}"
    update_password: on_create

- name: Create SSH directory.
  file:
    path: /home/debian/.ssh/
    state: directory
    owner: debian
    group: debian
    mode: '700'

- name: Add authorized SSH keys.
  copy:
    src: authorized_keys
    dest: /home/debian/.ssh/authorized_keys
    owner: debian
    group: debian
    mode: '600'

- name: Install networking config file.
  template:
    src: interfaces.j2
    dest: /etc/network/interfaces.d/eth0
  notify: restart networking
@@ -0,0 +1,7 @@
auto eth0
iface eth0 inet static
    address {{ static_ip }}
    broadcast {{ broadcast_ip }}
    netmask 255.255.255.0
    gateway {{ router_ip }}
    dns-nameservers {{ router_ip }} 8.8.8.8
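Rendered with the values that appear elsewhere in this compare (static_ip=192.168.0.3, broadcast_ip=192.168.0.255, router_ip=192.168.0.1), the resulting /etc/network/interfaces.d/eth0 would read roughly:

    auto eth0
    iface eth0 inet static
        address 192.168.0.3
        broadcast 192.168.0.255
        netmask 255.255.255.0
        gateway 192.168.0.1
        dns-nameservers 192.168.0.1 8.8.8.8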
@@ -0,0 +1,16 @@
# The Caddyfile is an easy way to configure your Caddy web server.
#
# Unless the file starts with a global options block, the first
# uncommented line is always the address of your site.
#
# To use your own domain name (with automatic HTTPS), first make
# sure your domain's A/AAAA DNS records are properly pointed to
# this machine's public IP, then replace ":80" below with your
# domain name.

media.roosens.me {
    reverse_proxy localhost:8096
}

# Refer to the Caddy docs for more information:
# https://caddyserver.com/docs/caddyfile
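This site block proxies media.roosens.me to the Jellyfin instance on port 8096. A quick sanity check after editing, assuming the default install paths:

    caddy validate --config /etc/caddy/Caddyfile
    curl -I http://localhost:8096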
@@ -0,0 +1,5 @@
---
- name: reload-caddy
  service:
    name: caddy
    state: reloaded
@@ -1,3 +1,4 @@
+---
 - name: Add Caddy GPG key
   apt_key:
     url: "https://dl.cloudsmith.io/public/caddy/stable/gpg.key"
@@ -16,3 +17,19 @@
   apt:
     name: caddy
     state: present
+
+- name: Copy over Caddyfile
+  copy:
+    src: Caddyfile
+    dest: '/etc/caddy/Caddyfile'
+    owner: root
+    group: root
+    mode: '644'
+  notify: reload-caddy
+
+- name: Ensure Caddy service is running & enabled
+  service:
+    name: caddy
+    state: started
+    enabled: true
@@ -0,0 +1,38 @@
- name: Install packages
  apt:
    name:
      # Needed for handling GPG keys for repositories
      - debian-keyring
      - debian-archive-keyring
      - apt-transport-https

      # Easy to edit files
      - vim
      - tmux
      - htop

      # Spam prevention
      - fail2ban

      # Disk monitoring
      - smartmontools
    state: present

- name: Install Vim config
  get_url:
    url: 'https://r8r.be/vim'
    dest: '{{ item.dest }}'
    owner: "{{ item.user }}"
    group: "{{ item.user }}"
    mode: '644'
  with_items:
    - user: debian
      dest: "/home/debian/.vimrc"
    - user: root
      dest: "/root/.vimrc"

- name: Enable fail2ban
  service:
    name: fail2ban
    state: started
    enabled: true
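Once this play has run, a quick way to confirm fail2ban is active (assuming Debian's default sshd jail) is:

    fail2ban-client status
    fail2ban-client status sshd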
@@ -5,7 +5,7 @@
 
 - name: Add Jellyfin repository
   apt_repository:
-    repo: "deb https://repo.jellyfin.org/debian bullseye main"
+    repo: "deb https://repo.jellyfin.org/debian bookworm main"
     filename: 'jellyfin'
     state: present
@@ -14,6 +14,16 @@
     name: jellyfin
     state: present
 
+- name: Create Jellyfin user
+  user:
+    name: jellyfin
+    groups:
+      - data
+    append: true
+    create_home: no
+    shell: /bin/nologin
+    update_password: on_create
+
 - name: Copy over service file
   copy:
     src: jellyfin.service.conf
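After the repository switch from bullseye to bookworm, it may be worth confirming that apt now resolves the package from the new suite, e.g.:

    apt-get update
    apt-cache policy jellyfin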
@ -1,14 +0,0 @@
|
|||
- name: Install fail2ban.
|
||||
apt:
|
||||
name: fail2ban
|
||||
state: present
|
||||
|
||||
# TODO add proper fail2ban config
|
||||
|
||||
- name: Ensure fail2ban is started & enabled.
|
||||
service:
|
||||
name: fail2ban
|
||||
state: started
|
||||
enabled: true
|
||||
|
||||
# TODO install UFW
|
|
@@ -1,11 +0,0 @@
- name: Install packages
  apt:
    name:
      # Needed for handling GPG keys for repositories
      - debian-keyring
      - debian-archive-keyring
      - apt-transport-https

      # Easy to edit files
      - vim
    state: present
@@ -0,0 +1,20 @@
---
- name: Install BTRFS tools
  apt:
    name: btrfs-progs
    state: present

- name: Create mountpoint
  file:
    path: /mnt/data1
    state: directory
    mode: '0755'
    owner: debian
    group: debian

- name: Mount RAID
  mount:
    path: /mnt/data1
    src: "UUID={{ raid_uuid }}"
    fstype: btrfs
    state: mounted
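With state: mounted the mount module also persists the entry in /etc/fstab, so with the raid_uuid from the nas group vars the resulting line should look roughly like this (mount options depend on the module defaults):

    UUID=4d184875-19eb-4923-9b79-bf669c1f7978 /mnt/data1 btrfs defaults 0 0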
@@ -0,0 +1,12 @@
- name: Ensure Rclone is installed
  ansible.builtin.apt:
    name: rclone
    state: present

- name: Ensure Rclone configuration file is present
  ansible.builtin.template:
    src: 'rclone.conf.j2'
    dest: /etc/rclone.conf
    owner: root
    group: root
    mode: '0644'
@@ -0,0 +1,20 @@
# Copy the config entry below into your rclone configuration file.
[ovh-s3]
type = s3
provider = Other
env_auth = false
access_key_id = {{ s3_access_key_id }}
secret_access_key = {{ s3_secret_access_key }}
acl = private
region = gra
location_constraint = gra
# API endpoint
# S3 standard endpoint
endpoint = https://s3.gra.io.cloud.ovh.net/

[photos-crypt]
type = crypt
remote = ovh-s3:pi-s3/photos
password = {{ rclone_photos_obf_pass }}
password2 = {{ rclone_photos_obf_pass2 }}
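With this template rendered to /etc/rclone.conf, a sync of the photo library to the encrypted remote could look like the following; the local source directory is an assumption, as it is not named anywhere in this compare:

    rclone --config /etc/rclone.conf sync /mnt/data1/photos photos-crypt: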
@@ -3,3 +3,8 @@
   service:
     name: smbd
     state: restarted
+
+- name: smbpasswd-lambroek
+  shell:
+    cmd: "smbpasswd -sa lambroek"
+    stdin: "{{ lambroek_password }}\n{{ lambroek_password }}"
@@ -16,10 +16,9 @@
     create_home: false
     groups:
       - data
-    password: "{{ lambroek_password }}"
+    password: "{{ lambroek_password | password_hash('sha512') }}"
     shell: /sbin/nologin
-
-# TODO run "smbpasswd -a lambroek"
+  notify: smbpasswd-lambroek
 
 - name: Copy over smb config file
   copy: