navidrome
navidrome.tf  (Normal file, 48 lines)
@@ -0,0 +1,48 @@
resource "proxmox_lxc" "navidrome" {
  target_node  = "ryuk2"
  hostname     = "navidrome"
  vmid         = 223
  ostemplate   = "cephfs:vztmpl/debian-12-standard_12.7-1_amd64.tar.zst"
  password     = "terraform"
  unprivileged = true
  cores        = 6
  memory       = 2096
  swap         = 1024
  start        = true
  tags         = "terraform"
  nameserver   = "192.168.0.24"
  ssh_public_keys = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAINFw70PrMsilcsqCrwW1I6PAt3anQbhmVg+t/HUfomug ryan@mxDesktop"

  features {
    nesting = true
  }

  rootfs {
    storage = "local-lvm"
    size    = "64G"
  }

  mountpoint {
    key     = "0"
    slot    = 0
    storage = "Ceph"
    mp      = "/data"
    size    = "400G"
  }

  network {
    name   = "eth0"
    bridge = "vmbr0"
    ip     = "192.168.0.59/24"
    gw     = "192.168.0.1"
  }

  provisioner "local-exec" {
    command = "./.ansible.d/setup.sh $IP"
    environment = {
      IP = "192.168.0.59"
    }
  }
}
roles/ind/navidrome/cron.vars  (Normal file, 18 lines)
@@ -0,0 +1,18 @@
---
cronjob:

  - name: appdata
    job_name: "Daily /home/docker/ Backup"
    job_minute: "0"
    job_hour: "3"
    job_day: "*"
    user: "root"
    job_command: "/home/docker/restic.sh --backup /home/docker/"

  - name: weeklydb
    job_name: "Weekly /home/docker/ verification"
    job_minute: "10"
    job_hour: "6"
    job_day: "1"
    user: "root"
    job_command: "/home/docker/restic.sh --verify /home/docker/"
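These variables are presumably consumed by the shared lib/setup-restic-cron.yml include referenced by the playbook later in this commit. That library file is not included here, so the following is only a minimal sketch of how such a loop might look with the stock ansible.builtin.cron module; the field-to-parameter mapping (in particular whether job_day is a day-of-month or a weekday) is an assumption.

# Hypothetical sketch of lib/setup-restic-cron.yml (not part of this commit)
- name: Install restic cron jobs defined in cron.vars
  ansible.builtin.cron:
    name: "{{ item.job_name }}"
    minute: "{{ item.job_minute }}"
    hour: "{{ item.job_hour }}"
    weekday: "{{ item.job_day }}"  # assumption: the "Weekly" entry above reads naturally as a weekday
    user: "{{ item.user }}"
    job: "{{ item.job_command }}"
  loop: "{{ cronjob }}"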
roles/ind/navidrome/docker-compose.env  (Normal file, 135 lines)
@@ -0,0 +1,135 @@
########################################################################
########################################################################
########################################################################
##
##  Docker Compose Environment Variable file for Jellyfin Media Stack
##
##  Update any of the environment variables below as required.
##
##  It is highly recommended Linux users set up a "docker"
##  user, so the applications can access the local filesystem
##  with this user's access privileges. Use PUID / PGID to map
##  user access between the Docker apps and local filesystem.
##
########################################################################
########################################################################
########################################################################

# Name of the project in Docker
COMPOSE_PROJECT_NAME=media-stack

# This is the network subnet which will be used inside the docker "media_network"; change as required.
# LOCAL_SUBNET is your home network and is needed so the VPN client allows access to your home computers.
DOCKER_SUBNET=172.28.10.0/24
DOCKER_GATEWAY=172.28.10.1
LOCAL_SUBNET=192.168.0.0/24
LOCAL_DOCKER_IP=192.168.0.57

# Each of the "*ARR" applications has been configured so the theme can be changed to your needs.
# Refer to Theme Park for more info / options: https://docs.theme-park.dev/theme-options/aquamarine/
TP_DISABLE_THEME=false
TP_THEME=nord

# These are the folders on your local host computer / NAS running Docker. They MUST exist
# and have correct permissions for PUID and PGID prior to running the docker-compose.
#
# Use the commands in the Guide to create all the sub-folders in each of these folders.

# Host Data Folders - Will accept Linux, Windows, NAS folders
FOLDER_FOR_CONFIGS=/home/grail/docker
FOLDER_FOR_MEDIA=/media

# File access, date and time details for the containers / applications to use.
# Run "sudo id docker" on the host computer to find PUID / PGID and update these to suit.
PUID=1000
PGID=1000
UMASK=0002
TIMEZONE=America/Chicago

# Update your own Internet VPN provider details below
VPN_TYPE=openvpn
VPN_SERVICE_PROVIDER=airvpn
VPN_USERNAME=
VPN_PASSWORD=

SERVER_COUNTRIES=
SERVER_REGION=
SERVER_CITIES="Chicago Illinois"
SERVER_HOSTNAMES=

# Fill in this item ONLY if you're using a custom OpenVPN configuration.
# It should be inside the gluetun data folder - Example: /gluetun/custom-openvpn.conf
# You can then edit it inside the FOLDER_FOR_CONFIGS location for gluetun.
OPENVPN_CUSTOM_CONFIG=/gluetun/ipvanish.openvpn.conf
#OPENVPN_CUSTOM_CONFIG=

QBIT_PORT_TCP=44172
QBIT_PORT_UDP=44172
FLARESOLVERR_PORT=8191
TDARR_SERVER_PORT=8266
WEBUI_PORT_TDARR=8265
RUTORRENT_PORT=8200

# SWAG is configured for Reverse Proxy. Set your Internet gateway to redirect incoming ports 80 and 443
# to the ports used below (using the Docker IP address), and they will be translated back to 80 and 443 by SWAG.
# Change these port numbers if you have conflicting services running on the Docker host computer.

REVERSE_PROXY_PORT_HTTP=5080
REVERSE_PROXY_PORT_HTTPS=5443

# SWAG REVERSE PROXY SETTINGS:
URL=your-domain-name-goes-here.com
SUBDOMAINS=wildcard
VALIDATION=dns
DNSPLUGIN=cloudflare
CERTPROVIDER=
PROPAGATION=
DUCKDNSTOKEN=
EMAIL=
ONLY_SUBDOMAINS=false
EXTRA_DOMAINS=
STAGING=false
HEALTH_VPN_DURATION_INITIAL=45s
HEALTH_VPN_DURATION_ADDITION=30s
UPDATER_PERIOD=8h
TZ=America/Chicago
PUID=1000
PGID=1000

WAN_IP_CMD=false

AUTH_DELAY=0s
MEMORY_LIMIT=256M
UPLOAD_MAX_SIZE=16M
OPCACHE_MEM_SIZE=128
MAX_FILE_UPLOADS=50
REAL_IP_FROM=0.0.0.0/32
REAL_IP_HEADER=X-Forwarded-For
LOG_IP_VAR=remote_addr
LOG_ACCESS=true

XMLRPC_AUTHBASIC_STRING=rTorrent XMLRPC restricted access
RUTORRENT_AUTHBASIC_STRING=ruTorrent restricted access
WEBDAV_AUTHBASIC_STRING=WebDAV restricted access

RT_LOG_LEVEL=info
RT_LOG_EXECUTE=false
RT_LOG_XMLRPC=false

RU_HTTP_USER_AGENT=Mozilla/5.0 (Windows NT 6.0; WOW64; rv:12.0) Gecko/20100101 Firefox/12.0
RU_HTTP_TIME_OUT=30
RU_HTTP_USE_GZIP=true
RU_RPC_TIME_OUT=5
RU_LOG_RPC_CALLS=false
RU_LOG_RPC_FAULTS=true
RU_PHP_USE_GZIP=false
RU_PHP_GZIP_LEVEL=2
RU_SCHEDULE_RAND=10
RU_LOG_FILE=/data/rutorrent/rutorrent.log
RU_DO_DIAGNOSTIC=true
RU_SAVE_UPLOADED_TORRENTS=true
RU_OVERWRITE_UPLOADED_TORRENTS=false
RU_FORBID_USER_SETTINGS=false
RU_LOCALE=UTF8
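The comments above stress that FOLDER_FOR_CONFIGS and FOLDER_FOR_MEDIA must already exist with ownership matching PUID / PGID before the stack starts. That preparation is not part of this commit, so here is only a minimal Ansible sketch of it, assuming the paths and IDs from the file above (/home/grail/docker, /media, UID/GID 1000).

# Hypothetical task, not in this commit; paths and IDs are taken from the env file above.
- name: Ensure host data folders exist with PUID/PGID ownership
  ansible.builtin.file:
    path: "{{ item }}"
    state: directory
    owner: "1000"
    group: "1000"
    mode: "0775"
  loop:
    - /home/grail/docker
    - /media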
roles/ind/navidrome/docker-compose.yml  (Normal file, 76 lines)
@@ -0,0 +1,76 @@
services:
  gluetun:
    image: qmcgaw/gluetun
    container_name: gluetun
    restart: always
    cap_add:
      - NET_ADMIN
    devices:
      - /dev/net/tun
    volumes:
      - ./data/gluetun:/gluetun
    ports:
      - 11000:6881/udp # rutorrent
      - 11001:11001    # rutorrent
      - 11002:8080     # rutorrent
      - 11003:9000     # rutorrent
      - 4733:4733      # rutorrent
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=America/Chicago
      - VPN_SERVICE_PROVIDER=airvpn
      - VPN_TYPE=openvpn # change it to your protocol
      - OPENVPN_CUSTOM_CONFIG=/gluetun/ipvanish.openvpn.conf
      - VPN_INTERFACE=tun0
      - FIREWALL_OUTBOUND_SUBNETS=192.168.0.0/24
      - SERVER_CITIES="Chicago Illinois"
      - FIREWALL_VPN_INPUT_PORTS=4733

  rutorrent:
    image: crazymax/rtorrent-rutorrent:latest
    container_name: rutorrent
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=America/Chicago
      - XMLRPC_PORT=11001 # the default port is 8000, but mine was already in use; if yours is free you don't need this line, and you can change '- 11001:11001' in ports to '- 11001:8000'
      - RT_INC_PORT=4733
    volumes:
      - ./data/rutorrent/data:/data
      - /data:/downloads
      - ./data/rutorrent/password:/passwd
    network_mode: service:gluetun
    security_opt:
      - no-new-privileges:true
    restart: always

  lidarr:
    container_name: lidarr
    image: ghcr.io/hotio/lidarr
    ports:
      - "8686:8686"
    environment:
      - PUID=1000
      - PGID=1000
      - UMASK=002
      - TZ=Etc/UTC
    volumes:
      - ./data/lidarr/:/config
      - /data:/data

  navidrome:
    image: deluan/navidrome:latest
    user: 1000:1000 # should be owner of volumes
    ports:
      - "4533:4533"
    restart: unless-stopped
    environment:
      # Optional: put your config options customization here. Examples:
      ND_SCANSCHEDULE: 1h
      ND_LOGLEVEL: info
      ND_SESSIONTIMEOUT: 24h
      ND_BASEURL: ""
    volumes:
      - "./data/navidrome/:/data"
      - "/data/:/music:ro"
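Since rutorrent uses network_mode: service:gluetun, it has no connectivity until the VPN container is up. A common optional refinement, not part of this commit and relying on the health check that the gluetun image ships with, is to gate the torrent client on that health status; a sketch:

# Hypothetical fragment - would be merged into the rutorrent service above.
services:
  rutorrent:
    depends_on:
      gluetun:
        condition: service_healthy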
roles/ind/navidrome/downloader-install.yml  (Normal file, 107 lines)
@@ -0,0 +1,107 @@
---
- hosts: all
  become: yes
  remote_user: ansible
  gather_facts: false

  vars:

  tasks:

    - name: Upgrade-packages.yml - update packages
      include: ../../lib/upgrade-packages.yml

    - name: Install-docker.yml - setting up docker
      include: ../../lib/install-docker.yml

    - name: Install Restic and setup
      include: ../../lib/setup-restic.yml

    - name: Setup Restic Cron jobs
      include: ../../lib/setup-restic-cron.yml

    - name: Create grail user
      user:
        name: grail
        uid: 2003
        group: docker
        state: present
        create_home: yes
        home: /data
        shell: /bin/bash

    - name: Install nfs Packages
      apt:
        name:
          - nfs-common
        state: present
        update_cache: no

    - name: Install Python Packages
      apt:
        name:
          - python3-full
          - python3-pip
        state: present
        update_cache: yes

    - name: Setup Service
      copy:
        dest: /etc/systemd/system/navidrome.service
        content: |
          [Unit]
          Description=Docker Compose service
          Requires=docker.service
          After=docker.service

          [Service]
          Type=oneshot
          RemainAfterExit=yes
          WorkingDirectory=/home/docker
          ExecStart=/usr/bin/docker compose --env-file ./docker-compose.env -f docker-compose.yml up -d
          ExecStop=/usr/bin/docker compose --env-file ./docker-compose.env -f docker-compose.yml down

          [Install]
          WantedBy=multi-user.target
        owner: root
        group: root
        mode: '0644'

    - name: Write docker-compose.yml
      ansible.builtin.copy:
        src: docker-compose.yml
        dest: /home/docker/docker-compose.yml
        owner: docker
        group: docker
        mode: u=rw,g=r,o=r

    - name: Write docker-compose.env
      ansible.builtin.copy:
        src: docker-compose.env
        dest: /home/docker/docker-compose.env
        owner: docker
        group: docker
        mode: u=rw,g=r,o=r

    - name: Check if docker config directory exists
      stat:
        path: /home/docker/data
      register: data_stat

    - name: Restore docker config Dir
      shell: |
        restic --password-file /home/restic/.resticpassword -r sftp:misamisa://home/restic/$(hostname) restore latest --target /
      args:
        chdir: /home/docker/
        creates: /home/docker/data
      when: not data_stat.stat.exists or not data_stat.stat.isdir

    - name: Reload systemd daemon
      systemd:
        daemon_reload: yes

    - name: Start navidrome Service
      systemd:
        name: navidrome
        state: started
        enabled: yes
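Nothing in this play visibly loads roles/ind/navidrome/cron.vars, so the Restic cron include presumably pulls its cronjob list in through the shared lib files that are not part of this commit. If the intent is to feed it from the vars file above, one way to wire that in would be a play-level vars_files entry; this is only a sketch under that assumption:

# Hypothetical play header addition, assuming cron.vars sits next to this playbook.
- hosts: all
  become: yes
  remote_user: ansible
  vars_files:
    - cron.vars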