git.immae.eu Git - perso/Immae/Projets/Puppet.git/commitdiff
Merge branch 'dev'
author Ismaël Bouya <ismael.bouya@normalesup.org>
Tue, 20 Mar 2018 12:14:41 +0000 (13:14 +0100)
committer Ismaël Bouya <ismael.bouya@normalesup.org>
Tue, 20 Mar 2018 12:14:41 +0000 (13:14 +0100)
71 files changed:
.gitmodules
bin/install_script.sh [new file with mode: 0755]
bin/install_script_ovh_cloud_instance.sh [deleted file]
bin/install_script_ovh_vps_ssd.sh [deleted file]
environments/global/common.yaml
environments/global/roles/backup.yaml [new file with mode: 0644]
environments/global/roles/cryptoportfolio.yaml
environments/global/types/s1-2.yaml
environments/global/types/vps-ovhssd-1.yaml
environments/hiera.yaml
environments/integration/roles/cryptoportfolio.yaml
environments/production/roles/cryptoportfolio.yaml
manifests/site.pp
modules/base_installation/files/cronie/puppet-post-merge
modules/base_installation/files/scripts/puppet_apply [new file with mode: 0644]
modules/base_installation/files/scripts/puppet_reset_and_apply
modules/base_installation/lib/facter/ldapvar.rb
modules/base_installation/lib/puppet/functions/ldap_data.rb [new file with mode: 0644]
modules/base_installation/manifests/cronie.pp
modules/base_installation/manifests/init.pp
modules/base_installation/manifests/params.pp
modules/base_installation/manifests/puppet.pp
modules/base_installation/manifests/services.pp
modules/base_installation/manifests/users.pp
modules/base_installation/templates/puppet/host_ldap.info.erb
modules/pacman
modules/profile/manifests/apache.pp
modules/profile/manifests/fstab.pp [moved from modules/base_installation/manifests/fstab.pp with 73% similarity]
modules/profile/manifests/known_hosts.pp [new file with mode: 0644]
modules/profile/manifests/mail.pp [new file with mode: 0644]
modules/profile/manifests/postgresql.pp
modules/profile/manifests/xmr_stak.pp
modules/profile/templates/mail/ssmtp.conf.erb [new file with mode: 0644]
modules/profile/templates/xmr_stak/xmr-stak.service.erb [moved from modules/profile/files/xmr_stak/xmr-stak.service with 63% similarity]
modules/role/manifests/backup.pp [new file with mode: 0644]
modules/role/manifests/cryptoportfolio.pp
modules/role/manifests/cryptoportfolio/apache.pp [new file with mode: 0644]
modules/role/manifests/cryptoportfolio/bot.pp [new file with mode: 0644]
modules/role/manifests/cryptoportfolio/front.pp [new file with mode: 0644]
modules/role/manifests/cryptoportfolio/notify.pp [new file with mode: 0644]
modules/role/manifests/cryptoportfolio/postgresql.pp [new file with mode: 0644]
modules/role/templates/backup/backup_dirname_head.sh.erb [new file with mode: 0644]
modules/role/templates/backup/backup_dirname_part.sh.erb [new file with mode: 0644]
modules/role/templates/backup/backup_dirname_tail.sh.erb [new file with mode: 0644]
modules/role/templates/backup/backup_head.sh.erb [new file with mode: 0644]
modules/role/templates/backup/backup_immae_eu.sh.erb [new file with mode: 0644]
modules/role/templates/backup/backup_tail.sh.erb [new file with mode: 0644]
modules/role/templates/backup/ssh_host_changed.info.erb [new file with mode: 0644]
modules/role/templates/backup/ssh_key_changed.info.erb [new file with mode: 0644]
modules/role/templates/cryptoportfolio/api_conf.toml.erb
modules/role/templates/cryptoportfolio/bot_config.ini.erb
modules/role/templates/cryptoportfolio/cryptoportfolio-app.service.erb
modules/role/templates/cryptoportfolio/static_conf.env.erb
modules/ssh_keygen [new submodule]
python/buy_ovh_vps_ssd.py [moved from python/buy_vps_server.py with 100% similarity]
python/get_initial_configuration_ovh_cloud_instance.py [moved from python/get_initial_configuration_cloud_instance.py with 52% similarity]
python/get_initial_configuration_ovh_vps_ssd.py [moved from python/get_initial_configuration.py with 100% similarity]
python/list_servers.py
python/ovh_helper.py
python/reboot_ovh_cloud_instance.py [moved from python/reboot_cloud_instance.py with 52% similarity]
python/reboot_ovh_vps_ssd.py [moved from python/reboot_vps_server.py with 100% similarity]
python/reinstall_ovh_cloud_instance.py [moved from python/reinstall_cloud_instance.py with 71% similarity]
python/reinstall_ovh_vps_ssd.py [moved from python/reinstall_vps_server.py with 100% similarity]
scripts/arch_install_script.sh [new file with mode: 0755]
scripts/arch_puppet_configuration_script.sh [new file with mode: 0755]
scripts/ovh_cloud_instance/arch_host_puppet_configuration_script.sh [new file with mode: 0755]
scripts/ovh_cloud_instance/arch_host_script.sh [new file with mode: 0755]
scripts/ovh_vps_ssd/arch_chroot_script.sh [new file with mode: 0755]
scripts/ovh_vps_ssd/arch_host_puppet_configuration_script.sh [new file with mode: 0755]
scripts/ovh_vps_ssd/arch_host_script.sh [new file with mode: 0755]
scripts/send_and_run.tcl [new file with mode: 0755]

index fa5163a1ffcfe8ffe2f88b23bc3bc38ebd25e3f4..735ca8ce9d094f1817ce39c706784470f7c36f79 100644 (file)
@@ -46,6 +46,9 @@
 [submodule "python/ovh"]
        path = python/ovh
        url = git://git.immae.eu/github/ovh/python-ovh
+[submodule "modules/ssh_keygen"]
+       path = modules/ssh_keygen
+       url = git://git.immae.eu/github/voxpupuli/puppet-ssh_keygen
 [submodule "modules/ssl"]
        path = modules/ssl
        url = git://git.immae.eu/github/fnerdwq/puppet-ssl
diff --git a/bin/install_script.sh b/bin/install_script.sh
new file mode 100755 (executable)
index 0000000..bd7f38b
--- /dev/null
@@ -0,0 +1,155 @@
+#!/bin/bash
+
+usage() {
+cat <<EOF
+$(basename $0) [options]
+  --help,-h               This help
+
+  One of the following options is necessary:
+  --instance-id id        Id of the cloud instance
+  --vps-id id             Id of the vps
+
+  Optional arguments:
+  --password password     Password of the host (only useful in case of no reboot and vps)
+  --reinstall-first       Start with reinstalling the vps
+  --host-user user        Use another user than the default one
+  --no-reboot             Don't reboot
+  --no-reboot-start       Don't reboot to rescue at the beginning
+  --no-reboot-end         Don't reboot to normal at the end
+  --git-branch branch     Use another puppet branch (default: master)
+  --environment env       Environment to use for the install (default: production)
+EOF
+}
+
+set -e
+
+git_branch=master
+environment=production
+host_user=""
+password=""
+T=""
+
+while [ -n "$1" ]; do
+  case "$1" in
+    --instance-id)
+      host_id="$2"
+      if [ -z "$host_user" ]; then
+        host_user="arch"
+      fi
+      if [ -z "$password" ]; then
+        password="x"
+      fi
+      [ -n "$T" ] && usage && exit 1
+      T="ovh_cloud_instance"
+      shift
+      ;;
+    --vps-id)
+      host_id="$2"
+      if [ -z "$host_user" ]; then
+        host_user="root"
+      fi
+      [ -n "$T" ] && usage && exit 1
+      T="ovh_vps_ssd"
+      shift
+      ;;
+    --password)
+      password="$2"
+      shift
+      ;;
+    --reinstall-first)
+      reinstall_first=1
+      ;;
+    --host-user)
+      host_user="$2"
+      shift
+      ;;
+    --no-reboot)
+      no_reboot=1
+      ;;
+    --no-reboot-start)
+      no_reboot_start=1
+      ;;
+    --no-reboot-end)
+      no_reboot_end=1
+      ;;
+    --git-branch)
+      git_branch="$2"
+      shift
+      ;;
+    --environment)
+      environment="$2"
+      shift
+      ;;
+    --help|-h)
+      usage
+      exit 0
+      ;;
+  esac
+
+  shift
+done
+
+if [ -z "$T" -o -z "$host_id" ]; then
+  usage
+  exit 1
+fi
+
+DIRECTORY=$(cd `dirname $0` && pwd)
+PYTHON_DIRECTORY="$DIRECTORY/../python"
+SCRIPTS="$DIRECTORY/../scripts"
+
+if [ -n "$reinstall_first" ]; then
+  echo "Réinstallation du système"
+  python $PYTHON_DIRECTORY/reinstall_$T.py --use-current "$host_id"
+
+  read -p "Appuyer sur une touche quand le serveur est prêt" ready
+fi
+
+if [ -z "$no_reboot" -a -z "$no_reboot_start" ]; then
+  echo "Patienter le temps du reboot"
+  python $PYTHON_DIRECTORY/reboot_$T.py --rescue "$host_id"
+
+  read -p "Appuyer sur une touche quand l'instance a redémarré" ready
+fi
+
+if [ -z "$password" ]; then
+  stty -echo
+  read -p "Mot de passe reçu par e-mail : " password; echo
+  stty echo
+fi
+
+ARCH_DIR=`mktemp -d`
+ARCH_HOST_SCRIPT="$SCRIPTS/$T/arch_host_script.sh"
+if [ -f "$SCRIPTS/$T/arch_chroot_script.sh" ]; then
+  ARCH_CHROOT_SCRIPT="$SCRIPTS/$T/arch_chroot_script.sh"
+else
+  ARCH_CHROOT_SCRIPT=""
+fi
+ARCH_INSTALL_SCRIPT="$SCRIPTS/arch_install_script.sh"
+ARCH_HOST_PUPPET_CONFIGURATION_SCRIPT="$SCRIPTS/$T/arch_host_puppet_configuration_script.sh"
+ARCH_PUPPET_CONFIGURATION_SCRIPT="$SCRIPTS/arch_puppet_configuration_script.sh"
+ARCH_PUPPET_INITIAL_CONFIGURATION="$ARCH_DIR/puppet_variables.json"
+
+trap "rm -rf $ARCH_DIR" EXIT
+
+#### Base installation stage
+python $PYTHON_DIRECTORY/get_initial_configuration_$T.py $host_id > $ARCH_PUPPET_INITIAL_CONFIGURATION
+host_address=$(python $PYTHON_DIRECTORY/get_initial_configuration_$T.py $host_id | jq -r '.ips.v4.ipAddress')
+
+dest="$host_user@$host_address"
+files="$ARCH_HOST_SCRIPT $ARCH_CHROOT_SCRIPT $ARCH_PUPPET_INITIAL_CONFIGURATION $ARCH_INSTALL_SCRIPT"
+
+$SCRIPTS/send_and_run.tcl "$dest" "$password" "$git_branch" "$environment" $files
+
+### Role specific stage
+read -p "Press key when LDAP is configured" i
+
+files="$ARCH_HOST_PUPPET_CONFIGURATION_SCRIPT $ARCH_PUPPET_CONFIGURATION_SCRIPT"
+
+$SCRIPTS/send_and_run.tcl "$dest" "$password" "$git_branch" "$environment" $files
+
+### Installation finished
+if [ -z "$no_reboot" -a -z "$no_reboot_end" ]; then
+  echo "Rebooting"
+  python $PYTHON_DIRECTORY/reboot_$T.py --local "$host_id"
+fi
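
A hypothetical invocation of the new unified script (the vps id, branch and environment below are placeholders; see the usage block at the top of the script for the full option list):

  $ bin/install_script.sh --vps-id vpsXXXXX.ovh.net --reinstall-first --git-branch dev --environment integration

Choosing --vps-id or --instance-id selects the ovh_vps_ssd or ovh_cloud_instance code path, replacing the two per-platform scripts deleted below.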
diff --git a/bin/install_script_ovh_cloud_instance.sh b/bin/install_script_ovh_cloud_instance.sh
deleted file mode 100755 (executable)
index 26e410e..0000000
+++ /dev/null
@@ -1,182 +0,0 @@
-#!/bin/bash
-
-usage() {
-cat <<EOF
-  $0 [options]
-  --help,-h               This help
-  --instance-id id        Id of the instance
-  --reinstall-first       Start with reinstalling the vps
-  --host-user user        Use another user (default: arch)
-  --no-reboot             Don't reboot
-  --no-reboot-start       Don't reboot to rescue at the beginning
-  --no-reboot-end         Don't reboot to normal at the end
-  --git-branch            Use another puppet branch (default: master)
-  --environment           Environment to use for the installl (default: production)
-EOF
-}
-
-set -e
-
-host_user=arch
-git_branch=master
-environment=production
-
-while [ -n "$1" ]; do
-  case "$1" in
-    --instance-id)
-      instance_id="$2"
-      shift
-      ;;
-    --reinstall-first)
-      reinstall_first=1
-      ;;
-    --host-user)
-      host_user="$2"
-      shift
-      ;;
-    --no-reboot)
-      no_reboot=1
-      ;;
-    --no-reboot-start)
-      no_reboot_start=1
-      ;;
-    --no-reboot-end)
-      no_reboot_end=1
-      ;;
-    --git-branch)
-      git_branch="$2"
-      shift
-      ;;
-    --environment)
-      environment="$2"
-      shift
-      ;;
-    --help|-h)
-      usage
-      exit 0
-      ;;
-  esac
-
-  shift
-done
-
-DIRECTORY=$(cd `dirname $0` && pwd)
-PYTHON_DIRECTORY="$DIRECTORY/../python"
-
-if [ -z "$instance_id" ]; then
-  read -p "Id de l'instance : " instance_id
-fi
-
-if [ -n "$reinstall_first" ]; then
-  echo "Réinstallation du système"
-  python $PYTHON_DIRECTORY/reinstall_cloud_instance.py --use-current "$instance_id"
-
-  read -p "Appuyer sur une touche quand le serveur est prêt" ready
-fi
-
-if [ -z "$no_reboot" -a -z "$no_reboot_start" ]; then
-  echo "Patienter le temps du reboot"
-  python $PYTHON_DIRECTORY/reboot_cloud_instance.py --rescue "$instance_id"
-
-  read -p "Appuyer sur une touche quand l'instance a redémarré" ready
-fi
-
-ARCH_DIR=`mktemp -d`
-ARCH_HOST_SCRIPT="$ARCH_DIR/arch_host_script.sh"
-ARCH_INSTALL_SCRIPT="$ARCH_DIR/arch_install_script.sh"
-ARCH_HOST_PUPPET_CONFIGURATION_SCRIPT="$ARCH_DIR/arch_host_puppet_configuration_script.sh"
-ARCH_PUPPET_CONFIGURATION_SCRIPT="$ARCH_DIR/arch_puppet_configuration_script.sh"
-ARCH_PUPPET_INITIAL_CONFIGURATION="$ARCH_DIR/puppet_variables.json"
-
-trap "rm -rf $ARCH_DIR" EXIT
-
-#### Base installation stage
-python $PYTHON_DIRECTORY/get_initial_configuration_cloud_instance.py $instance_id > $ARCH_PUPPET_INITIAL_CONFIGURATION
-host_address=$(python $PYTHON_DIRECTORY/get_initial_configuration_cloud_instance.py $instance_id | jq -r '.ips.v4.ipAddress')
-
-cat > $ARCH_HOST_SCRIPT <<EOF
-#!/bin/bash
-
-sudo haveged &
-sudo pacman -Sy --noconfirm arch-install-scripts
-
-DEVICE=/dev/vdb1
-MOUNTPOINT=/mnt
-
-UUID=\$(lsblk -rno UUID "\$DEVICE")
-PART="/dev/disk/by-uuid/\$UUID"
-
-# mkfs.ext4 -F -U "\$UUID" "\$DEVICE"
-sudo mount "\$DEVICE" /mnt
-
-##### FIXME: mkfs.ext4 would be better ####
-for i in /mnt/*; do
-  if [ "\$i" = "/mnt/boot" ]; then
-    # keep /boot/grub
-    sudo rm -f \$i/*
-  else
-    sudo rm -rf \$i
-  fi
-done
-##### /FIXME ####
-
-sudo pacstrap /mnt base git puppet
-
-echo "\$PART / auto defaults 0 1" | sudo tee /mnt/etc/fstab
-
-sudo cp /tmp/arch_install_script.sh "\$MOUNTPOINT/root/"
-sudo cp /tmp/puppet_variables.json "\$MOUNTPOINT/root/"
-
-sudo arch-chroot "\$MOUNTPOINT" /root/arch_install_script.sh
-EOF
-
-cat > $ARCH_INSTALL_SCRIPT <<EOF
-CODE_PATH="/etc/puppetlabs/code"
-rm -rf \$CODE_PATH
-git clone -b $git_branch --recursive https://git.immae.eu/perso/Immae/Projets/Puppet.git \$CODE_PATH
-puppet apply --environment $environment --tags base_installation --test \$CODE_PATH/manifests/site.pp
-# The password seed requires puppet to be run twice
-puppet apply --environment $environment --tags base_installation --test \$CODE_PATH/manifests/site.pp
-EOF
-
-chmod a+x $ARCH_HOST_SCRIPT $ARCH_INSTALL_SCRIPT
-
-expect -f - <<EOF
-set timeout -1
-spawn scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o CheckHostIP=no $ARCH_PUPPET_INITIAL_CONFIGURATION $ARCH_HOST_SCRIPT $ARCH_INSTALL_SCRIPT $host_user@$host_address:/tmp
-expect eof
-spawn ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o CheckHostIP=no $host_user@$host_address /tmp/arch_host_script.sh
-expect eof
-EOF
-
-### Role specific stage
-read -p "Press key when LDAP is configured" i
-
-cat > $ARCH_PUPPET_CONFIGURATION_SCRIPT <<EOF
-CODE_PATH="/etc/puppetlabs/code"
-puppet apply --environment $environment --tags base_installation --test \$CODE_PATH/manifests/site.pp
-EOF
-
-cat > $ARCH_HOST_PUPPET_CONFIGURATION_SCRIPT <<EOF
-MOUNTPOINT=/mnt
-
-sudo cp /tmp/arch_puppet_configuration_script.sh "\$MOUNTPOINT/root/"
-
-sudo arch-chroot "\$MOUNTPOINT" /root/arch_puppet_configuration_script.sh
-EOF
-
-chmod a+x $ARCH_PUPPET_CONFIGURATION_SCRIPT $ARCH_HOST_PUPPET_CONFIGURATION_SCRIPT
-
-expect -f - <<EOF
-set timeout -1
-spawn scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o CheckHostIP=no $ARCH_PUPPET_CONFIGURATION_SCRIPT $ARCH_HOST_PUPPET_CONFIGURATION_SCRIPT $host_user@$host_address:/tmp
-expect eof
-spawn ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o CheckHostIP=no $host_user@$host_address /tmp/arch_host_puppet_configuration_script.sh
-expect eof
-EOF
-
-### Installation finished
-if [ -z "$no_reboot" -a -z "$no_reboot_end" ]; then
-  echo "Rebooting"
-  python $PYTHON_DIRECTORY/reboot_cloud_instance.py --local "$instance_id"
-fi
diff --git a/bin/install_script_ovh_vps_ssd.sh b/bin/install_script_ovh_vps_ssd.sh
deleted file mode 100755 (executable)
index 6b1aa39..0000000
+++ /dev/null
@@ -1,237 +0,0 @@
-#!/bin/bash
-
-usage() {
-cat <<EOF
-  $0 [options]
-  --help,-h               This help
-  --vps vps_name          Name of the vps
-  --password password     Password of the vps (only useful in case of no reboot)
-  --reinstall-first       Start with reinstalling the vps
-  --host-user user        Use another user (default: root)
-  --no-reboot             Don't reboot
-  --no-reboot-start       Don't reboot to rescue at the beginning
-  --no-reboot-end         Don't reboot to normal at the end
-  --git-branch            Use another puppet branch (default: master)
-  --environment           Environment to use for the installl (default: production)
-EOF
-}
-
-set -e
-
-host_user=root
-git_branch=master
-environment=production
-
-while [ -n "$1" ]; do
-  case "$1" in
-    --vps)
-      vps_name="$2"
-      shift
-      ;;
-    --reinstall-first)
-      reinstall_first=1
-      ;;
-    --password)
-      password="$2"
-      shift
-      ;;
-    --host-user)
-      host_user="$2"
-      shift
-      ;;
-    --no-reboot)
-      no_reboot=1
-      ;;
-    --no-reboot-start)
-      no_reboot_start=1
-      ;;
-    --no-reboot-end)
-      no_reboot_end=1
-      ;;
-    --git-branch)
-      git_branch="$2"
-      shift
-      ;;
-    --environment)
-      environment="$2"
-      shift
-      ;;
-    --help|-h)
-      usage
-      exit 0
-      ;;
-  esac
-
-  shift
-done
-
-DIRECTORY=$(cd `dirname $0` && pwd)
-PYTHON_DIRECTORY="$DIRECTORY/../python"
-
-if [ -z "$vps_name" ]; then
-  read -p "Nom du vps : " vps_name
-fi
-
-if [ -n "$reinstall_first" ]; then
-  echo "Réinstallation du système"
-  python $PYTHON_DIRECTORY/reinstall_vps_server.py --use-current "$vps_name"
-
-  read -p "Appuyer sur une touche quand le serveur est prêt" ready
-fi
-
-if [ -z "$no_reboot" -a -z "$no_reboot_start" ]; then
-  echo "Patienter le temps du reboot"
-  python $PYTHON_DIRECTORY/reboot_vps_server.py --rescue "$vps_name"
-fi
-
-if [ -z "$password" ]; then
-  stty -echo
-  read -p "Mot de passe reçu par e-mail : " password; echo
-  stty echo
-fi
-
-ARCH_DIR=`mktemp -d`
-ARCH_HOST_SCRIPT="$ARCH_DIR/arch_host_script.sh"
-ARCH_CHROOT_SCRIPT="$ARCH_DIR/arch_chroot_script.sh"
-ARCH_INSTALL_SCRIPT="$ARCH_DIR/arch_install_script.sh"
-ARCH_HOST_PUPPET_CONFIGURATION_SCRIPT="$ARCH_DIR/arch_host_puppet_configuration_script.sh"
-ARCH_PUPPET_CONFIGURATION_SCRIPT="$ARCH_DIR/arch_puppet_configuration_script.sh"
-ARCH_PUPPET_INITIAL_CONFIGURATION="$ARCH_DIR/puppet_variables.json"
-
-trap "rm -rf $ARCH_DIR" EXIT
-
-#### Base installation stage
-python $PYTHON_DIRECTORY/get_initial_configuration.py $vps_name > $ARCH_PUPPET_INITIAL_CONFIGURATION
-
-cat > $ARCH_HOST_SCRIPT <<EOF
-#!/bin/bash
-
-apt-get update
-apt-get install -y haveged
-haveged &
-
-cd /tmp
-
-LATEST=\$(curl https://mirrors.kernel.org/archlinux/iso/latest/sha1sums.txt | grep "bootstrap" | head -n1)
-SHA1=\$(echo "\$LATEST" | cut -d' ' -f1)
-NAME=\$(echo "\$LATEST" | cut -d' ' -f3)
-
-curl -O "https://mirrors.kernel.org/archlinux/iso/latest/\$NAME"
-
-tar -xzf "\$NAME"
-
-echo 'Server = http://archlinux.mirrors.ovh.net/archlinux/\$repo/os/\$arch' > /tmp/root.x86_64/etc/pacman.d/mirrorlist
-
-DEVICE_STR=\$(cat /proc/mounts | grep "/dev/[sv]d.. /mnt/")
-DEVICE=\$(echo "\$DEVICE_STR" | cut -d' ' -f1)
-MOUNTPOINT=\$(echo "\$DEVICE_STR" | cut -d' ' -f2)
-
-umount "\$DEVICE"
-UUID=\$(lsblk -rno UUID "\$DEVICE")
-
-echo "\$UUID" > /tmp/root.x86_64/device_uuid
-
-cp /tmp/arch_chroot_script.sh /tmp/root.x86_64/
-
-/tmp/root.x86_64/bin/arch-chroot /tmp/root.x86_64/ /arch_chroot_script.sh
-
-mount "\$DEVICE"
-
-cp /tmp/arch_install_script.sh "\$MOUNTPOINT/root/"
-cp /tmp/puppet_variables.json "\$MOUNTPOINT/root/"
-
-/tmp/root.x86_64/bin/arch-chroot "\$MOUNTPOINT" /root/arch_install_script.sh
-EOF
-
-
-cat > $ARCH_CHROOT_SCRIPT <<EOF
-#!/bin/bash
-
-pacman-key --init
-pacman-key --populate archlinux
-
-UUID=\$(cat /device_uuid)
-PART="/dev/disk/by-uuid/\$UUID"
-DEVICE=\$(realpath "\$PART")
-
-# mkfs.ext4 -F -U "\$UUID" "\$DEVICE"
-mount "\$DEVICE" /mnt
-
-##### FIXME: mkfs.ext4 would be better ####
-for i in /mnt/*; do
-  if [ "\$i" = "/mnt/boot" ]; then
-    # keep /boot/grub
-    rm -f \$i/*
-  else
-    rm -rf \$i
-  fi
-done
-##### /FIXME ####
-
-pacstrap /mnt base git puppet
-
-echo "\$PART / auto defaults 0 1" > /mnt/etc/fstab
-
-umount /mnt
-EOF
-
-cat > $ARCH_INSTALL_SCRIPT <<EOF
-CODE_PATH="/etc/puppetlabs/code"
-rm -rf \$CODE_PATH
-git clone -b $git_branch --recursive https://git.immae.eu/perso/Immae/Projets/Puppet.git \$CODE_PATH
-puppet apply --environment $environment --tags base_installation --test \$CODE_PATH/manifests/site.pp
-# The password seed requires puppet to be run twice
-puppet apply --environment $environment --tags base_installation --test \$CODE_PATH/manifests/site.pp
-EOF
-
-chmod a+x $ARCH_HOST_SCRIPT $ARCH_CHROOT_SCRIPT $ARCH_INSTALL_SCRIPT
-
-expect -f - <<EOF
-set timeout -1
-spawn scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o CheckHostIP=no $ARCH_PUPPET_INITIAL_CONFIGURATION $ARCH_HOST_SCRIPT $ARCH_CHROOT_SCRIPT $ARCH_INSTALL_SCRIPT $host_user@$vps_name:/tmp
-expect "assword:"
-send "$password\n"
-expect eof
-spawn ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o CheckHostIP=no $host_user@$vps_name /tmp/arch_host_script.sh
-expect "assword:"
-send "$password\r"
-expect eof
-EOF
-
-### Role specific stage
-read -p "Press key when LDAP is configured" i
-
-cat > $ARCH_PUPPET_CONFIGURATION_SCRIPT <<EOF
-CODE_PATH="/etc/puppetlabs/code"
-puppet apply --environment $environment --tags base_installation --test \$CODE_PATH/manifests/site.pp
-EOF
-
-cat > $ARCH_HOST_PUPPET_CONFIGURATION_SCRIPT <<EOF
-DEVICE_STR=\$(cat /proc/mounts | grep "/dev/[sv]d.. /mnt/")
-DEVICE=\$(echo "\$DEVICE_STR" | cut -d' ' -f1)
-MOUNTPOINT=\$(echo "\$DEVICE_STR" | cut -d' ' -f2)
-
-cp /tmp/arch_puppet_configuration_script.sh "\$MOUNTPOINT/root/"
-
-/tmp/root.x86_64/bin/arch-chroot "\$MOUNTPOINT" /root/arch_puppet_configuration_script.sh
-EOF
-
-chmod a+x $ARCH_PUPPET_CONFIGURATION_SCRIPT $ARCH_HOST_PUPPET_CONFIGURATION_SCRIPT
-
-expect -f - <<EOF
-set timeout -1
-spawn scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o CheckHostIP=no $ARCH_PUPPET_CONFIGURATION_SCRIPT $ARCH_HOST_PUPPET_CONFIGURATION_SCRIPT $host_user@$vps_name:/tmp
-expect "assword:"
-send "$password\n"
-expect eof
-spawn ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o CheckHostIP=no $host_user@$vps_name /tmp/arch_host_puppet_configuration_script.sh
-expect "assword:"
-send "$password\r"
-expect eof
-EOF
-
-### Installation finished
-if [ -z "$no_reboot" -a -z "$no_reboot_end" ]; then
-  echo "Rebooting"
-  python $PYTHON_DIRECTORY/reboot_vps_server.py --local "$vps_name"
-fi
index 5b21dca75061bd0b0d3561dc50414e32178d563a..094e0fff4c04c15a307951d334109fda8fcc483e 100644 (file)
@@ -1,11 +1,17 @@
 ---
 lookup_options:
-  base_installation::mounts:
+  profile::fstab::mounts:
     merge: unique
   classes:
     merge: deep
   base_installation::system_users:
     merge: unique
+  letsencrypt::hosts:
+    merge: unique
+  role::backup::backups:
+    merge: unique
+  profile::known_hosts::hosts:
+    merge: unique
 
 classes:
   stdlib: ~
@@ -16,22 +22,21 @@ base_installation::ldap_cn: "%{facts.ec2_metadata.hostname}"
 base_installation::ldap_server: "ldap.immae.eu"
 base_installation::ldap_uri: "ldaps://ldap.immae.eu"
 # FIXME: get all mounts without needing that hack?
-base_installation::mounts:
-  - "%{facts.ldapvar.self.vars.mounts.0}"
-  - "%{facts.ldapvar.self.vars.mounts.1}"
 base_installation::puppet_conf_path: "/etc/puppetlabs/puppet"
 base_installation::puppet_code_path: "/etc/puppetlabs/code"
 base_installation::puppet_pass_seed: "/etc/puppetlabs/puppet/password_seed"
 base_installation::puppet_ssl_path: "/etc/puppetlabs/ssl"
 base_installation::system_locales: ["fr_FR.UTF-8", "en_US.UTF-8"]
 base_installation::system_timezone: "Europe/Paris"
-base_installation::system_users:
-  - userid: 1000
-    username: "immae"
-    groups: ["wheel"]
-    keys:
-      - host: "immae.eu"
-        key: "AAAAB3NzaC1yc2EAAAADAQABAAABAQDi5PgLBwMRyRwzJPnSgUyRAuB9AAxMijsw1pR/t/wmxQne1O5fIPOleHx+D8dyZbwm+XkzlcJpgT0Qy3qC9J8BPhshJvO/tA/8CI/oS/FE0uWsyACH1DMO2dk4gRRZGSE9IuzDMRPlnfZ3n0tdsPzzv3GH4It/oPIgsvkTowKztGLQ7Xmjr5BxzAhXcIQymqA0U3XWHSdWvnSRDaOFG0PDoVMS85IdwlviVKLnV5Sstb4NC/P28LFfgvW8DO/XrOqujgDomqTmR41dK/AyrGGOb2cQUMO4l8Oa+74aOyKaB61rr/rJkr+wCbEttkTvgFa6zZygSk3edfiWE2rgn4+v"
-        key_type: "ssh-rsa"
-xmr_stak::mining_pool: "pool.minexmr.com:7777"
-xmr_stak::wallet: "44CA8TxTFYbQqN2kLyk8AnB6Ghz4mcbGpYC2EyXW7A8H9QspvWnTjDn39XUZDPrFwPa5JNwt4TmAxcooPWv4SaJqL87Bcdo"
+base_installation::system_users: [] # Fetched via ldap
+profile::fstab::mounts:
+  - "%{facts.ldapvar.self.vars.mounts.0}"
+  - "%{facts.ldapvar.self.vars.mounts.1}"
+profile::xmr_stak::mining_pool: "" # Fetched via ldap
+profile::xmr_stak::wallet: "" # Fetched via ldap
+profile::mail::mailhub: "" # Fetched via ldap
+role::backup::mailto: "" # Fetched via ldap
+role::backup::backups: [] # Fetched via ldap
+profile::known_hosts::hosts: [] # Fetched via ldap
+letsencrypt::email: ~ # Fetched via ldap
+letsencrypt::try_for_real_hostname: true
diff --git a/environments/global/roles/backup.yaml b/environments/global/roles/backup.yaml
new file mode 100644 (file)
index 0000000..cdd5f09
--- /dev/null
@@ -0,0 +1,11 @@
+---
+classes:
+  role::backup: ~
+role::backup::user: "backup"
+role::backup::group: "backup"
+base_installation::system_users:
+  - username: "%{lookup('role::backup::user')}"
+    userid: 2000
+    system: true
+    password: "!!"
+
index 3d36e711b79b02706062000fcad8dd35e12d5934..f875c1b8a0a1f736e8a5fd01ba9a79521ba7412e 100644 (file)
@@ -1,4 +1,19 @@
 ---
 classes:
   role::cryptoportfolio: ~
-cryptoportfolio::slack_webhook: "%{ldapvar.self.vars.cf_slack_webhook.0}"
+letsencrypt::hosts: "%{lookup('base_installation::system_hostname')}"
+role::cryptoportfolio::user: "cryptoportfolio"
+role::cryptoportfolio::group: "cryptoportfolio"
+role::cryptoportfolio::home: "/home/cryptoportfolio"
+role::cryptoportfolio::env: "prod"
+role::cryptoportfolio::webhook_url: "%{ldapvar.self.vars.cf_slack_webhook.0}"
+role::cryptoportfolio::pg_db: "cryptoportfolio"
+role::cryptoportfolio::pg_user: "cryptoportfolio"
+role::cryptoportfolio::pg_user_replication: "cryptoportfolio_replication"
+role::cryptoportfolio::web_host: "%{lookup('base_installation::system_hostname')}"
+role::cryptoportfolio::web_port: ""
+role::cryptoportfolio::web_ssl: true
+base_installation::system_users:
+  - username: "%{lookup('role::cryptoportfolio::user')}"
+    system: true
+    password: "!!"
index 496b7418eb5cf9cd06334358c28251aff12eb07f..5bfdf9ac0c2c8be169c140f04df1b2470ccfbe28 100644 (file)
@@ -6,4 +6,5 @@ classes:
 base_installation::system_hostname: "%{ldapvar.self.vars.host.0}"
 base_installation::real_hostname: "%{facts.ec2_metadata.hostname}.v.immae.eu"
 base_installation::ldap_cert_path: "/etc/ssl/certs/ca-certificates.crt"
-ssl::try_letsencrypt_for_real_hostname: true
+letsencrypt::try_for_real_hostname: true
+profile::xmr_stak::cpulimit: "30"
index 73f7a45cce65666566827b1ae608800c154d323d..8dd512c593fdd67e7e58a08981c3fb6ff5af16c0 100644 (file)
@@ -7,4 +7,5 @@ base_installation::system_hostname: "%{ldapvar.self.vars.host.0}"
 base_installation::real_hostname: "%{facts.ec2_metadata.hostname}.ovh.net"
 base_installation::grub_device: "/dev/sdb"
 base_installation::ldap_cert_path: "/etc/ssl/certs/ca-certificates.crt"
-ssl::try_letsencrypt_for_real_hostname: false
+letsencrypt::try_for_real_hostname: false
+profile::xmr_stak::cpulimit: "90"
index 5a9a6d68958ed81a698224b79492e88b64fbdd0b..61d40d870c2959d504563c4a0269b7a79df0ae7d 100644 (file)
@@ -8,6 +8,10 @@ defaults:
 hierarchy:
   - name: "Initialization variables"
     path: "/root/puppet_variables.json"
+    data_hash: json_data
+
+  - name: "Puppet ldap variables"
+    data_hash: ldap_data
 
   - name: "Per-role environment data"
     mapped_paths: [ldapvar.self.vars.roles, role, "roles/%{role}.yaml"]
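
With the "Puppet ldap variables" layer in the hierarchy, a value declared empty in common.yaml and fetched via LDAP (for example profile::xmr_stak::wallet) can be checked directly on the node; a sketch, assuming puppet lookup is run locally as root in the production environment:

  $ puppet lookup --environment production --explain profile::xmr_stak::wallet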
index 9825bce0214f536b24f83f27f2fdca7d7ba6ab9e..6b8eb929e8c229b3c3a370d2697a5c647c151c94 100644 (file)
@@ -1,5 +1,3 @@
 ---
-cryptoportfolio::front_version: v0.0.2-3-g6200f9a
-cryptoportfolio::front_sha256: 69d31251ecd4fcea46d93dfee0184b1171019a765b6744b84f6eec6b10e5818f
-cryptoportfolio::bot_version: v0.5-8-g34eb08f
-cryptoportfolio::bot_sha256: f5b99c4a1cc4db0228f757705a5a909aa301e42787bc5842f8ba442fec0d3fd1
+role::cryptoportfolio::front_version: v0.0.2-3-g6200f9a
+role::cryptoportfolio::front_sha256: 69d31251ecd4fcea46d93dfee0184b1171019a765b6744b84f6eec6b10e5818f
index c9328e1de282ff76702deb2e75218d7ac5e0fdb4..566c7f2c57255c6c939ed4423584184ac3a15325 100644 (file)
@@ -1,5 +1,5 @@
 ---
-cryptoportfolio::front_version: v0.0.2-3-g6200f9a
-cryptoportfolio::front_sha256: 69d31251ecd4fcea46d93dfee0184b1171019a765b6744b84f6eec6b10e5818f
-cryptoportfolio::bot_version: v0.5.1
-cryptoportfolio::bot_sha256: 733789711365b2397bd996689af616a6789207d26c71a31ad1af68620b267d54
+role::cryptoportfolio::front_version: v0.0.2-3-g6200f9a
+role::cryptoportfolio::front_sha256: 69d31251ecd4fcea46d93dfee0184b1171019a765b6744b84f6eec6b10e5818f
+role::cryptoportfolio::bot_version: v0.5.1
+role::cryptoportfolio::bot_sha256: 733789711365b2397bd996689af616a6789207d26c71a31ad1af68620b267d54
index f922d2b665f0c0782f0c25510b43b4a5afd00a5f..3d40ad29b31a0c3a861a91135d041f1cb9e57383 100644 (file)
@@ -1,5 +1,5 @@
 node default {
-  lookup('classes', Hash, 'deep').each |$class_name, $class_hash| {
+  lookup('classes').each |$class_name, $class_hash| {
     if empty($class_hash) {
       include $class_name
     } else {
index ac5e3ff1cc57456ad93054d7fff9a697a1f7f035..f5c21a72aa796fd5b475bee2ab6805781f1b20ad 100644 (file)
@@ -1,7 +1,7 @@
 #!/bin/bash
-## Run Puppet locally using puppet apply
+## Run Puppet locally using puppet_apply
 git submodule update --init
-/usr/bin/puppet apply `pwd`/manifests/site.pp
+/usr/local/sbin/puppet_apply `pwd`/manifests/site.pp
 
 ## Log status of the Puppet run
 if [ $? -eq 0 ]
diff --git a/modules/base_installation/files/scripts/puppet_apply b/modules/base_installation/files/scripts/puppet_apply
new file mode 100644 (file)
index 0000000..69673cc
--- /dev/null
@@ -0,0 +1,23 @@
+#!/bin/bash
+
+lockfile=/var/run/puppet-apply.lock
+path=`dirname $0`
+path=`cd $path/..; pwd`
+
+if [ $(id -u) -gt 0 ]; then
+  echo "You must be root to run this script." >&2
+  exit 2
+fi
+
+if (set -o noclobber; echo "$$" > "$lockfile") 2> /dev/null; then
+  trap 'rm -f "$lockfile"; exit $?' INT TERM EXIT
+
+  puppet apply "$@"
+
+  rm -f "$lockfile"
+  trap - INT TERM EXIT
+else
+  echo "Failed to acquire lockfile: $lockfile." >&2
+  echo "Held by $(cat $lockfile 2>/dev/null)" >&2
+  exit 1
+fi
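
This wrapper is what the cron jobs and the post-merge hook now call instead of puppet apply; run by hand it would look like the following, assuming the default code path from common.yaml:

  $ sudo /usr/local/sbin/puppet_apply --test /etc/puppetlabs/code/manifests/site.pp

A second concurrent run exits with status 1 and prints the PID holding /var/run/puppet-apply.lock.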
index 6743044152de39e24c0ef3bd7e0b37b33b40448a..0350e6e17e7e78ebe4eeb4029c289490d937fb8e 100644 (file)
@@ -11,4 +11,4 @@ fi
 git reset --hard origin/$branch
 
 git submodule update --init
-puppet apply --test manifests/site.pp
+puppet_apply --test manifests/site.pp
index 3ee6623080709c013f1281ab3f8c39a52eac21da..08d58e4cb1a7b96f406bbe772a03c4f393ce0e12 100644 (file)
@@ -27,16 +27,18 @@ begin
 
         connection.search(base, scope, filter) do |entry|
           data_ = entry.to_hash
-          data_['vars'] = (data_[Puppet[:ldapstackedattrs]] || [])
-            .map { |var| var.split("=", 2) }
-            .group_by { |(key, value)| key }
-            .map { |key, value| [key, value.map(&:last)] }
-            .to_h
+          if data_["objectClass"].any? { |class_| class_ == "puppetClient" }
+            data_['vars'] = (data_[Puppet[:ldapstackedattrs]] || [])
+              .map { |var| var.split("=", 2) }
+              .group_by { |(key, value)| key }
+              .map { |key, value| [key, value.map(&:last)] }
+              .to_h
 
-          data[:other] << data_
+            data[:other] << data_
 
-          if data_["cn"].any? { |cn| cn == host }
-            data[:self] = data_
+            if data_["cn"].any? { |cn| cn == host }
+              data[:self] = data_
+            end
           end
         end
 
diff --git a/modules/base_installation/lib/puppet/functions/ldap_data.rb b/modules/base_installation/lib/puppet/functions/ldap_data.rb
new file mode 100644 (file)
index 0000000..0c92d89
--- /dev/null
@@ -0,0 +1,46 @@
+require 'json'
+
+Puppet::Functions.create_function(:ldap_data) do
+  dispatch :ldap_data do
+    param 'Hash', :options
+    param 'Puppet::LookupContext', :context
+  end
+
+  def ldap_data(options, context)
+    begin
+      require 'ldap'
+      require 'puppet/util/ldap/connection'
+    rescue LoadError
+      context.not_found
+      return
+    end
+
+    if !context.cache_has_key("ldap_lookup")
+      begin
+        conn = Puppet::Util::Ldap::Connection.instance
+        conn.start
+        connection = conn.connection
+      rescue ::LDAP::ResultError => e
+        raise Puppet::ParseError, ("ldapquery(): LDAP ResultError - #{e.message}")
+      end
+
+      host = Facter.value('ec2_metadata')["hostname"]
+      base = Puppet[:ldapbase]
+      scope  = ::LDAP::LDAP_SCOPE_SUBTREE
+      filter = "(objectclass=*)"
+
+      data = {}
+      connection.search(base, scope, filter) do |entry|
+        data_ = entry.to_hash
+        jsons = data_["immaePuppetJson"] || []
+        jsons.each do |json|
+          data.merge!(JSON.parse(json))
+        end
+      end
+
+      context.cache("ldap_lookup", data)
+    end
+
+    context.cached_value("ldap_lookup")
+  end
+end
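
To inspect the raw data this lookup backend consumes, the immaePuppetJson attributes can be listed with a standard ldapsearch (the bind DN and LDAP_BASE are placeholders; the server matches base_installation::ldap_uri from common.yaml):

  $ ldapsearch -H ldaps://ldap.immae.eu -x -D "cn=root,$LDAP_BASE" -W -b "$LDAP_BASE" "(objectclass=*)" immaePuppetJson

Each returned value is parsed as JSON and merged into a single hash that is cached for the rest of the lookup run.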
index 72f2d8f709b97b98566b8e54cd1123638fd336a8..2235470b6a2c93d122822b824b5f6f5e3f991e5d 100644 (file)
@@ -19,15 +19,22 @@ class base_installation::cronie inherits base_installation {
     }
     cron { 'puppet-apply':
       ensure  => present,
-      command => "cd $base_installation::puppet_code_path ; puppet apply $base_installation::puppet_code_path/manifests/site.pp",
+      command => "cd $base_installation::puppet_code_path ; /usr/local/sbin/puppet_apply $base_installation::puppet_code_path/manifests/site.pp",
       user    => root,
       minute  => '*/20'
     }
     cron { 'puppet-apply-reboot':
       ensure  => present,
-      command => "cd $base_installation::puppet_code_path ; puppet apply $base_installation::puppet_code_path/manifests/site.pp",
+      command => "cd $base_installation::puppet_code_path ; /usr/local/sbin/puppet_apply $base_installation::puppet_code_path/manifests/site.pp",
       user    => root,
       special => "reboot"
     }
+    cron { 'pacman_keys':
+      ensure  => present,
+      command => "pacman-key --populate archlinux",
+      user    => root,
+      minute  => "0",
+      hour    => "1",
+    }
   }
 }
index cb1cdda74808093bfebc72ab72c2095359dc2d6b..998f8ff137015a497828de28165a93d45ba2b285 100644 (file)
@@ -6,7 +6,6 @@ class base_installation (
   Optional[String]        $ldap_dn          = $base_installation::params::ldap_dn,
   Optional[String]        $ldap_server      = $base_installation::params::ldap_server,
   Optional[String]        $ldap_uri         = $base_installation::params::ldap_uri,
-  Optional[Array[String]] $mounts           = $base_installation::params::mounts,
   Optional[String]        $puppet_code_path = $base_installation::params::puppet_code_path,
   Optional[String]        $puppet_conf_path = $base_installation::params::puppet_conf_path,
   Optional[String]        $puppet_pass_seed = $base_installation::params::puppet_pass_seed,
@@ -31,5 +30,4 @@ class base_installation (
   contain ::base_installation::users
   contain ::base_installation::package_managers
   contain ::base_installation::puppet
-  contain ::base_installation::fstab
 }
index 2a9fe6a6be21bf9f34d9f02b5cc74edcaf087403..5ade838ae0b70430a9ad2b9a6ea2428dcfb86ad0 100644 (file)
@@ -10,7 +10,6 @@ class base_installation::params {
   $ldap_cert_path   = "/etc/ssl/certs/ca-certificates.crt"
   $ldap_uri         = "ldaps://ldap.example.com"
   $ldap_server      = "ldap.example.com"
-  $mounts           = []
   $real_hostname    = "example.com"
   $system_hostname  = "example.com"
   $system_locales   = ["en_US.UTF-8"]
index b3ce49207a67007266df1ba5ca1b1883c76015ff..0cb43bcdd24635ef84df3f9f977e4ceeb5b566ae 100644 (file)
@@ -39,6 +39,12 @@ class base_installation::puppet (
     source => "puppet:///modules/base_installation/scripts/report_print.rb"
   }
 
+  file { '/usr/local/sbin/puppet_apply':
+    mode   => "0755",
+    ensure => present,
+    source => "puppet:///modules/base_installation/scripts/puppet_apply",
+  }
+
   unless empty(find_file($password_seed)) {
     $ldap_password = generate_password(24, $password_seed, "ldap")
     $ssha_ldap_seed = generate_password(5, $password_seed, "ldap_seed")
@@ -67,7 +73,7 @@ class base_installation::puppet (
       require => File[$base_installation::puppet_conf_path],
     }
 
-    $ips = lookup("ips") |$key| { {} }
+    $ips = lookup("ips", { 'default_value' => undef })
     file { "$base_installation::puppet_conf_path/host_ldap.info":
       content => template("base_installation/puppet/host_ldap.info.erb"),
       require => File[$base_installation::puppet_conf_path],
index c641f4b26105496e6deadf4f57f6c13cd71893f3..d7b4d61a48d86a1edcce1b602197947ee319e7eb 100644 (file)
@@ -38,7 +38,7 @@ class base_installation::services inherits base_installation {
      group   => "root"
   }
 
-  $ip6 = lookup("ips.v6") |$key| { {} }
+  $ip6 = lookup("ips.v6", { 'default_value' => undef })
   file { '/etc/systemd/network/en-dhcp.network':
     ensure  => "present",
     path    => "/etc/systemd/network/en-dhcp.network",
index 766c0f054210bdbfa34f0cd7d916a2e18465728a..f893c51e379f78e13e69fa1d0f5b5e01ecdc2e67 100644 (file)
@@ -26,22 +26,26 @@ class base_installation::users (
       ensure         => "present",
       groups         => $user[groups],
       managehome     => true,
+      system         => !!$user[system],
       home           => "/home/${user[username]}",
-      notify         => Exec["remove_password"],
+      notify         => Exec["remove_password:${user[username]}:${user[userid]}"],
       purge_ssh_keys => true
     }
 
-    exec { "remove_password":
+    exec { "remove_password:${user[username]}:${user[userid]}":
       command     => "/usr/bin/chage -d 0 ${user[username]} && /usr/bin/passwd -d ${user[username]}",
+      onlyif      => "/usr/bin/test -z '${user[password]}'",
       refreshonly => true
     }
 
-    $user[keys].each |$key| {
-      ssh_authorized_key { "${user[username]}@${key[host]}":
-        name => "${user[username]}@${key[host]}",
-        user => $user[username],
-        type => $key[key_type],
-        key  => $key[key],
+    if has_key($user, "keys") {
+      $user[keys].each |$key| {
+        ssh_authorized_key { "${user[username]}@${key[host]}":
+          name => "${user[username]}@${key[host]}",
+          user => $user[username],
+          type => $key[key_type],
+          key  => $key[key],
+        }
       }
     }
   }
index a71c6f361de36d1ee752b262c52d8d34c25b914b..9c79d3c8480bd6675c91d74bd495e3a08f64ac8e 100644 (file)
@@ -1,4 +1,4 @@
-#### Please add this node to LDAP:
+\e[0;35m#### Please add this node to LDAP:\e[0m
 ldapadd -D "cn=root,<%= @ldap_base %>" -W << 'EOF'
 dn: <%= @ldap_dn %>
 cn: <%= @ldap_cn %>
@@ -14,12 +14,13 @@ objectclass: ipHost
 environment: <%= @environment %>
 userpassword: {SSHA}<%= Base64.encode64(Digest::SHA1.digest(@ldap_password+@ssha_ldap_seed)+@ssha_ldap_seed).chomp! %>
 EOF
-#### Or modify an existing entry:
+\e[0;35m#### Or modify an existing entry:\e[0m
 ldapmodify -D "cn=root,<%= @ldap_base %>" -W << 'EOF'
 dn: <%= @ldap_dn %>
 changetype: modify
 replace: userPassword
 userpassword: {SSHA}<%= Base64.encode64(Digest::SHA1.digest(@ldap_password+@ssha_ldap_seed)+@ssha_ldap_seed).chomp! %>
+-
 replace: environment
 environment: <%= @environment %>
 <%- unless @ips.empty? -%>
index 8db3b97081230d5d89f8f1467eec05bf19b03f3c..5668cfa6dff56d5965ab2143fd0aeb322518d9c8 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 8db3b97081230d5d89f8f1467eec05bf19b03f3c
+Subproject commit 5668cfa6dff56d5965ab2143fd0aeb322518d9c8
index 8db58da99beb5aed81e044791bf7393213ba5682..382633bb3f8c60d802356b43ce973d019066db91 100644 (file)
@@ -67,13 +67,12 @@ class profile::apache {
     install_method  => "package",
     package_name    => "certbot",
     package_command => "certbot",
-    # FIXME
-    email           => 'sites+letsencrypt@mail.immae.eu',
+    email           => lookup('letsencrypt::email'),
   }
 
-  $real_hostname = lookup("base_installation::real_hostname") |$key| { {} }
+  $real_hostname = lookup("base_installation::real_hostname", { "default_value" => undef })
   unless empty($real_hostname) {
-    if (lookup("ssl::try_letsencrypt_for_real_hostname") |$key| { true }) {
+    if (lookup("letsencrypt::try_for_real_hostname", { "default_value" => true })) {
       letsencrypt::certonly { $real_hostname:
         before => Apache::Vhost["default_ssl"];
         default: * => $::profile::apache::letsencrypt_certonly_default;
@@ -110,6 +109,14 @@ class profile::apache {
     }
   }
 
+  lookup("letsencrypt::hosts", { "default_value" => [] }).each |$host| {
+    if ($host != $real_hostname) { # Done above already
+      letsencrypt::certonly { $host: ;
+        default: * => $letsencrypt_certonly_default;
+      }
+    }
+  }
+
   apache::vhost { "redirect_no_ssl":
     port          => '80',
     error_log     => false,
similarity index 73%
rename from modules/base_installation/manifests/fstab.pp
rename to modules/profile/manifests/fstab.pp
index f012e76f26a8c4a587539625e20a3bb582171702..5f2e58e2d9a2c3a45f25c902200529c0def2df20 100644 (file)
@@ -1,6 +1,6 @@
-class base_installation::fstab (
-  $mounts = $base_installation::mounts
-) inherits base_installation {
+class profile::fstab (
+  Optional[Array] $mounts = []
+) {
   $mounts.each |$mount| {
     unless empty($mount) {
       $infos = split($mount, ';')
diff --git a/modules/profile/manifests/known_hosts.pp b/modules/profile/manifests/known_hosts.pp
new file mode 100644 (file)
index 0000000..ed9ec8e
--- /dev/null
@@ -0,0 +1,11 @@
+class profile::known_hosts (
+  Optional[Array]  $hosts = []
+) {
+  $hosts.each |$host| {
+    sshkey { $host["name"]:
+      ensure => "present",
+      key    => $host["key"],
+      type   => $host["type"],
+    }
+  }
+}
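
The hosts array is expected to carry name/key/type entries matching Puppet's sshkey resource; purely as an illustration, such entries could be collected with:

  $ ssh-keyscan -t rsa,ed25519 git.immae.eu 2>/dev/null

Each output line (hostname, key type, key) maps onto the three fields consumed above.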
diff --git a/modules/profile/manifests/mail.pp b/modules/profile/manifests/mail.pp
new file mode 100644 (file)
index 0000000..cc47b77
--- /dev/null
@@ -0,0 +1,14 @@
+class profile::mail (
+  String            $mailhub,
+  Optional[Integer] $mailhub_port = 25,
+) {
+  ensure_packages(["s-nail", "ssmtp"])
+
+  $hostname = lookup("base_installation::real_hostname")
+
+  file { "/etc/ssmtp/ssmtp.conf":
+    ensure  => "present",
+    content =>  template("profile/mail/ssmtp.conf.erb"),
+  }
+}
+
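
A quick smoke test of the resulting ssmtp relay, assuming s-nail's mail(1) is on the PATH (the recipient is a placeholder):

  $ echo "test body" | mail -s "ssmtp test from $(hostname)" postmaster@example.com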
index 1024c6657c62ee3f71b1ed1b191efa5bf9fc4574..2cd1bcc652052beaf6e3e2fa121b50acd45a3c62 100644 (file)
@@ -1,5 +1,5 @@
 class profile::postgresql {
-  $password_seed = lookup("base_installation::puppet_pass_seed") |$key| { {} }
+  $password_seed = lookup("base_installation::puppet_pass_seed")
 
   class { '::postgresql::globals':
     encoding             => 'UTF-8',
@@ -32,7 +32,7 @@ class profile::postgresql {
     database    => 'all',
     user        => $pg_user,
     auth_method => 'ident',
-    order       => "a1",
+    order       => "00-01",
   }
   postgresql::server::pg_hba_rule { 'localhost access as postgres user':
     description => 'Allow localhost access to postgres user',
@@ -41,7 +41,7 @@ class profile::postgresql {
     user        => $pg_user,
     address     => "127.0.0.1/32",
     auth_method => 'md5',
-    order       => "a2",
+    order       => "00-02",
   }
   postgresql::server::pg_hba_rule { 'localhost ip6 access as postgres user':
     description => 'Allow localhost access to postgres user',
@@ -50,7 +50,7 @@ class profile::postgresql {
     user        => $pg_user,
     address     => "::1/128",
     auth_method => 'md5',
-    order       => "a3",
+    order       => "00-03",
   }
   postgresql::server::pg_hba_rule { 'deny access to postgresql user':
     description => 'Deny remote access to postgres user',
@@ -59,7 +59,7 @@ class profile::postgresql {
     user        => $pg_user,
     address     => "0.0.0.0/0",
     auth_method => 'reject',
-    order       => "a4",
+    order       => "00-04",
   }
 
   postgresql::server::pg_hba_rule { 'local access':
@@ -68,7 +68,7 @@ class profile::postgresql {
     database    => 'all',
     user        => 'all',
     auth_method => 'md5',
-    order       => "b1",
+    order       => "10-01",
   }
 
   postgresql::server::pg_hba_rule { 'local access with same name':
@@ -77,7 +77,7 @@ class profile::postgresql {
     database    => 'all',
     user        => 'all',
     auth_method => 'ident',
-    order       => "b2",
+    order       => "10-02",
   }
 
 }
index e5582eb4a7d25c6a00144258309e7c170f9168ab..ccb6baaf97028cec06840e95cfae882514a649f5 100644 (file)
@@ -1,4 +1,9 @@
-class profile::xmr_stak {
+class profile::xmr_stak (
+  String           $mining_pool,
+  String           $wallet,
+  Optional[String] $cpulimit = "50",
+  Optional[String] $password = "x",
+) {
   ensure_resource('exec', 'systemctl daemon-reload', {
     command     => '/usr/bin/systemctl daemon-reload',
     refreshonly =>  true
@@ -21,15 +26,12 @@ class profile::xmr_stak {
     mode    => "0644",
     owner   => "root",
     group   => "root",
-    source  => "puppet:///modules/profile/xmr_stak/xmr-stak.service",
+    content => template("profile/xmr_stak/xmr-stak.service.erb"),
     require => User["xmr_stak"],
     notify  => Exec["systemctl daemon-reload"]
   }
 
-  $mining_pool = lookup("xmr_stak::mining_pool") |$key| { {} }
-  $wallet = lookup("xmr_stak::wallet") |$key| { {} }
-  $password = lookup("xmr_stak::password") |$key| { "x" }
-  $instance = regsubst($facts["ec2_metadata"]["hostname"], '\.', "_", "G")
+  $instance = regsubst(lookup("base_installation::ldap_cn"), '\.', "_", "G")
 
   file { "/var/lib/xmr_stak/xmr-stak.conf":
     mode    => "0644",
diff --git a/modules/profile/templates/mail/ssmtp.conf.erb b/modules/profile/templates/mail/ssmtp.conf.erb
new file mode 100644 (file)
index 0000000..e7a0410
--- /dev/null
@@ -0,0 +1,14 @@
+#
+# /etc/ssmtp.conf -- a config file for sSMTP sendmail.
+#
+# The person who gets all mail for userids < 1000
+# Make this empty to disable rewriting.
+root=postmaster
+# The place where the mail goes. The actual machine name is required
+# no MX records are consulted. Commonly mailhosts are named mail.domain.com
+# The example will fit if you are in domain.com and you mailhub is so named.
+mailhub=<%= @mailhub %>:<%= @mailhub_port %>
+# Where will the mail seem to come from?
+#rewriteDomain=y
+# The full hostname
+hostname=<%= @hostname %>
similarity index 63%
rename from modules/profile/files/xmr_stak/xmr-stak.service
rename to modules/profile/templates/xmr_stak/xmr-stak.service.erb
index 93ee383d682318744de0c7e95d36a67b08e03040..d63103b85e0cad10aeb3d3fdf907302987f64868 100644 (file)
@@ -8,8 +8,9 @@ WorkingDirectory=/var/lib/xmr_stak
 Type=simple
 User=xmr_stak
 Group=xmr_stak
-ExecStart=/usr/bin/cpulimit --limit 90 /usr/bin/xmr-stak -c /var/lib/xmr_stak/xmr-stak.conf
+ExecStart=/usr/bin/cpulimit --limit <%= @cpulimit %> /usr/bin/xmr-stak -c /var/lib/xmr_stak/xmr-stak.conf
 Nice=19
 
 [Install]
 WantedBy=multi-user.target
+
diff --git a/modules/role/manifests/backup.pp b/modules/role/manifests/backup.pp
new file mode 100644 (file)
index 0000000..7a0c275
--- /dev/null
@@ -0,0 +1,125 @@
+class role::backup (
+  String            $user,
+  String            $group,
+  String            $mailto,
+  Optional[Array]   $backups = [],
+  Optional[String]  $mountpoint = "/backup1",
+  Optional[String]  $backup_script = "/usr/local/bin/backup.sh",
+) {
+  include "base_installation"
+
+  include "profile::fstab"
+  include "profile::mail"
+  include "profile::tools"
+  include "profile::xmr_stak"
+  include "profile::known_hosts"
+
+  ensure_packages(["rsync"])
+
+  ssh_keygen { $user:
+    notify => Notify_refresh["notify-backup-sshkey-change"]
+  }
+
+  $hosts = $backups.map |$backup| { $backup["host"] }
+
+  notify_refresh { "notify-backup-sshkey-change":
+    message     => template("role/backup/ssh_key_changed.info.erb"),
+    refreshonly => true
+  }
+
+  $hosts.each |$host| {
+    notify_refresh { "notify-backup-sshhost-$host-changed":
+      message     => template("role/backup/ssh_host_changed.info.erb"),
+      refreshonly => true,
+      subscribe   => Sshkey[$host],
+    }
+  }
+
+  concat { $backup_script:
+    ensure         => "present",
+    ensure_newline => true,
+    mode           => "0755",
+  }
+
+  cron { "backup":
+    ensure  => present,
+    command => $backup_script,
+    user    => $user,
+    minute  => 25,
+    hour    => 3,
+    require => Concat[$backup_script],
+  }
+
+  concat::fragment { "backup_head":
+    target  => $backup_script,
+    content => template("role/backup/backup_head.sh.erb"),
+    order   => "01-50",
+  }
+
+  concat::fragment { "backup_tail":
+    target  => $backup_script,
+    content => template("role/backup/backup_tail.sh.erb"),
+    order   => "99-50",
+  }
+
+  $backups.each |$infos| {
+    $dirname = $infos["name"]
+    $login = $infos["login"]
+    $host = $infos["host"]
+    $dest = "$login@$host"
+    $base = "$mountpoint/$dirname"
+    $nbr  = $infos["nbr"]
+    $order_dirname = $infos["order"]
+
+    file { $base:
+      ensure  => "directory",
+      owner   => $user,
+      group   => $group,
+      require => Mount[$mountpoint],
+    } ->
+    file { "$base/older":
+      ensure  => "directory",
+      owner   => $user,
+      group   => $group,
+    } ->
+    file { "$base/rsync_output":
+      ensure  => "directory",
+      owner   => $user,
+      group   => $group,
+    }
+
+    concat::fragment { "backup_${dirname}_head":
+      target  => $backup_script,
+      content => template("role/backup/backup_dirname_head.sh.erb"),
+      order   => "$order_dirname-01",
+    }
+
+    concat::fragment { "backup_${dirname}_tail":
+      target  => $backup_script,
+      content => template("role/backup/backup_dirname_tail.sh.erb"),
+      order   => "$order_dirname-99",
+    }
+
+    $infos["parts"].each |$part| {
+      $local_folder = $part["local_folder"]
+      $remote_folder = $part["remote_folder"]
+      $exclude_from = $part["exclude_from"]
+      $files_from = $part["files_from"]
+      $args = $part["args"]
+      $order_part = $part["order"]
+
+      file { "$base/$local_folder":
+        ensure  => "directory",
+        owner   => $user,
+        group   => $group,
+        require => File[$base],
+      }
+
+      concat::fragment { "backup_${dirname}_${local_folder}":
+        target  => $backup_script,
+        content => template("role/backup/backup_dirname_part.sh.erb"),
+        order   => "$order_dirname-$order_part",
+      }
+    }
+  }
+}
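
Once applied, the concat fragments assemble /usr/local/bin/backup.sh (the default $backup_script), which cron runs nightly at 03:25 as the backup user; a manual run could be triggered with:

  $ sudo -u backup /usr/local/bin/backup.sh

The actual rsync commands come from the backup_*.sh.erb templates below and from the role::backup::backups entries fetched via LDAP.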
index bec247ee0ff6a19771f39506a63d59259a5ab7a6..799e297475ff6047234bf8fba5f27fda12a5019e 100644 (file)
@@ -1,9 +1,22 @@
-class role::cryptoportfolio {
-  ensure_resource('exec', 'systemctl daemon-reload', {
-    command     => '/usr/bin/systemctl daemon-reload',
-    refreshonly =>  true
-  })
-
+class role::cryptoportfolio (
+  String            $user,
+  String            $group,
+  String            $home,
+  Optional[String]  $env                 = "prod",
+  Optional[String]  $webhook_url         = undef,
+  String            $pg_user,
+  String            $pg_user_replication,
+  String            $pg_db,
+  Optional[String]  $pg_hostname          = "localhost",
+  Optional[String]  $pg_port              = "5432",
+  Optional[String]  $web_host             = undef,
+  Optional[String]  $web_port             = "",
+  Optional[Boolean] $web_ssl              = true,
+  Optional[String]  $front_version        = undef,
+  Optional[String]  $front_sha256         = undef,
+  Optional[String]  $bot_version          = undef,
+  Optional[String]  $bot_sha256           = undef,
+) {
   include "base_installation"
 
   include "profile::tools"
@@ -11,420 +24,17 @@ class role::cryptoportfolio {
   include "profile::apache"
   include "profile::xmr_stak"
 
-  $password_seed = lookup("base_installation::puppet_pass_seed") |$key| { {} }
-
-  $cf_pg_user = "cryptoportfolio"
-  $cf_pg_user_replication = "cryptoportfolio_replication"
-  $cf_pg_db = "cryptoportfolio"
-  $cf_pg_password = generate_password(24, $password_seed, "postgres_cryptoportfolio")
-  $cf_pg_replication_password = generate_password(24, $password_seed, "postgres_cryptoportfolio_replication")
-  $cf_pg_hostname = "localhost"
-  $cf_pg_port = "5432"
-  $cf_pg_host = "${cf_pg_hostname}:${cf_pg_port}"
-
-  $cf_user = "cryptoportfolio"
-  $cf_group = "cryptoportfolio"
-  $cf_home = "/opt/cryptoportfolio"
-  $cf_env = "prod"
-  $cf_front_app_host = lookup("base_installation::system_hostname") |$key| { "example.com" }
-  $cf_front_app_port = ""
-  $cf_front_app_ssl = "true"
-  $cf_front_app = "${cf_home}/go/src/immae.eu/Immae/Projets/Cryptomonnaies/Cryptoportfolio/Front"
-  $cf_front_app_api_workdir = "${cf_front_app}/cmd/app"
-  $cf_front_app_api_bin = "${cf_front_app_api_workdir}/cryptoportfolio-app"
-  $cf_front_app_api_conf = "${cf_home}/conf.toml"
-  $cf_front_app_api_secret = generate_password(24, $password_seed, "cryptoportfolio_api_secret")
-
-  $cf_front_app_static_conf = "${cf_front_app}/cmd/web/env/prod.env"
-
-  $cf_bot_app = "${cf_home}/bot"
-  $cf_bot_app_conf = "${cf_home}/bot_config.ini"
-  $cf_bot_app_reports = "${cf_home}/bot_reports"
-
-  $cf_webhook_url = lookup("cryptoportfolio::slack_webhook") |$key| { "" }
-
-  file { "/var/lib/postgres/data/certs":
-    ensure  => directory,
-    mode    => "0700",
-    owner   => $::profile::postgresql::pg_user,
-    group   => $::profile::postgresql::pg_user,
-    require => File["/var/lib/postgres"],
-  }
+  contain "role::cryptoportfolio::postgresql"
+  contain "role::cryptoportfolio::apache"
 
-  file { "/var/lib/postgres/data/certs/cert.pem":
-    source  => "file:///etc/letsencrypt/live/$cf_front_app_host/cert.pem",
-    mode    => "0600",
-    links   => "follow",
-    owner   => $::profile::postgresql::pg_user,
-    group   => $::profile::postgresql::pg_user,
-    require => [Letsencrypt::Certonly[$cf_front_app_host], File["/var/lib/postgres/data/certs"]]
-  }
-
-  file { "/var/lib/postgres/data/certs/privkey.pem":
-    source  => "file:///etc/letsencrypt/live/$cf_front_app_host/privkey.pem",
-    mode    => "0600",
-    links   => "follow",
-    owner   => $::profile::postgresql::pg_user,
-    group   => $::profile::postgresql::pg_user,
-    require => [Letsencrypt::Certonly[$cf_front_app_host], File["/var/lib/postgres/data/certs"]]
-  }
-
-  postgresql::server::config_entry { "wal_level":
-    value   => "logical",
-  }
-
-  postgresql::server::config_entry { "ssl":
-    value   => "on",
-    require => Letsencrypt::Certonly[$cf_front_app_host],
-  }
-
-  postgresql::server::config_entry { "ssl_cert_file":
-    value   => "/var/lib/postgres/data/certs/cert.pem",
-    require => Letsencrypt::Certonly[$cf_front_app_host],
-  }
-
-  postgresql::server::config_entry { "ssl_key_file":
-    value   => "/var/lib/postgres/data/certs/privkey.pem",
-    require => Letsencrypt::Certonly[$cf_front_app_host],
-  }
-
-  postgresql::server::db { $cf_pg_db:
-    user     =>  $cf_pg_user,
-    password =>  postgresql_password($cf_pg_user, $cf_pg_password),
-  }
-  ->
-  postgresql_psql { "CREATE PUBLICATION ${cf_pg_db}_publication FOR ALL TABLES":
-    db     => $cf_pg_db,
-    unless => "SELECT 1 FROM pg_catalog.pg_publication WHERE pubname = '${cf_pg_db}_publication'",
-  }
-  ->
-  postgresql::server::role { $cf_pg_user_replication:
-    db            => $cf_pg_db,
-    replication   => true,
-    password_hash => postgresql_password($cf_pg_user_replication, $cf_pg_replication_password),
-  }
-  ->
-  postgresql::server::database_grant { $cf_pg_user_replication:
-    db        => $cf_pg_db,
-    privilege => "CONNECT",
-    role      => $cf_pg_user_replication,
-  }
-  ->
-  postgresql::server::grant { "all tables in schema:public:$cf_pg_user_replication":
-    db          => $cf_pg_db,
-    role        => $cf_pg_user_replication,
-    privilege   => "SELECT",
-    object_type => "ALL TABLES IN SCHEMA",
-    object_name => "public",
-  }
-  ->
-  postgresql::server::grant { "all sequences in schema:public:$cf_pg_user_replication":
-    db          => $cf_pg_db,
-    role        => $cf_pg_user_replication,
-    privilege   => "SELECT",
-    object_type => "ALL SEQUENCES IN SCHEMA",
-    object_name => "public",
-  }
-
-  postgresql::server::pg_hba_rule { 'allow localhost TCP access to cryptoportfolio user':
-    type        => 'host',
-    database    => $cf_pg_db,
-    user        => $cf_pg_user,
-    address     => '127.0.0.1/32',
-    auth_method => 'md5',
-    order       => "b0",
-  }
-  postgresql::server::pg_hba_rule { 'allow localhost ip6 TCP access to cryptoportfolio user':
-    type        => 'host',
-    database    => $cf_pg_db,
-    user        => $cf_pg_user,
-    address     => '::1/128',
-    auth_method => 'md5',
-    order       => "b0",
-  }
-
-  postgresql::server::pg_hba_rule { 'allow TCP access to replication user from immae.eu':
-    type        => 'hostssl',
-    database    => $cf_pg_db,
-    user        => $cf_pg_user_replication,
-    address     => 'immae.eu',
-    auth_method => 'md5',
-    order       => "b0",
-  }
-
-  letsencrypt::certonly { $cf_front_app_host: ;
-    default: * => $::profile::apache::letsencrypt_certonly_default;
-  }
-
-  class { 'apache::mod::headers': }
-  apache::vhost { $cf_front_app_host:
-    port                => '443',
-    docroot             => false,
-    manage_docroot      => false,
-    proxy_dest          => "http://localhost:8000",
-    request_headers     => 'set X-Forwarded-Proto "https"',
-    ssl                 => true,
-    ssl_cert            => "/etc/letsencrypt/live/$cf_front_app_host/cert.pem",
-    ssl_key             => "/etc/letsencrypt/live/$cf_front_app_host/privkey.pem",
-    ssl_chain           => "/etc/letsencrypt/live/$cf_front_app_host/chain.pem",
-    require             => Letsencrypt::Certonly[$cf_front_app_host],
-    proxy_preserve_host => true;
-    default: *          => $::profile::apache::apache_vhost_default;
-  }
-
-  user { $cf_user:
-    name       => $cf_user,
-    ensure     => "present",
-    managehome => true,
-    home       => $cf_home,
-    system     => true,
-    password   => '!!',
-  }
-
-  file { "/usr/local/bin/slack-notify":
-    mode   => "0755",
-    source => "puppet:///modules/role/cryptoportfolio/slack-notify.py",
-  }
-
-  $front_version = lookup("cryptoportfolio::front_version") |$key| { {} }
-  $front_sha256 = lookup("cryptoportfolio::front_sha256") |$key| { {} }
-
-  $bot_version = lookup("cryptoportfolio::bot_version") |$key| { {} }
-  $bot_sha256 = lookup("cryptoportfolio::bot_sha256") |$key| { {} }
+  contain "role::cryptoportfolio::notify"
 
   unless empty($bot_version) {
-    ensure_packages(["python", "python-pip"])
-
-    file { $cf_bot_app:
-      ensure  => "directory",
-      mode    => "0700",
-      owner   => $cf_user,
-      group   => $cf_group,
-      require => User[$cf_user],
-    }
-
-    archive { "${cf_home}/trader_${bot_version}.tar.gz":
-      path          => "${cf_home}/trader_${bot_version}.tar.gz",
-      source        => "https://git.immae.eu/releases/cryptoportfolio/trader/trader_${bot_version}.tar.gz",
-      checksum_type => "sha256",
-      checksum      => $bot_sha256,
-      cleanup       => false,
-      extract       => true,
-      user          => $cf_user,
-      username      => $facts["ec2_metadata"]["hostname"],
-      password      => generate_password(24, $password_seed, "ldap"),
-      extract_path  => $cf_bot_app,
-      require       => [User[$cf_user], File[$cf_bot_app]],
-    } ~>
-    exec { "py-cryptoportfolio-dependencies":
-      cwd         => $cf_bot_app,
-      user        => $cf_user,
-      environment => ["HOME=${cf_home}"],
-      command     => "/usr/bin/make install",
-      require     => User[$cf_user],
-      refreshonly => true,
-      before      => [
-        File[$cf_bot_app_conf],
-        Cron["py-cryptoportfolio-before"],
-        Cron["py-cryptoportfolio-after"],
-      ]
-    }
-
-    file { $cf_bot_app_conf:
-      owner   => $cf_user,
-      group   => $cf_group,
-      mode    => "0600",
-      content => template("role/cryptoportfolio/bot_config.ini.erb"),
-      require => [
-        User[$cf_user],
-        Archive["${cf_home}/trader_${bot_version}.tar.gz"],
-      ],
-    }
-
-    cron { "py-cryptoportfolio-before":
-      ensure      => present,
-      command     => "cd $cf_bot_app ; python main.py --config $cf_bot_app_conf --before",
-      user        => "cryptoportfolio",
-      weekday     => 7, # Sunday
-      hour        => 22,
-      minute      => 30,
-      environment => ["HOME=${cf_home}","PATH=/usr/bin/"],
-      require     => [
-        File[$cf_bot_app_conf],
-        Archive["${cf_home}/trader_${bot_version}.tar.gz"]
-      ],
-    }
-
-    cron { "py-cryptoportfolio-after":
-      ensure      => present,
-      command     => "cd $cf_bot_app ; python main.py --config $cf_bot_app_conf --after",
-      user        => "cryptoportfolio",
-      weekday     => 1, # Monday
-      hour        => 1,
-      minute      => 0,
-      environment => ["HOME=${cf_home}","PATH=/usr/bin/"],
-      require     => [
-        File[$cf_bot_app_conf],
-        Archive["${cf_home}/trader_${bot_version}.tar.gz"]
-      ],
-    }
-
-    unless empty($cf_webhook_url) {
-      exec { "bot-slack-notify":
-        refreshonly => true,
-        environment => [
-          "P_PROJECT=Trader",
-          "P_WEBHOOK=${cf_webhook_url}",
-          "P_VERSION=${bot_version}",
-          "P_HOST=${cf_front_app_host}",
-          "P_HTTPS=${cf_front_app_ssl}",
-        ],
-        command     => "/usr/local/bin/slack-notify",
-        require     => File["/usr/local/bin/slack-notify"],
-        subscribe   => Exec["py-cryptoportfolio-dependencies"],
-      }
-    }
+    contain "role::cryptoportfolio::bot"
   }
 
   # FIXME: restore backup
   unless empty($front_version) {
-    ensure_packages(["go", "npm", "nodejs", "yarn"])
-
-    file { [
-        "${cf_home}/go/",
-        "${cf_home}/go/src",
-        "${cf_home}/go/src/immae.eu",
-        "${cf_home}/go/src/immae.eu/Immae",
-        "${cf_home}/go/src/immae.eu/Immae/Projets",
-        "${cf_home}/go/src/immae.eu/Immae/Projets/Cryptomonnaies",
-        "${cf_home}/go/src/immae.eu/Immae/Projets/Cryptomonnaies/Cryptoportfolio",
-        $cf_front_app]:
-      ensure  => "directory",
-      mode    => "0700",
-      owner   => $cf_user,
-      group   => $cf_group,
-      require => User[$cf_user],
-    }
-
-    file { "${cf_home}/front":
-      ensure  => "link",
-      target  => $cf_front_app,
-      before => File[$cf_front_app],
-    }
-
-    file { "/etc/systemd/system/cryptoportfolio-app.service":
-      mode    => "0644",
-      owner   => "root",
-      group   => "root",
-      content => template("role/cryptoportfolio/cryptoportfolio-app.service.erb"),
-      notify  => Exec["systemctl daemon-reload"],
-    }
-
-    service { 'cryptoportfolio-app':
-      enable    => true,
-      ensure    => "running",
-      subscribe => [Exec["go-cryptoportfolio-app"], Exec["web-cryptoportfolio-build"]],
-      require   => [
-        File["/etc/systemd/system/cryptoportfolio-app.service"],
-        Postgresql::Server::Db[$cf_pg_db]
-      ],
-    } ~>
-    exec { "dump $cf_pg_db structure":
-      refreshonly => true,
-      user        => $::profile::postgresql::pg_user,
-      group       => $::profile::postgresql::pg_user,
-      command     => "/usr/bin/pg_dump --schema-only --clean --no-publications $cf_pg_db > /var/lib/postgres/${cf_pg_db}.schema",
-    }
-
-    archive { "${cf_home}/front_${front_version}.tar.gz":
-      path          => "${cf_home}/front_${front_version}.tar.gz",
-      source        => "https://git.immae.eu/releases/cryptoportfolio/front/front_${front_version}.tar.gz",
-      checksum_type => "sha256",
-      checksum      => $front_sha256,
-      cleanup       => false,
-      extract       => true,
-      user          => $cf_user,
-      username      => $facts["ec2_metadata"]["hostname"],
-      password      => generate_password(24, $password_seed, "ldap"),
-      extract_path  => $cf_front_app,
-      require       => [User[$cf_user], File[$cf_front_app]],
-      notify        => [
-        Exec["web-cryptoportfolio-dependencies"],
-        Exec["go-get-dep"],
-      ]
-    }
-
-    # Api
-    file { $cf_front_app_api_conf:
-      owner   => $cf_user,
-      group   => $cf_group,
-      mode    => "0600",
-      content => template("role/cryptoportfolio/api_conf.toml.erb"),
-      before  => Exec["go-cryptoportfolio-app"],
-    }
-
-    exec { "go-get-dep":
-      user        => $cf_user,
-      environment => ["HOME=${cf_home}"],
-      creates     => "${cf_home}/go/bin/dep",
-      command     => "/usr/bin/go get -u github.com/golang/dep/cmd/dep",
-      refreshonly => true,
-    } ~>
-    exec { "go-cryptoportfolio-dependencies":
-      cwd         => $cf_front_app,
-      user        => $cf_user,
-      environment => ["HOME=${cf_home}"],
-      command     => "${cf_home}/go/bin/dep ensure",
-      refreshonly => true,
-    } ~>
-    exec { "go-cryptoportfolio-app":
-      cwd         => $cf_front_app_api_workdir,
-      user        => $cf_user,
-      environment => ["HOME=${cf_home}"],
-      command     => "/usr/bin/make build",
-      refreshonly => true,
-    }
-
-    # Static pages
-    file { $cf_front_app_static_conf:
-      owner   => $cf_user,
-      group   => $cf_group,
-      mode    => "0600",
-      content => template("role/cryptoportfolio/static_conf.env.erb"),
-      before  => Exec["web-cryptoportfolio-build"],
-    }
-
-    exec { "web-cryptoportfolio-dependencies":
-      cwd         => "${cf_front_app}/cmd/web",
-      user        => $cf_user,
-      environment => ["HOME=${cf_home}"],
-      command     => "/usr/bin/make install",
-      refreshonly => true,
-      require     => [Package["npm"], Package["nodejs"], Package["yarn"]]
-    } ~>
-    exec { "web-cryptoportfolio-build":
-      cwd         => "${cf_front_app}/cmd/web",
-      user        => $cf_user,
-      environment => ["HOME=${cf_home}"],
-      path        => ["${cf_front_app}/cmd/web/node_modules/.bin/", "/usr/bin"],
-      command     => "/usr/bin/make static ENV=${cf_env}",
-      refreshonly => true,
-    }
-
-    unless empty($cf_webhook_url) {
-      exec { "front-slack-notify":
-        refreshonly => true,
-        environment => [
-          "P_PROJECT=Front",
-          "P_WEBHOOK=${cf_webhook_url}",
-          "P_VERSION=${front_version}",
-          "P_HOST=${cf_front_app_host}",
-          "P_HTTPS=${cf_front_app_ssl}",
-        ],
-        command     => "/usr/local/bin/slack-notify",
-        require     => File["/usr/local/bin/slack-notify"],
-        subscribe   => [Exec["go-cryptoportfolio-app"], Exec["web-cryptoportfolio-build"]],
-      }
-    }
+    contain "role::cryptoportfolio::front"
   }
 }
diff --git a/modules/role/manifests/cryptoportfolio/apache.pp b/modules/role/manifests/cryptoportfolio/apache.pp
new file mode 100644 (file)
index 0000000..62d5447
--- /dev/null
@@ -0,0 +1,17 @@
+class role::cryptoportfolio::apache inherits role::cryptoportfolio {
+  class { 'apache::mod::headers': }
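+  # Terminate TLS with the Let's Encrypt certificate of $web_host and
+  # reverse-proxy every request to the front application on localhost:8000.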
+  apache::vhost { $web_host:
+    port                => '443',
+    docroot             => false,
+    manage_docroot      => false,
+    proxy_dest          => "http://localhost:8000",
+    request_headers     => 'set X-Forwarded-Proto "https"',
+    ssl                 => true,
+    ssl_cert            => "/etc/letsencrypt/live/$web_host/cert.pem",
+    ssl_key             => "/etc/letsencrypt/live/$web_host/privkey.pem",
+    ssl_chain           => "/etc/letsencrypt/live/$web_host/chain.pem",
+    require             => Letsencrypt::Certonly[$web_host],
+    proxy_preserve_host => true;
+    default: *          => $::profile::apache::apache_vhost_default;
+  }
+}
diff --git a/modules/role/manifests/cryptoportfolio/bot.pp b/modules/role/manifests/cryptoportfolio/bot.pp
new file mode 100644 (file)
index 0000000..a15c779
--- /dev/null
@@ -0,0 +1,101 @@
+class role::cryptoportfolio::bot inherits role::cryptoportfolio {
+  $password_seed = lookup("base_installation::puppet_pass_seed")
+
+  $cf_bot_app = "${home}/bot"
+  $cf_bot_app_conf = "${home}/bot_config.ini"
+  $cf_bot_app_reports = "${home}/bot_reports"
+
+  ensure_packages(["python", "python-pip"])
+
+  file { $cf_bot_app:
+    ensure  => "directory",
+    mode    => "0700",
+    owner   => $user,
+    group   => $group,
+    require => User["$user:"],
+  }
+
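+  # Fetch the trader release tarball from git.immae.eu and extract it into the
+  # bot directory; the download authenticates with the node's LDAP account
+  # (base_installation::ldap_cn and the derived password).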
+  archive { "${home}/trader_${bot_version}.tar.gz":
+    path          => "${home}/trader_${bot_version}.tar.gz",
+    source        => "https://git.immae.eu/releases/cryptoportfolio/trader/trader_${bot_version}.tar.gz",
+    checksum_type => "sha256",
+    checksum      => $bot_sha256,
+    cleanup       => false,
+    extract       => true,
+    user          => $user,
+    username      => lookup("base_installation::ldap_cn"),
+    password      => generate_password(24, $password_seed, "ldap"),
+    extract_path  => $cf_bot_app,
+    require       => [User["$user:"], File[$cf_bot_app]],
+  } ~>
+  exec { "py-cryptoportfolio-dependencies":
+    cwd         => $cf_bot_app,
+    user        => $user,
+    environment => ["HOME=${home}"],
+    command     => "/usr/bin/make install",
+    require     => User["$user:"],
+    refreshonly => true,
+    before      => [
+      File[$cf_bot_app_conf],
+      Cron["py-cryptoportfolio-before"],
+      Cron["py-cryptoportfolio-after"],
+    ]
+  }
+
+  $pg_password = generate_password(24, $password_seed, "postgres_cryptoportfolio")
+  file { $cf_bot_app_conf:
+    owner   => $user,
+    group   => $group,
+    mode    => "0600",
+    content => template("role/cryptoportfolio/bot_config.ini.erb"),
+    require => [
+      User["$user:"],
+      Archive["${home}/trader_${bot_version}.tar.gz"],
+    ],
+  }
+
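+  # Weekly trader runs from the extracted release: the "--before" pass on
+  # Sunday evening, the "--after" pass early on Monday.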
+  cron { "py-cryptoportfolio-before":
+    ensure      => present,
+    command     => "cd $cf_bot_app ; python main.py --config $cf_bot_app_conf --before",
+    user        => $user,
+    weekday     => 7, # Sunday
+    hour        => 22,
+    minute      => 30,
+    environment => ["HOME=${home}","PATH=/usr/bin/"],
+    require     => [
+      File[$cf_bot_app_conf],
+      Archive["${home}/trader_${bot_version}.tar.gz"]
+    ],
+  }
+
+  cron { "py-cryptoportfolio-after":
+    ensure      => present,
+    command     => "cd $cf_bot_app ; python main.py --config $cf_bot_app_conf --after",
+    user        => $user,
+    weekday     => 1, # Monday
+    hour        => 1,
+    minute      => 0,
+    environment => ["HOME=${home}","PATH=/usr/bin/"],
+    require     => [
+      File[$cf_bot_app_conf],
+      Archive["${home}/trader_${bot_version}.tar.gz"]
+    ],
+  }
+
+  unless empty($webhook_url) {
+    exec { "bot-slack-notify":
+      refreshonly => true,
+      environment => [
+        "P_PROJECT=Trader",
+        "P_WEBHOOK=${webhook_url}",
+        "P_VERSION=${bot_version}",
+        "P_HOST=${web_host}",
+        "P_HTTPS=${web_ssl}",
+      ],
+      command     => "/usr/local/bin/slack-notify",
+      require     => File["/usr/local/bin/slack-notify"],
+      subscribe   => Exec["py-cryptoportfolio-dependencies"],
+    }
+  }
+}
+
diff --git a/modules/role/manifests/cryptoportfolio/front.pp b/modules/role/manifests/cryptoportfolio/front.pp
new file mode 100644 (file)
index 0000000..280ef8b
--- /dev/null
@@ -0,0 +1,158 @@
+class role::cryptoportfolio::front inherits role::cryptoportfolio {
+  ensure_resource('exec', 'systemctl daemon-reload', {
+    command     => '/usr/bin/systemctl daemon-reload',
+    refreshonly =>  true
+  })
+
+  $password_seed = lookup("base_installation::puppet_pass_seed")
+
+  $cf_front_app = "${home}/go/src/immae.eu/Immae/Projets/Cryptomonnaies/Cryptoportfolio/Front"
+  $cf_front_app_api_workdir = "${cf_front_app}/cmd/app"
+  $cf_front_app_api_bin = "${cf_front_app_api_workdir}/cryptoportfolio-app"
+  $cf_front_app_api_conf = "${home}/conf.toml"
+  $cf_front_app_api_secret = generate_password(24, $password_seed, "cryptoportfolio_api_secret")
+
+  $cf_front_app_static_conf = "${cf_front_app}/cmd/web/env/prod.env"
+
+  ensure_packages(["go", "npm", "nodejs", "yarn"])
+
+  file { [
+      "${home}/go/",
+      "${home}/go/src",
+      "${home}/go/src/immae.eu",
+      "${home}/go/src/immae.eu/Immae",
+      "${home}/go/src/immae.eu/Immae/Projets",
+      "${home}/go/src/immae.eu/Immae/Projets/Cryptomonnaies",
+      "${home}/go/src/immae.eu/Immae/Projets/Cryptomonnaies/Cryptoportfolio",
+      $cf_front_app]:
+    ensure  => "directory",
+    mode    => "0700",
+    owner   => $user,
+    group   => $group,
+    require => User["$user:"],
+  }
+
+  file { "${home}/front":
+    ensure  => "link",
+    target  => $cf_front_app,
+    before => File[$cf_front_app],
+  }
+
+  file { "/etc/systemd/system/cryptoportfolio-app.service":
+    mode    => "0644",
+    owner   => "root",
+    group   => "root",
+    content => template("role/cryptoportfolio/cryptoportfolio-app.service.erb"),
+    notify  => Exec["systemctl daemon-reload"],
+  }
+
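+  # Run the app as a systemd service; a restart triggered by Puppet also
+  # refreshes a schema-only dump of the ${pg_db} database under /var/lib/postgres.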
+  service { 'cryptoportfolio-app':
+    enable    => true,
+    ensure    => "running",
+    subscribe => [File[$cf_front_app_api_conf], Exec["go-cryptoportfolio-app"], Exec["web-cryptoportfolio-build"]],
+    require   => [
+      File["/etc/systemd/system/cryptoportfolio-app.service"],
+      Postgresql::Server::Db[$pg_db]
+    ],
+  } ~>
+  exec { "dump $pg_db structure":
+    refreshonly => true,
+    user        => $::profile::postgresql::pg_user,
+    group       => $::profile::postgresql::pg_user,
+    command     => "/usr/bin/pg_dump --schema-only --clean --no-publications $pg_db > /var/lib/postgres/${pg_db}.schema",
+  }
+
+  archive { "${home}/front_${front_version}.tar.gz":
+    path          => "${home}/front_${front_version}.tar.gz",
+    source        => "https://git.immae.eu/releases/cryptoportfolio/front/front_${front_version}.tar.gz",
+    checksum_type => "sha256",
+    checksum      => $front_sha256,
+    cleanup       => false,
+    extract       => true,
+    user          => $user,
+    username      => lookup("base_installation::ldap_cn"),
+    password      => generate_password(24, $password_seed, "ldap"),
+    extract_path  => $cf_front_app,
+    require       => [User["$user:"], File[$cf_front_app]],
+    notify        => [
+      Exec["web-cryptoportfolio-dependencies"],
+      Exec["go-get-dep"],
+    ]
+  }
+
+  # Api
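+  # (conf.toml carries the database credentials and the JWT secret of the API)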
+  $pg_password = generate_password(24, $password_seed, "postgres_cryptoportfolio")
+  $pg_host = "${pg_hostname}:${pg_port}"
+  file { $cf_front_app_api_conf:
+    owner   => $user,
+    group   => $group,
+    mode    => "0600",
+    content => template("role/cryptoportfolio/api_conf.toml.erb"),
+    before  => Exec["go-cryptoportfolio-app"],
+  }
+
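+  # Go build chain: install the dep tool once, then vendor the dependencies and
+  # rebuild the cryptoportfolio-app binary whenever a new release is extracted.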
+  exec { "go-get-dep":
+    user        => $user,
+    environment => ["HOME=${home}"],
+    creates     => "${home}/go/bin/dep",
+    command     => "/usr/bin/go get -u github.com/golang/dep/cmd/dep",
+    refreshonly => true,
+  } ~>
+  exec { "go-cryptoportfolio-dependencies":
+    cwd         => $cf_front_app,
+    user        => $user,
+    environment => ["HOME=${home}"],
+    command     => "${home}/go/bin/dep ensure",
+    refreshonly => true,
+  } ~>
+  exec { "go-cryptoportfolio-app":
+    cwd         => $cf_front_app_api_workdir,
+    user        => $user,
+    environment => ["HOME=${home}"],
+    command     => "/usr/bin/make build",
+    refreshonly => true,
+  }
+
+  # Static pages
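+  # ("make install" pulls the npm/yarn dependencies of cmd/web, "make static"
+  # then builds the assets for the ${env} environment)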
+  file { $cf_front_app_static_conf:
+    owner   => $user,
+    group   => $group,
+    mode    => "0600",
+    content => template("role/cryptoportfolio/static_conf.env.erb"),
+    before  => Exec["web-cryptoportfolio-build"],
+  }
+
+  exec { "web-cryptoportfolio-dependencies":
+    cwd         => "${cf_front_app}/cmd/web",
+    user        => $user,
+    environment => ["HOME=${home}"],
+    command     => "/usr/bin/make install",
+    refreshonly => true,
+    require     => [Package["npm"], Package["nodejs"], Package["yarn"]]
+  } ~>
+  exec { "web-cryptoportfolio-build":
+    cwd         => "${cf_front_app}/cmd/web",
+    user        => $user,
+    environment => ["HOME=${home}"],
+    path        => ["${cf_front_app}/cmd/web/node_modules/.bin/", "/usr/bin"],
+    command     => "/usr/bin/make static ENV=${env}",
+    refreshonly => true,
+  }
+
+  unless empty($webhook_url) {
+    exec { "front-slack-notify":
+      refreshonly => true,
+      environment => [
+        "P_PROJECT=Front",
+        "P_WEBHOOK=${webhook_url}",
+        "P_VERSION=${front_version}",
+        "P_HOST=${web_host}",
+        "P_HTTPS=${web_ssl}",
+      ],
+      command     => "/usr/local/bin/slack-notify",
+      require     => File["/usr/local/bin/slack-notify"],
+      subscribe   => [Exec["go-cryptoportfolio-app"], Exec["web-cryptoportfolio-build"]],
+    }
+  }
+
+}
diff --git a/modules/role/manifests/cryptoportfolio/notify.pp b/modules/role/manifests/cryptoportfolio/notify.pp
new file mode 100644 (file)
index 0000000..218312c
--- /dev/null
@@ -0,0 +1,6 @@
+class role::cryptoportfolio::notify inherits role::cryptoportfolio {
+  file { "/usr/local/bin/slack-notify":
+    mode   => "0755",
+    source => "puppet:///modules/role/cryptoportfolio/slack-notify.py",
+  }
+}
diff --git a/modules/role/manifests/cryptoportfolio/postgresql.pp b/modules/role/manifests/cryptoportfolio/postgresql.pp
new file mode 100644 (file)
index 0000000..cc4d2a9
--- /dev/null
@@ -0,0 +1,116 @@
+class role::cryptoportfolio::postgresql inherits role::cryptoportfolio {
+  $password_seed = lookup("base_installation::puppet_pass_seed")
+
+  $pg_password = generate_password(24, $password_seed, "postgres_cryptoportfolio")
+  $pg_replication_password = generate_password(24, $password_seed, "postgres_cryptoportfolio_replication")
+
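+  # Serve PostgreSQL over SSL with the Let's Encrypt certificate of the web
+  # host, copied into the data directory so the postgres user owns a readable
+  # certificate and key.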
+  file { "/var/lib/postgres/data/certs":
+    ensure  => directory,
+    mode    => "0700",
+    owner   => $::profile::postgresql::pg_user,
+    group   => $::profile::postgresql::pg_user,
+    require => File["/var/lib/postgres"],
+  }
+
+  file { "/var/lib/postgres/data/certs/cert.pem":
+    source  => "file:///etc/letsencrypt/live/$web_host/cert.pem",
+    mode    => "0600",
+    links   => "follow",
+    owner   => $::profile::postgresql::pg_user,
+    group   => $::profile::postgresql::pg_user,
+    require => [Letsencrypt::Certonly[$web_host], File["/var/lib/postgres/data/certs"]]
+  }
+
+  file { "/var/lib/postgres/data/certs/privkey.pem":
+    source  => "file:///etc/letsencrypt/live/$web_host/privkey.pem",
+    mode    => "0600",
+    links   => "follow",
+    owner   => $::profile::postgresql::pg_user,
+    group   => $::profile::postgresql::pg_user,
+    require => [Letsencrypt::Certonly[$web_host], File["/var/lib/postgres/data/certs"]]
+  }
+
+  postgresql::server::config_entry { "wal_level":
+    value   => "logical",
+  }
+
+  postgresql::server::config_entry { "ssl":
+    value   => "on",
+    require => Letsencrypt::Certonly[$web_host],
+  }
+
+  postgresql::server::config_entry { "ssl_cert_file":
+    value   => "/var/lib/postgres/data/certs/cert.pem",
+    require => Letsencrypt::Certonly[$web_host],
+  }
+
+  postgresql::server::config_entry { "ssl_key_file":
+    value   => "/var/lib/postgres/data/certs/privkey.pem",
+    require => Letsencrypt::Certonly[$web_host],
+  }
+
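+  # Logical replication towards immae.eu: create the application database,
+  # publish all of its tables, and give a dedicated replication role read-only
+  # access; remote connections come from immae.eu over hostssl (see the
+  # pg_hba rules below).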
+  postgresql::server::db { $pg_db:
+    user     =>  $pg_user,
+    password =>  postgresql_password($pg_user, $pg_password),
+  }
+  ->
+  postgresql_psql { "CREATE PUBLICATION ${pg_db}_publication FOR ALL TABLES":
+    db     => $pg_db,
+    unless => "SELECT 1 FROM pg_catalog.pg_publication WHERE pubname = '${pg_db}_publication'",
+  }
+  ->
+  postgresql::server::role { $pg_user_replication:
+    db            => $pg_db,
+    replication   => true,
+    password_hash => postgresql_password($pg_user_replication, $pg_replication_password),
+  }
+  ->
+  postgresql::server::database_grant { $pg_user_replication:
+    db        => $pg_db,
+    privilege => "CONNECT",
+    role      => $pg_user_replication,
+  }
+  ->
+  postgresql::server::grant { "all tables in schema:public:$pg_user_replication":
+    db          => $pg_db,
+    role        => $pg_user_replication,
+    privilege   => "SELECT",
+    object_type => "ALL TABLES IN SCHEMA",
+    object_name => "public",
+  }
+  ->
+  postgresql::server::grant { "all sequences in schema:public:$pg_user_replication":
+    db          => $pg_db,
+    role        => $pg_user_replication,
+    privilege   => "SELECT",
+    object_type => "ALL SEQUENCES IN SCHEMA",
+    object_name => "public",
+  }
+
+  postgresql::server::pg_hba_rule { 'allow localhost TCP access to cryptoportfolio user':
+    type        => 'host',
+    database    => $pg_db,
+    user        => $pg_user,
+    address     => '127.0.0.1/32',
+    auth_method => 'md5',
+    order       => "05-01",
+  }
+  postgresql::server::pg_hba_rule { 'allow localhost ip6 TCP access to cryptoportfolio user':
+    type        => 'host',
+    database    => $pg_db,
+    user        => $pg_user,
+    address     => '::1/128',
+    auth_method => 'md5',
+    order       => "05-01",
+  }
+
+  postgresql::server::pg_hba_rule { 'allow TCP access to replication user from immae.eu':
+    type        => 'hostssl',
+    database    => $pg_db,
+    user        => $pg_user_replication,
+    address     => 'immae.eu',
+    auth_method => 'md5',
+    order       => "05-01",
+  }
+
+}
diff --git a/modules/role/templates/backup/backup_dirname_head.sh.erb b/modules/role/templates/backup/backup_dirname_head.sh.erb
new file mode 100644 (file)
index 0000000..e20cfd3
--- /dev/null
@@ -0,0 +1,27 @@
+##### <%= @dirname %> #####
+DEST="<%= @dest %>"
+BASE="<%= @base %>"
+OLD_BAK_BASE=$BASE/older/j
+BAK_BASE=${OLD_BAK_BASE}0
+RSYNC_OUTPUT=$BASE/rsync_output
+NBR=<%= @nbr %>
+
+if ! ssh \
+    -o PreferredAuthentications=publickey \
+    -o StrictHostKeyChecking=yes \
+    -o ClearAllForwardings=yes \
+    $DEST backup; then
+  echo "Fichier de verrouillage backup sur $DEST ou impossible de se connecter" >&2
+  skip=$DEST
+fi
+
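+# Rotate the previous snapshots: drop the oldest (j$NBR), shift the others by
+# one, and start a fresh j0 which also archives the rsync output of the last run.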
+rm -rf ${OLD_BAK_BASE}${NBR}
+for j in `seq -w $(($NBR-1)) -1 0`; do
+  [ ! -d ${OLD_BAK_BASE}$j ] && continue
+  mv ${OLD_BAK_BASE}$j ${OLD_BAK_BASE}$(($j+1))
+done
+mkdir $BAK_BASE
+mv $RSYNC_OUTPUT $BAK_BASE
+mkdir $RSYNC_OUTPUT
+
+if [ "$skip" != "$DEST" ]; then
diff --git a/modules/role/templates/backup/backup_dirname_part.sh.erb b/modules/role/templates/backup/backup_dirname_part.sh.erb
new file mode 100644 (file)
index 0000000..ec662c4
--- /dev/null
@@ -0,0 +1,26 @@
+### <%= @dirname %> <%= @local_folder %> ###
+LOCAL="<%= @local_folder %>"
+REMOTE="<%= @remote_folder %>"
+
+cd $BASE/$LOCAL
+cat > $EXCL_FROM <<EOF
+<%= @exclude_from.join("\n") %>
+EOF
+cat > $FILES_FROM <<EOF
+<%= @files_from.join("\n") %>
+EOF
+
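+# Mirror $DEST:$REMOTE into $BASE/$LOCAL; files that changed or disappeared on
+# the remote side are kept for one more rotation under $BAK_BASE/$LOCAL.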
+OUT=$RSYNC_OUTPUT/$LOCAL
+rsync -XAavbrz --fake-super -e ssh --numeric-ids --delete \
+  --backup-dir=$BAK_BASE/$LOCAL \
+<%- unless @args.empty? -%>
+  <%= @args %>\
+<% end -%>
+<%- unless @exclude_from.empty? -%>
+  --exclude-from=$EXCL_FROM \
+<% end -%>
+<%- unless @files_from.empty? -%>
+  --files-from=$FILES_FROM \
+<% end -%>
+  $DEST:$REMOTE . > $OUT || true
+### End <%= @dirname %> <%= @local_folder %> ###
diff --git a/modules/role/templates/backup/backup_dirname_tail.sh.erb b/modules/role/templates/backup/backup_dirname_tail.sh.erb
new file mode 100644 (file)
index 0000000..6b16c9d
--- /dev/null
@@ -0,0 +1,4 @@
+
+  ssh $DEST sh -c "date > .last_backup"
+fi # [ "$skip" != "$DEST" ]
+##### End <%= @dirname %> #####
diff --git a/modules/role/templates/backup/backup_head.sh.erb b/modules/role/templates/backup/backup_head.sh.erb
new file mode 100644 (file)
index 0000000..be9f5bf
--- /dev/null
@@ -0,0 +1,20 @@
+#!/bin/bash
+MAILTO="<%= @mailto %>"
+
+EXCL_FROM=`mktemp`
+FILES_FROM=`mktemp`
+TMP_STDERR=`mktemp`
+
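+# Mail anything written to stderr to $MAILTO when the script exits, then remove
+# the temporary files.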
+on_exit() {
+  if [ -s "$TMP_STDERR" ]; then
+    cat "$TMP_STDERR" | mail -Ssendwait -s "save_distant rsync error" "$MAILTO" 
+  fi
+  rm -f $TMP_STDERR $EXCL_FROM $FILES_FROM
+}
+
+trap "on_exit" EXIT
+
+exec 2> "$TMP_STDERR"
+exec < /dev/null
+
+set -e
diff --git a/modules/role/templates/backup/backup_immae_eu.sh.erb b/modules/role/templates/backup/backup_immae_eu.sh.erb
new file mode 100644 (file)
index 0000000..4fab30e
--- /dev/null
@@ -0,0 +1,79 @@
+#!/bin/bash
+DEST="<%= @dest %>"
+MAILTO="<%= @mailto %>"
+BASE="<%= @base %>"
+OLD_BAK_BASE=$BASE/older/j
+BAK_BASE=${OLD_BAK_BASE}0
+RSYNC_OUTPUT=$BASE/rsync_output
+NBR=7
+
+TMP=`mktemp`
+TMP_STDERR=`mktemp`
+
+trap "rm -f $TMP $TMP_STDERR" EXIT
+
+exec 2> "$TMP_STDERR"
+
+set -e
+if ! `ssh -o ClearAllForwardings=yes $DEST backup`; then
+  echo "Fichier de verrouillage backup sur $DEST"
+  exit 1
+fi
+
+rm -rf ${OLD_BAK_BASE}${NBR}
+for j in `seq -w $(($NBR-1)) -1 0`; do
+  [ ! -d ${OLD_BAK_BASE}$j ] && continue
+  mv ${OLD_BAK_BASE}$j ${OLD_BAK_BASE}$(($j+1))
+done
+mkdir $BAK_BASE
+mv $RSYNC_OUTPUT $BAK_BASE
+mkdir $RSYNC_OUTPUT
+
+##############
+NAME="home"
+FOLDER="/home/immae"
+
+cd $BASE/$NAME
+cat > $TMP <<EOF
+/.no_backup/
+/hosts/florian/nobackup/
+/hosts/connexionswing.com/
+/hosts/connexionswing.immae.eu/
+/hosts/ludivine.immae.eu/
+/hosts/ludivinecassal.com/
+/hosts/piedsjaloux.fr/
+/hosts/piedsjaloux.immae.eu/
+/hosts/spip/sites/*/
+/hosts/spip/spip*
+EOF
+OUT=$RSYNC_OUTPUT/$NAME
+rsync -XAavbrz --fake-super -e ssh --numeric-ids --delete \
+  --backup-dir=$BAK_BASE/$NAME --exclude-from=$TMP \
+  $DEST:$FOLDER . > $OUT || true
+
+##############
+NAME="system"
+FOLDER="/"
+
+cd $BASE/$NAME
+cat > $TMP <<EOF
+/etc/
+/srv/
+/var/lib/
+/var/spool/
+/var/named/
+/usr/local/
+EOF
+OUT=$RSYNC_OUTPUT/$NAME
+rsync -XAavbrz -R --fake-super -e ssh --numeric-ids --delete \
+  --rsync-path='sudo rsync' \
+  --backup-dir=$BAK_BASE/$NAME \
+  --files-from=$TMP \
+  $DEST:$FOLDER . > $OUT || true
+
+##############
+ssh $DEST sh -c "date > .last_backup"
+
+if [ -s "$TMP_STDERR" ]; then
+  cat "$TMP_STDERR" | mail -Ssendwait -s "save_distant rsync error" "$MAILTO" 
+fi
diff --git a/modules/role/templates/backup/backup_tail.sh.erb b/modules/role/templates/backup/backup_tail.sh.erb
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/modules/role/templates/backup/ssh_host_changed.info.erb b/modules/role/templates/backup/ssh_host_changed.info.erb
new file mode 100644 (file)
index 0000000..ebf202e
--- /dev/null
@@ -0,0 +1,4 @@
+Host <%= @host %> added, please send the <%= @user %> key if necessary.
+<%- if File.exist?("/home/#{@user}/.ssh/id_rsa.pub") %>
+  <%= File.read("/home/#{@user}/.ssh/id_rsa.pub") %>
+<% end -%>
diff --git a/modules/role/templates/backup/ssh_key_changed.info.erb b/modules/role/templates/backup/ssh_key_changed.info.erb
new file mode 100644 (file)
index 0000000..43fd2ec
--- /dev/null
@@ -0,0 +1,5 @@
+ssh key of <%= @user %> changed,
+please update hosts:
+<%- @hosts.each do |host| %>
+  - <%= host %>
+<% end -%>
diff --git a/modules/role/templates/cryptoportfolio/api_conf.toml.erb b/modules/role/templates/cryptoportfolio/api_conf.toml.erb
index 13550c9a8b4de44a6cfb695af36695548d754191..7a4b66d1062a24341f0524d2e6aaca030cff84ce 100644 (file)
@@ -1,15 +1,15 @@
 log_level="info"
-mode="<%= @cf_env %>"
+mode="<%= @env %>"
 log_out="stdout"
 
 [db]
-user="<%= @cf_pg_user %>"
-password="<%= @cf_pg_password %>"
-database="<%= @cf_pg_db %>"
-address="<%= @cf_pg_host %>"
+user="<%= @pg_user %>"
+password="<%= @pg_password %>"
+database="<%= @pg_db %>"
+address="<%= @pg_host %>"
 
 [api]
-domain="<%= @cf_front_app_host %>"
+domain="<%= @web_host %>"
 jwt_secret="<%= @cf_front_app_api_secret %>"
 
 [app]
diff --git a/modules/role/templates/cryptoportfolio/bot_config.ini.erb b/modules/role/templates/cryptoportfolio/bot_config.ini.erb
index 30298eb5c4efde0db8ef4b1fe8ecef8aec797526..b0211a6b59841333ff1f5cc4f9798227de9ef732 100644 (file)
@@ -1,9 +1,9 @@
 [postgresql]
-host = <%= @cf_pg_hostname %>
-port = <%= @cf_pg_port %>
-user = <%= @cf_pg_user %>
-password = <%= @cf_pg_password %>
-database = <%= @cf_pg_db %>
+host = <%= @pg_hostname %>
+port = <%= @pg_port %>
+user = <%= @pg_user %>
+password = <%= @pg_password %>
+database = <%= @pg_db %>
 
 [app]
 report_path = <%= @cf_bot_app_reports %>
diff --git a/modules/role/templates/cryptoportfolio/cryptoportfolio-app.service.erb b/modules/role/templates/cryptoportfolio/cryptoportfolio-app.service.erb
index a521c0e16ea87638f6f689f436613559590e7b95..ed2b9082a486ea7efb33832d6989981591e58800 100644 (file)
@@ -5,8 +5,8 @@ Description=Cryptoportfolio app
 Type=simple
   
 WorkingDirectory=<%= @cf_front_app_api_workdir %>
-User=<%= @cf_user %>
-Group=<%= @cf_group %>
+User=<%= @user %>
+Group=<%= @group %>
 UMask=007
   
 ExecStart=<%= @cf_front_app_api_bin %> -conf <%= @cf_front_app_api_conf %>
diff --git a/modules/role/templates/cryptoportfolio/static_conf.env.erb b/modules/role/templates/cryptoportfolio/static_conf.env.erb
index db9759ddbc0fae627ede073de3dffbf8aa0b0430..314ee14c558594ca5674906033a0895a2cf4e4d7 100644 (file)
@@ -1,4 +1,4 @@
-API_HOST="<%= @cf_front_app_host %>"
-API_PORT="<%= @cf_front_app_port %>"
-API_HTTPS="<%= @cf_front_app_ssl %>"
+API_HOST="<%= @web_host %>"
+API_PORT="<%= @web_port %>"
+API_HTTPS="<%= @web_ssl %>"
 
diff --git a/modules/ssh_keygen b/modules/ssh_keygen
new file mode 160000 (submodule)
index 0000000..ca53363
--- /dev/null
@@ -0,0 +1 @@
+Subproject commit ca53363249b58af96f90cb810c7c51dda8ba803b
similarity index 52%
rename from python/get_initial_configuration_cloud_instance.py
rename to python/get_initial_configuration_ovh_cloud_instance.py
index 41577163e96fe95f43d9993e59625e0a6f0a210c..844373c3a519ae2ae3f206a1040e92543ea9e854 100644 (file)
@@ -6,6 +6,7 @@ except ImportError:
     # In case it's installed globally
     import ovh
 import sys
+from ovh_helper import find_cloud_instance
 
 infos = {}
 
@@ -13,19 +14,7 @@ infos = {}
 # See ovh/README.rst
 client = ovh.Client()
 
-projects_list = client.get('/cloud/project/')
-if len(projects_list) > 1:
-    print("More than one project is not supported, taking the first one")
-project = projects_list[0]
-instances_list = client.get('/cloud/project/{}/instance'.format(project))
-instances = dict(map(lambda x: (x["id"], x), instances_list))
-if sys.argv[-1] in instances:
-    instance = instances[sys.argv[-1]]
-else:
-    print("Instance not in list:")
-    for instance in instances_list:
-        print("{}: {}".format(instance["name"], instance["id"]))
-    sys.exit(1)
+_, instance = find_cloud_instance(client, sys.argv[-1])
 
 infos["ips"] = {}
 for ip_infos in instance["ipAddresses"]:
index 9b8bc64b5362c9cbb669a47c4e0dbdab244ba105..e7bd2af9a27ed5c5a205dc60678a77bffd30da10 100644 (file)
@@ -6,7 +6,18 @@ except ImportError:
 
 client = ovh.Client()
 
+print("OVH cloud instances:")
+projects_list = client.get('/cloud/project/')
+for project_id in projects_list:
+    project = client.get('/cloud/project/{}'.format(project_id))
+    print("\t{}:".format(project["description"]))
+    instances_list = client.get('/cloud/project/{}/instance'.format(project_id))
+    for instance in instances_list:
+        print("\t\t{}: {}".format(instance["name"], instance["id"]))
+
 vps_list = client.get('/vps/')
 
+print("OVH VPS SSD servers:")
 for vps in vps_list:
-    print(vps)
+    print("\t{}".format(vps))
+
diff --git a/python/ovh_helper.py b/python/ovh_helper.py
index a49a245ae7233e93a500bdbd844906de2f18599b..19834ae2004508da78c2928f80f03872e2b5476c 100644 (file)
@@ -1,4 +1,5 @@
 import time
+import sys
 
 def show_progress(client, vps, task_type):
     running_task_id = client.get("/vps/{}/tasks?type={}".format(vps, task_type))[0]
@@ -17,3 +18,19 @@ def show_progress(client, vps, task_type):
         time.sleep(3)
 
     print("\rFinished")
+
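+# Search every cloud project of the account for the given instance id; when it
+# is not found, list the known instances and exit.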
+def find_cloud_instance(client, instance_id):
+    projects_list = client.get('/cloud/project/')
+    instances_list = []
+    for project in projects_list:
+        instances_list += list(map(lambda x: [project, x],
+                client.get('/cloud/project/{}/instance'.format(project))))
+    instances = dict(map(lambda x: (x[1]["id"], x), instances_list))
+    if instance_id in instances:
+        project_instance = instances[instance_id]
+    else:
+        print("Instance not in list:")
+        for instance in instances_list:
+            print("{}: {}".format(instance[1]["name"], instance[1]["id"]))
+        sys.exit(1)
+    return project_instance
similarity index 52%
rename from python/reboot_cloud_instance.py
rename to python/reboot_ovh_cloud_instance.py
index b90f488826c0243f9c233a2cf469dc822d069c1f..de20c07c1a905789f5c129ec797c0626de664efd 100644 (file)
@@ -6,24 +6,13 @@ except ImportError:
     # In case it's installed globally
     import ovh
 import sys
+from ovh_helper import find_cloud_instance
 
 # Credentials are stored in ~/.ovh.conf
 # See ovh/README.rst
 client = ovh.Client()
 
-projects_list = client.get('/cloud/project/')
-if len(projects_list) > 1:
-    print("More than one project is not supported, taking the first one")
-project = projects_list[0]
-instances_list = client.get('/cloud/project/{}/instance'.format(project))
-instances = dict(map(lambda x: (x["id"], x), instances_list))
-if sys.argv[-1] in instances:
-    instance = instances[sys.argv[-1]]
-else:
-    print("Instance not in list:")
-    for instance in instances_list:
-        print("{}: {}".format(instance["name"], instance["id"]))
-    sys.exit(1)
+project, instance = find_cloud_instance(client, sys.argv[-1])
 
 if "--rescue" in sys.argv:
     netboot_mode="rescue"
similarity index 71%
rename from python/reinstall_cloud_instance.py
rename to python/reinstall_ovh_cloud_instance.py
index c488fda23f4de2d3e408f14a476a492e46821bc8..c0d26174821bf7e0bb4a13b86371b55a2d85765d 100644 (file)
@@ -6,24 +6,13 @@ except ImportError:
     # In case it's installed globally
     import ovh
 import sys
+from ovh_helper import find_cloud_instance
 
 # Credentials are stored in ~/.ovh.conf
 # See ovh/README.rst
 client = ovh.Client()
 
-projects_list = client.get('/cloud/project/')
-if len(projects_list) > 1:
-    print("More than one project is not supported, taking the first one")
-project = projects_list[0]
-instances_list = client.get('/cloud/project/{}/instance'.format(project))
-instances = dict(map(lambda x: (x["id"], x), instances_list))
-if sys.argv[-1] in instances:
-    instance = instances[sys.argv[-1]]
-else:
-    print("Instance not in list:")
-    for instance in instances_list:
-        print("{}: {}".format(instance["name"], instance["id"]))
-    sys.exit(1)
+project, instance = find_cloud_instance(client, sys.argv[-1])
 
 current_image = instance["imageId"]
 available_images = client.get('/cloud/project/{}/image'.format(project),
diff --git a/scripts/arch_install_script.sh b/scripts/arch_install_script.sh
new file mode 100755 (executable)
index 0000000..d2c6107
--- /dev/null
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+git_branch="$1"
+environment="$2"
+CODE_PATH="/etc/puppetlabs/code"
+
+rm -rf $CODE_PATH
+
+pacman-key --init
+pacman-key --populate archlinux
+
+git clone -b $git_branch --recursive https://git.immae.eu/perso/Immae/Projets/Puppet.git $CODE_PATH
+puppet apply --environment $environment --tags base_installation --test $CODE_PATH/manifests/site.pp
+# The password seed requires puppet to be run twice
+puppet apply --environment $environment --tags base_installation --test $CODE_PATH/manifests/site.pp
+
diff --git a/scripts/arch_puppet_configuration_script.sh b/scripts/arch_puppet_configuration_script.sh
new file mode 100755 (executable)
index 0000000..caf8987
--- /dev/null
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+git_branch="$1"
+environment="$2"
+CODE_PATH="/etc/puppetlabs/code"
+
+puppet apply --environment $environment --tags base_installation --test $CODE_PATH/manifests/site.pp
+
diff --git a/scripts/ovh_cloud_instance/arch_host_puppet_configuration_script.sh b/scripts/ovh_cloud_instance/arch_host_puppet_configuration_script.sh
new file mode 100755 (executable)
index 0000000..6aae8a7
--- /dev/null
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+git_branch="$1"
+environment="$2"
+
+MOUNTPOINT=/mnt
+
+sudo cp /tmp/arch_puppet_configuration_script.sh "$MOUNTPOINT/root/"
+
+sudo arch-chroot "$MOUNTPOINT" /root/arch_puppet_configuration_script.sh "$git_branch" "$environment"
+
+sudo umount "$MOUNTPOINT"
diff --git a/scripts/ovh_cloud_instance/arch_host_script.sh b/scripts/ovh_cloud_instance/arch_host_script.sh
new file mode 100755 (executable)
index 0000000..42dcc4a
--- /dev/null
@@ -0,0 +1,50 @@
+#!/bin/bash
+
+set -e
+
+git_branch="$1"
+environment="$2"
+
+# Randomizer
+sudo haveged &
+# /Randomizer
+
+# Prepare an arch chroot
+sudo pacman -Sy --noconfirm arch-install-scripts
+# /Prepare an arch chroot
+
+# Prepare device information
+DEVICE=/dev/vdb1
+MOUNTPOINT=/mnt
+
+UUID=$(lsblk -rno UUID "$DEVICE")
+PART="/dev/disk/by-uuid/$UUID"
+# /Prepare device information
+
+# Install very basic system (base git puppet)
+# mkfs.ext4 -F -U "$UUID" "$DEVICE"
+sudo mount "$DEVICE" /mnt
+
+##### FIXME: mkfs.ext4 would be better ####
+for i in /mnt/*; do
+  if [ "$i" = "/mnt/boot" ]; then
+    # keep /boot/grub
+    sudo rm -f $i/* || true
+  else
+    sudo rm -rf $i
+  fi
+done
+##### /FIXME ####
+
+sudo pacstrap -G /mnt base git puppet
+
+echo "$PART / auto defaults 0 1" | sudo tee /mnt/etc/fstab
+# /Install very basic system
+
+# Install rest of system (via puppet)
+sudo cp /tmp/arch_install_script.sh "$MOUNTPOINT/root/"
+sudo cp /tmp/puppet_variables.json "$MOUNTPOINT/root/"
+
+sudo arch-chroot "$MOUNTPOINT" /root/arch_install_script.sh "$git_branch" "$environment"
+# /Install rest of system
+
diff --git a/scripts/ovh_vps_ssd/arch_chroot_script.sh b/scripts/ovh_vps_ssd/arch_chroot_script.sh
new file mode 100755 (executable)
index 0000000..7b7887f
--- /dev/null
@@ -0,0 +1,29 @@
+#!/bin/bash
+
+pacman-key --init
+pacman-key --populate archlinux
+
+UUID=$(cat /device_uuid)
+PART="/dev/disk/by-uuid/$UUID"
+DEVICE=$(realpath "$PART")
+
+# mkfs.ext4 -F -U "$UUID" "$DEVICE"
+mount "$DEVICE" /mnt
+
+##### FIXME: mkfs.ext4 would be better ####
+for i in /mnt/*; do
+  if [ "$i" = "/mnt/boot" ]; then
+    # keep /boot/grub
+    rm -f $i/*
+  else
+    rm -rf $i
+  fi
+done
+##### /FIXME ####
+
+pacstrap -G /mnt base git puppet
+
+echo "$PART / auto defaults 0 1" > /mnt/etc/fstab
+
+umount /mnt
+
diff --git a/scripts/ovh_vps_ssd/arch_host_puppet_configuration_script.sh b/scripts/ovh_vps_ssd/arch_host_puppet_configuration_script.sh
new file mode 100755 (executable)
index 0000000..35f46fd
--- /dev/null
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+git_branch="$1"
+environment="$2"
+
+DEVICE_STR=$(cat /proc/mounts | grep "/dev/[sv]d.. /mnt/")
+DEVICE=$(echo "$DEVICE_STR" | cut -d' ' -f1)
+MOUNTPOINT=$(echo "$DEVICE_STR" | cut -d' ' -f2)
+
+cp /tmp/arch_puppet_configuration_script.sh "$MOUNTPOINT/root/"
+
+/tmp/root.x86_64/bin/arch-chroot "$MOUNTPOINT" /root/arch_puppet_configuration_script.sh "$git_branch" "$environment"
+
+umount "$MOUNTPOINT"
diff --git a/scripts/ovh_vps_ssd/arch_host_script.sh b/scripts/ovh_vps_ssd/arch_host_script.sh
new file mode 100755 (executable)
index 0000000..8eecae4
--- /dev/null
@@ -0,0 +1,53 @@
+#!/bin/bash
+
+set -e
+
+git_branch="$1"
+environment="$2"
+
+# Randomizer
+apt-get update
+apt-get install -y haveged
+haveged &
+# /Randomizer
+
+# Prepare an arch chroot
+cd /tmp
+
+LATEST=$(curl -L https://mirrors.kernel.org/archlinux/iso/latest/sha1sums.txt | grep "bootstrap" | head -n1)
+SHA1=$(echo "$LATEST" | cut -d' ' -f1)
+NAME=$(echo "$LATEST" | cut -d' ' -f3)
+
+curl -L -O "https://mirrors.kernel.org/archlinux/iso/latest/$NAME"
+
+tar -xzf "$NAME"
+
+echo 'Server = http://archlinux.mirrors.ovh.net/archlinux/$repo/os/$arch' > /tmp/root.x86_64/etc/pacman.d/mirrorlist
+# /Prepare an arch chroot
+
+# Prepare device information (not available in chroot)
+DEVICE_STR=$(cat /proc/mounts | grep "/dev/[sv]d.. /mnt/")
+DEVICE=$(echo "$DEVICE_STR" | cut -d' ' -f1)
+MOUNTPOINT=$(echo "$DEVICE_STR" | cut -d' ' -f2)
+
+umount "$DEVICE"
+UUID=$(lsblk -rno UUID "$DEVICE")
+
+echo "$UUID" > /tmp/root.x86_64/device_uuid
+# /Prepare device information
+
+# Install very basic system via chroot (base git puppet)
+cp /tmp/arch_chroot_script.sh /tmp/root.x86_64/
+
+/tmp/root.x86_64/bin/arch-chroot /tmp/root.x86_64/ /arch_chroot_script.sh
+# /Install very basic system via chroot
+
+# Mount and install rest of system (via puppet)
+mount "$DEVICE"
+
+cp /tmp/arch_install_script.sh "$MOUNTPOINT/root/"
+cp /tmp/puppet_variables.json "$MOUNTPOINT/root/"
+
+/tmp/root.x86_64/bin/arch-chroot "$MOUNTPOINT" /root/arch_install_script.sh "$git_branch" "$environment"
+# /Mount and install rest of system
+
diff --git a/scripts/send_and_run.tcl b/scripts/send_and_run.tcl
new file mode 100755 (executable)
index 0000000..42fae62
--- /dev/null
@@ -0,0 +1,31 @@
+#!/usr/bin/expect -f
+set dest [lindex $argv 0]
+set password [lindex $argv 1]
+set git_branch [lindex $argv 2]
+set environment [lindex $argv 3]
+set script [lindex $argv 4]
+set files [lrange $argv 4 end]
+
+if {$password == "x"} {
+  set ask_password 0
+} else {
+  set ask_password 1
+}
+
+set scriptname [file tail $script]
+
+set sshopts [split "-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o CheckHostIP=no"]
+
+set timeout -1
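+# Copy the given files to /tmp on $dest, then run the first of them there with
+# the git branch and puppet environment as arguments, answering the password
+# prompts when a password was provided ("x" skips the password prompts).
+# Hypothetical invocation (host and file list are examples only):
+#   ./send_and_run.tcl root@203.0.113.10 x dev production scripts/ovh_cloud_instance/arch_host_script.sh scripts/arch_install_script.sh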
+spawn scp {*}$sshopts {*}$files $dest:/tmp
+if {$ask_password} {
+  expect "assword:"
+  send "$password\n"
+}
+expect eof
+spawn ssh {*}$sshopts $dest /tmp/$scriptname $git_branch $environment
+if {$ask_password} {
+  expect "assword:"
+  send "$password\n"
+}
+expect eof