-rwxr-xr-x  bin/install_script_ovh_cloud_instance.sh                                  182
-rwxr-xr-x  bin/install_script_ovh_vps_ssd.sh (renamed from bin/install_script.sh)      0
-rw-r--r--  python/get_initial_configuration_cloud_instance.py                          41
-rw-r--r--  python/reboot_cloud_instance.py                                             39
-rw-r--r--  python/reinstall_cloud_instance.py                                          57
5 files changed, 319 insertions, 0 deletions
diff --git a/bin/install_script_ovh_cloud_instance.sh b/bin/install_script_ovh_cloud_instance.sh
new file mode 100755
index 0000000..26e410e
--- /dev/null
+++ b/bin/install_script_ovh_cloud_instance.sh
@@ -0,0 +1,182 @@
#!/bin/bash

usage() {
  cat <<EOF
$0 [options]
  --help,-h           This help
  --instance-id id    Id of the instance
  --reinstall-first   Start with reinstalling the instance
  --host-user user    Use another user (default: arch)
  --no-reboot         Don't reboot
  --no-reboot-start   Don't reboot to rescue at the beginning
  --no-reboot-end     Don't reboot to normal at the end
  --git-branch        Use another puppet branch (default: master)
  --environment       Environment to use for the install (default: production)
EOF
}
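
# Typical invocation (the instance id below is a placeholder):
#   bin/install_script_ovh_cloud_instance.sh --instance-id 01234567-89ab-cdef-0123-456789abcdef --reinstall-first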

set -e

host_user=arch
git_branch=master
environment=production

while [ -n "$1" ]; do
  case "$1" in
    --instance-id)
      instance_id="$2"
      shift
      ;;
    --reinstall-first)
      reinstall_first=1
      ;;
    --host-user)
      host_user="$2"
      shift
      ;;
    --no-reboot)
      no_reboot=1
      ;;
    --no-reboot-start)
      no_reboot_start=1
      ;;
    --no-reboot-end)
      no_reboot_end=1
      ;;
    --git-branch)
      git_branch="$2"
      shift
      ;;
    --environment)
      environment="$2"
      shift
      ;;
    --help|-h)
      usage
      exit 0
      ;;
  esac

  shift
done

DIRECTORY=$(cd `dirname $0` && pwd)
PYTHON_DIRECTORY="$DIRECTORY/../python"

if [ -z "$instance_id" ]; then
  read -p "Instance id: " instance_id
fi

if [ -n "$reinstall_first" ]; then
  echo "Reinstalling the system"
  python $PYTHON_DIRECTORY/reinstall_cloud_instance.py --use-current "$instance_id"

  read -p "Press a key when the server is ready" ready
fi

if [ -z "$no_reboot" -a -z "$no_reboot_start" ]; then
  echo "Please wait while the instance reboots"
  python $PYTHON_DIRECTORY/reboot_cloud_instance.py --rescue "$instance_id"

  read -p "Press a key when the instance has rebooted" ready
fi

ARCH_DIR=`mktemp -d`
ARCH_HOST_SCRIPT="$ARCH_DIR/arch_host_script.sh"
ARCH_INSTALL_SCRIPT="$ARCH_DIR/arch_install_script.sh"
ARCH_HOST_PUPPET_CONFIGURATION_SCRIPT="$ARCH_DIR/arch_host_puppet_configuration_script.sh"
ARCH_PUPPET_CONFIGURATION_SCRIPT="$ARCH_DIR/arch_puppet_configuration_script.sh"
ARCH_PUPPET_INITIAL_CONFIGURATION="$ARCH_DIR/puppet_variables.json"

trap "rm -rf $ARCH_DIR" EXIT

#### Base installation stage
python $PYTHON_DIRECTORY/get_initial_configuration_cloud_instance.py $instance_id > $ARCH_PUPPET_INITIAL_CONFIGURATION
host_address=$(python $PYTHON_DIRECTORY/get_initial_configuration_cloud_instance.py $instance_id | jq -r '.ips.v4.ipAddress')
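# get_initial_configuration_cloud_instance.py prints JSON shaped roughly like
# (addresses are placeholders):
#   {"ips": {"v4": {"ipAddress": "203.0.113.10", "gateway": "203.0.113.1", "version": 4},
#            "v6": {"ipAddress": "2001:db8::1", "version": 6, "mask": 128}}}
# which is what the jq filter above relies on.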

cat > $ARCH_HOST_SCRIPT <<EOF
#!/bin/bash

sudo haveged &
sudo pacman -Sy --noconfirm arch-install-scripts

DEVICE=/dev/vdb1
MOUNTPOINT=/mnt

UUID=\$(lsblk -rno UUID "\$DEVICE")
PART="/dev/disk/by-uuid/\$UUID"

# mkfs.ext4 -F -U "\$UUID" "\$DEVICE"
sudo mount "\$DEVICE" /mnt

##### FIXME: mkfs.ext4 would be better ####
for i in /mnt/*; do
  if [ "\$i" = "/mnt/boot" ]; then
    # keep /boot/grub
    sudo rm -f \$i/*
  else
    sudo rm -rf \$i
  fi
done
##### /FIXME ####

sudo pacstrap /mnt base git puppet

echo "\$PART / auto defaults 0 1" | sudo tee /mnt/etc/fstab

sudo cp /tmp/arch_install_script.sh "\$MOUNTPOINT/root/"
sudo cp /tmp/puppet_variables.json "\$MOUNTPOINT/root/"

sudo arch-chroot "\$MOUNTPOINT" /root/arch_install_script.sh
EOF

cat > $ARCH_INSTALL_SCRIPT <<EOF
CODE_PATH="/etc/puppetlabs/code"
rm -rf \$CODE_PATH
git clone -b $git_branch --recursive https://git.immae.eu/perso/Immae/Projets/Puppet.git \$CODE_PATH
puppet apply --environment $environment --tags base_installation --test \$CODE_PATH/manifests/site.pp
# The password seed requires puppet to be run twice
puppet apply --environment $environment --tags base_installation --test \$CODE_PATH/manifests/site.pp
EOF

chmod a+x $ARCH_HOST_SCRIPT $ARCH_INSTALL_SCRIPT

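# expect is seemingly used here just to run scp/ssh unattended while streaming their
# output ("set timeout -1" disables expect's timeout); host key checking is disabled
# because the rescue system and the freshly installed system present new host keys.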
expect -f - <<EOF
set timeout -1
spawn scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o CheckHostIP=no $ARCH_PUPPET_INITIAL_CONFIGURATION $ARCH_HOST_SCRIPT $ARCH_INSTALL_SCRIPT $host_user@$host_address:/tmp
expect eof
spawn ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o CheckHostIP=no $host_user@$host_address /tmp/arch_host_script.sh
expect eof
EOF

### Role specific stage
read -p "Press a key when LDAP is configured" i

cat > $ARCH_PUPPET_CONFIGURATION_SCRIPT <<EOF
CODE_PATH="/etc/puppetlabs/code"
puppet apply --environment $environment --tags base_installation --test \$CODE_PATH/manifests/site.pp
EOF

cat > $ARCH_HOST_PUPPET_CONFIGURATION_SCRIPT <<EOF
MOUNTPOINT=/mnt

sudo cp /tmp/arch_puppet_configuration_script.sh "\$MOUNTPOINT/root/"

sudo arch-chroot "\$MOUNTPOINT" /root/arch_puppet_configuration_script.sh
EOF

chmod a+x $ARCH_PUPPET_CONFIGURATION_SCRIPT $ARCH_HOST_PUPPET_CONFIGURATION_SCRIPT

expect -f - <<EOF
set timeout -1
spawn scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o CheckHostIP=no $ARCH_PUPPET_CONFIGURATION_SCRIPT $ARCH_HOST_PUPPET_CONFIGURATION_SCRIPT $host_user@$host_address:/tmp
expect eof
spawn ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o CheckHostIP=no $host_user@$host_address /tmp/arch_host_puppet_configuration_script.sh
expect eof
EOF

### Installation finished
if [ -z "$no_reboot" -a -z "$no_reboot_end" ]; then
  echo "Rebooting"
  python $PYTHON_DIRECTORY/reboot_cloud_instance.py --local "$instance_id"
fi
diff --git a/bin/install_script.sh b/bin/install_script_ovh_vps_ssd.sh
index 6b1aa39..6b1aa39 100755
--- a/bin/install_script.sh
+++ b/bin/install_script_ovh_vps_ssd.sh
diff --git a/python/get_initial_configuration_cloud_instance.py b/python/get_initial_configuration_cloud_instance.py
new file mode 100644
index 0000000..4157716
--- /dev/null
+++ b/python/get_initial_configuration_cloud_instance.py
@@ -0,0 +1,41 @@
# -*- encoding: utf-8 -*-
import json
try:
    from ovh import ovh
except ImportError:
    # In case it's installed globally
    import ovh
import sys
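
# Usage: python get_initial_configuration_cloud_instance.py <instance_id>
# Prints a JSON document describing the instance's IPv4/IPv6 addresses; the install
# script saves it as puppet_variables.json and extracts the IPv4 address from it.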

infos = {}

# Credentials are stored in ~/.ovh.conf
# See ovh/README.rst
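# For reference, a minimal ~/.ovh.conf for the python ovh client typically looks like
# the following (all values are placeholders):
#   [default]
#   endpoint=ovh-eu
#
#   [ovh-eu]
#   application_key=my_app_key
#   application_secret=my_app_secret
#   consumer_key=my_consumer_key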
client = ovh.Client()

projects_list = client.get('/cloud/project/')
if len(projects_list) > 1:
    print("More than one project is not supported, taking the first one")
project = projects_list[0]
instances_list = client.get('/cloud/project/{}/instance'.format(project))
instances = dict(map(lambda x: (x["id"], x), instances_list))
if sys.argv[-1] in instances:
    instance = instances[sys.argv[-1]]
else:
    print("Instance not in list:")
    for instance in instances_list:
        print("{}: {}".format(instance["name"], instance["id"]))
    sys.exit(1)

infos["ips"] = {}
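# Rename the keys returned by the API ("ip", "gatewayIp") to the names used
# downstream ("ipAddress", "gateway").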
for ip_infos in instance["ipAddresses"]:
    ip_infos["ipAddress"] = ip_infos.pop("ip")
    ip_infos["gateway"] = ip_infos.pop("gatewayIp")

    if ip_infos["version"] == 4:
        infos["ips"]["v4"] = ip_infos
    else:
        infos["ips"]["v6"] = ip_infos
        infos["ips"]["v6"]["mask"] = 128

print(json.dumps(infos))
diff --git a/python/reboot_cloud_instance.py b/python/reboot_cloud_instance.py
new file mode 100644
index 0000000..b90f488
--- /dev/null
+++ b/python/reboot_cloud_instance.py
@@ -0,0 +1,39 @@
# -*- encoding: utf-8 -*-
import json
try:
    from ovh import ovh
except ImportError:
    # In case it's installed globally
    import ovh
import sys
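
# Usage: python reboot_cloud_instance.py [--rescue|--local] <instance_id>
# --rescue reboots the instance into OVH's rescue system, --local back into the
# installed system; with neither flag the script does nothing.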

# Credentials are stored in ~/.ovh.conf
# See ovh/README.rst
client = ovh.Client()

projects_list = client.get('/cloud/project/')
if len(projects_list) > 1:
    print("More than one project is not supported, taking the first one")
project = projects_list[0]
instances_list = client.get('/cloud/project/{}/instance'.format(project))
instances = dict(map(lambda x: (x["id"], x), instances_list))
if sys.argv[-1] in instances:
    instance = instances[sys.argv[-1]]
else:
    print("Instance not in list:")
    for instance in instances_list:
        print("{}: {}".format(instance["name"], instance["id"]))
    sys.exit(1)

if "--rescue" in sys.argv:
    netboot_mode = "rescue"
elif "--local" in sys.argv:
    netboot_mode = "local"
else:
    netboot_mode = None

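# Rescue mode is toggled through the instance's rescueMode endpoint: rescue=True
# boots the rescue system, rescue=False goes back to booting from the local disk.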
if netboot_mode is not None:
    result = client.post("/cloud/project/{}/instance/{}/rescueMode".format(project, instance["id"]),
                         imageId=instance["imageId"], rescue=(netboot_mode == "rescue"))
    print(result)
diff --git a/python/reinstall_cloud_instance.py b/python/reinstall_cloud_instance.py
new file mode 100644
index 0000000..c488fda
--- /dev/null
+++ b/python/reinstall_cloud_instance.py
@@ -0,0 +1,57 @@
# -*- encoding: utf-8 -*-
import json
try:
    from ovh import ovh
except ImportError:
    # In case it's installed globally
    import ovh
import sys
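
# Usage: python reinstall_cloud_instance.py [--get-state|--use-current] <instance_id>
# --get-state prints the instance status; --use-current reinstalls the instance
# with the image it is currently running (when that image is still available).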

# Credentials are stored in ~/.ovh.conf
# See ovh/README.rst
client = ovh.Client()

projects_list = client.get('/cloud/project/')
if len(projects_list) > 1:
    print("More than one project is not supported, taking the first one")
project = projects_list[0]
instances_list = client.get('/cloud/project/{}/instance'.format(project))
instances = dict(map(lambda x: (x["id"], x), instances_list))
if sys.argv[-1] in instances:
    instance = instances[sys.argv[-1]]
else:
    print("Instance not in list:")
    for instance in instances_list:
        print("{}: {}".format(instance["name"], instance["id"]))
    sys.exit(1)

current_image = instance["imageId"]
available_images = client.get('/cloud/project/{}/image'.format(project),
                              osType="linux",
                              region=instance["region"])
available_images_ids = list(map(lambda x: x["id"], available_images))

def print_images(available_images):
    for image in available_images:
        print("{}: {}".format(image["name"], image["id"]))

def reinstall(image_id):
    return client.post('/cloud/project/{}/instance/{}/reinstall'.format(project, instance["id"]),
                       imageId=image_id)

if "--get-state" in sys.argv:
    print(instance["status"])
elif "--use-current" in sys.argv:
    if current_image in available_images_ids:
        print("Current image still available, using it")
        print(reinstall(current_image))
    else:
        print("Current image no longer available. Choose among:")
        print_images(available_images)
elif sys.argv[-1] in available_images_ids:
    print("Chosen image available, using it")
    print(reinstall(sys.argv[-1]))
else:
    print("Chosen image not available. Choose among:")
    print_images(available_images)