diff options
Diffstat (limited to 'modules/private/buildbot/projects')
6 files changed, 0 insertions, 1227 deletions
diff --git a/modules/private/buildbot/projects/caldance/__init__.py b/modules/private/buildbot/projects/caldance/__init__.py deleted file mode 100644 index 2074d9e..0000000 --- a/modules/private/buildbot/projects/caldance/__init__.py +++ /dev/null | |||
@@ -1,198 +0,0 @@ | |||
1 | from buildbot.plugins import * | ||
2 | from buildbot_common.build_helpers import * | ||
3 | import os | ||
4 | from buildbot.util import bytes2unicode | ||
5 | import json | ||
6 | |||
7 | __all__ = [ "configure", "E" ] | ||
8 | |||
9 | class E(): | ||
10 | PROJECT = "caldance" | ||
11 | BUILDBOT_URL = "https://git.immae.eu/buildbot/{}/".format(PROJECT) | ||
12 | SOCKET = "unix:/run/buildbot/{}.sock".format(PROJECT) | ||
13 | PB_SOCKET = "unix:address=/run/buildbot/{}_pb.sock".format(PROJECT) | ||
14 | RELEASE_PATH = "/var/lib/ftp/release.immae.eu/{}".format(PROJECT) | ||
15 | RELEASE_URL = "https://release.immae.eu/{}".format(PROJECT) | ||
16 | GIT_URL = "gitolite@git.immae.eu:perso/simon_descarpentries/www.cal-dance.com" | ||
17 | SSH_KEY_PATH = "/var/lib/buildbot/buildbot_key" | ||
18 | SSH_HOST_KEY = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIFbhFTl2A2RJn5L51yxJM4XfCS2ZaiSX/jo9jFSdghF" | ||
19 | LDAP_HOST = "ldap.immae.eu" | ||
20 | LDAP_DN = "cn=buildbot,ou=services,dc=immae,dc=eu" | ||
21 | LDAP_ROLES_BASE = "ou=roles,ou=hosts,dc=immae,dc=eu" | ||
22 | XMPP_RECIPIENTS = os.environ["BUILDBOT_XMPP_RECIPIENTS"].split(" ") | ||
23 | |||
24 | PUPPET_HOST = { | ||
25 | "integration": [ "-p8022", "root@caldance.cs.immae.dev"], | ||
26 | } | ||
27 | |||
28 | # master.cfg | ||
29 | SECRETS_FILE = os.getcwd() + "/secrets" | ||
30 | LDAP_URL = "ldaps://ldap.immae.eu:636" | ||
31 | LDAP_ADMIN_USER = "cn=buildbot,ou=services,dc=immae,dc=eu" | ||
32 | LDAP_BASE = "dc=immae,dc=eu" | ||
33 | LDAP_PATTERN = "(uid=%(username)s)" | ||
34 | LDAP_GROUP_PATTERN = "(&(memberOf=cn=groups,ou=caldance,cn=buildbot,ou=services,dc=immae,dc=eu)(member=%(dn)s))" | ||
35 | TITLE_URL = "https://caldance.cs.immae.dev" | ||
36 | TITLE = "Caldance" | ||
37 | |||
38 | class CustomBase(webhooks.base): | ||
39 | def getChanges(self, request): | ||
40 | try: | ||
41 | content = request.content.read() | ||
42 | args = json.loads(bytes2unicode(content)) | ||
43 | except Exception as e: | ||
44 | raise ValueError("Error loading JSON: " + str(e)) | ||
45 | |||
46 | args.setdefault("comments", "") | ||
47 | args.setdefault("repository", "") | ||
48 | args.setdefault("author", args.get("who", "unknown")) | ||
49 | |||
50 | if args["category"] == "deploy_webhook": | ||
51 | args = { | ||
52 | "category": "deploy_webhook", | ||
53 | "comments": "", | ||
54 | "repository": "", | ||
55 | "author": "webhook", | ||
56 | "project": "Caldance", | ||
57 | "properties": { | ||
58 | "environment": args.get("environment", "integration"), | ||
59 | "build": "caldance_{}.tar.gz".format(args.get("build", "master")) | ||
60 | } | ||
61 | } | ||
62 | |||
63 | return ([args], None) | ||
64 | |||
65 | def deploy_hook_scheduler(project, timer=1): | ||
66 | return schedulers.AnyBranchScheduler( | ||
67 | change_filter=util.ChangeFilter(category="deploy_webhook", project=project), | ||
68 | name="{}_deploy".format(project), treeStableTimer=timer, builderNames=["{}_deploy".format(project)]) | ||
69 | |||
70 | def configure(c): | ||
71 | c["buildbotURL"] = E.BUILDBOT_URL | ||
72 | c["www"]["port"] = E.SOCKET | ||
73 | |||
74 | c["www"]["change_hook_dialects"]["base"] = { | ||
75 | "custom_class": CustomBase | ||
76 | } | ||
77 | c['workers'].append(worker.LocalWorker("generic-worker")) | ||
78 | c['workers'].append(worker.LocalWorker("deploy-worker")) | ||
79 | |||
80 | db_lock = util.MasterLock("deploy_after_build") | ||
81 | |||
82 | c['schedulers'].append(hook_scheduler("Caldance", timer=1)) | ||
83 | c['schedulers'].append(force_scheduler("force_caldance", ["Caldance_build"])) | ||
84 | c['schedulers'].append(deploy_scheduler("deploy_caldance", ["Caldance_deploy"])) | ||
85 | c['schedulers'].append(deploy_hook_scheduler("Caldance", timer=1)) | ||
86 | |||
87 | c['builders'].append(factory("caldance", locks=[db_lock.access('exclusive')])) | ||
88 | |||
89 | c['builders'].append(deploy_factory("caldance", locks=[db_lock.access('exclusive')])) | ||
90 | |||
91 | c['services'].append(SlackStatusPush( | ||
92 | name="slack_status_caldance", | ||
93 | builders=["Caldance_build", "Caldance_deploy"], | ||
94 | serverUrl=open(E.SECRETS_FILE + "/slack_webhook", "r").read().rstrip())) | ||
95 | c['services'].append(XMPPStatusPush( | ||
96 | name="xmpp_status_caldance", | ||
97 | builders=["Caldance_build", "Caldance_deploy"], | ||
98 | recipients=E.XMPP_RECIPIENTS, | ||
99 | password=open(E.SECRETS_FILE + "/notify_xmpp_password", "r").read().rstrip())) | ||
100 | |||
101 | def factory(project, locks=[], ignore_fails=False): | ||
102 | release_file = "{1}/{0}_%(kw:clean_branch)s.tar.gz" | ||
103 | |||
104 | package = util.Interpolate("{0}_%(kw:clean_branch)s.tar.gz".format(project), clean_branch=clean_branch) | ||
105 | package_dest = util.Interpolate(release_file.format(project, E.RELEASE_PATH), clean_branch=clean_branch) | ||
106 | package_url = util.Interpolate(release_file.format(project, E.RELEASE_URL), clean_branch=clean_branch) | ||
107 | |||
108 | factory = util.BuildFactory() | ||
109 | factory.addStep(steps.Git(logEnviron=False, repourl=E.GIT_URL, | ||
110 | sshPrivateKey=open(E.SSH_KEY_PATH).read().rstrip(), | ||
111 | sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy")) | ||
112 | factory.addSteps(package_and_upload(package, package_dest, package_url)) | ||
113 | |||
114 | return util.BuilderConfig( | ||
115 | name="{}_build".format(project.capitalize()), | ||
116 | locks=locks, | ||
117 | workernames=["generic-worker"], factory=factory) | ||
118 | |||
119 | def compute_build_infos(project): | ||
120 | @util.renderer | ||
121 | def compute(props): | ||
122 | import re, hashlib | ||
123 | build_file = props.getProperty("build") | ||
124 | package_dest = "{1}/{0}".format(build_file, E.RELEASE_PATH) | ||
125 | version = re.match(r"{0}_(.*).tar.gz".format(project), build_file).group(1) | ||
126 | with open(package_dest, "rb") as f: | ||
127 | sha = hashlib.sha256(f.read()).hexdigest() | ||
128 | return { | ||
129 | "build_version": version, | ||
130 | "build_hash": sha, | ||
131 | } | ||
132 | return compute | ||
133 | |||
134 | @util.renderer | ||
135 | def puppet_ssh_command(props): | ||
136 | environment = props["environment"] if props.hasProperty("environment") else "integration" | ||
137 | ssh_command = [ | ||
138 | "ssh", "-o", "UserKnownHostsFile=/dev/null", "-o", "StrictHostKeyChecking=no", "-o", "CheckHostIP=no", | ||
139 | "-i", E.SSH_KEY_PATH ] | ||
140 | return ssh_command + E.PUPPET_HOST.get(environment, ["host.invalid"]) | ||
141 | |||
142 | def deploy_factory(project, locks=[]): | ||
143 | package_dest = util.Interpolate("{0}/%(prop:build)s".format(E.RELEASE_PATH)) | ||
144 | |||
145 | factory = util.BuildFactory() | ||
146 | factory.addStep(steps.MasterShellCommand(command=["test", "-f", package_dest])) | ||
147 | factory.addStep(steps.SetProperties(properties=compute_build_infos(project))) | ||
148 | factory.addStep(LdapPush(environment=util.Property("environment"), | ||
149 | project=project, build_version=util.Property("build_version"), | ||
150 | build_hash=util.Property("build_hash"), ldap_password=util.Secret("ldap"))) | ||
151 | factory.addStep(steps.MasterShellCommand(command=puppet_ssh_command)) | ||
152 | return util.BuilderConfig( | ||
153 | name="{}_deploy".format(project.capitalize()), | ||
154 | locks=locks, | ||
155 | workernames=["deploy-worker"], factory=factory) | ||
156 | |||
157 | from twisted.internet import defer | ||
158 | from buildbot.process.buildstep import FAILURE | ||
159 | from buildbot.process.buildstep import SUCCESS | ||
160 | from buildbot.process.buildstep import BuildStep | ||
161 | |||
162 | class LdapPush(BuildStep): | ||
163 | name = "LdapPush" | ||
164 | renderables = ["environment", "project", "build_version", "build_hash", "ldap_password"] | ||
165 | |||
166 | def __init__(self, **kwargs): | ||
167 | self.environment = kwargs.pop("environment") | ||
168 | self.project = kwargs.pop("project") | ||
169 | self.build_version = kwargs.pop("build_version") | ||
170 | self.build_hash = kwargs.pop("build_hash") | ||
171 | self.ldap_password = kwargs.pop("ldap_password") | ||
172 | self.ldap_host = kwargs.pop("ldap_host", E.LDAP_HOST) | ||
173 | super().__init__(**kwargs) | ||
174 | |||
175 | def run(self): | ||
176 | import json | ||
177 | from ldap3 import Reader, Writer, Server, Connection, ObjectDef | ||
178 | server = Server(self.ldap_host) | ||
179 | conn = Connection(server, | ||
180 | user=E.LDAP_DN, | ||
181 | password=self.ldap_password) | ||
182 | conn.bind() | ||
183 | obj = ObjectDef("immaePuppetClass", conn) | ||
184 | r = Reader(conn, obj, | ||
185 | "cn=caldance.{},{}".format(self.environment, E.LDAP_ROLES_BASE)) | ||
186 | r.search() | ||
187 | if len(r) > 0: | ||
188 | w = Writer.from_cursor(r) | ||
189 | for value in w[0].immaePuppetJson.values: | ||
190 | config = json.loads(value) | ||
191 | if "role::caldance::{}_version".format(self.project) in config: | ||
192 | config["role::caldance::{}_version".format(self.project)] = self.build_version | ||
193 | config["role::caldance::{}_sha256".format(self.project)] = self.build_hash | ||
194 | w[0].immaePuppetJson -= value | ||
195 | w[0].immaePuppetJson += json.dumps(config, indent=" ") | ||
196 | w.commit() | ||
197 | return defer.succeed(SUCCESS) | ||
198 | return defer.succeed(FAILURE) | ||
diff --git a/modules/private/buildbot/projects/cryptoportfolio/__init__.py b/modules/private/buildbot/projects/cryptoportfolio/__init__.py deleted file mode 100644 index 5d70f95..0000000 --- a/modules/private/buildbot/projects/cryptoportfolio/__init__.py +++ /dev/null | |||
@@ -1,169 +0,0 @@ | |||
1 | from buildbot.plugins import * | ||
2 | from buildbot_common.build_helpers import * | ||
3 | import os | ||
4 | |||
5 | __all__ = [ "configure", "E" ] | ||
6 | |||
7 | class E(): | ||
8 | PROJECT = "cryptoportfolio" | ||
9 | BUILDBOT_URL = "https://git.immae.eu/buildbot/{}/".format(PROJECT) | ||
10 | SOCKET = "unix:/run/buildbot/{}.sock".format(PROJECT) | ||
11 | PB_SOCKET = "unix:address=/run/buildbot/{}_pb.sock".format(PROJECT) | ||
12 | RELEASE_PATH = "/var/lib/ftp/release.immae.eu/{}".format(PROJECT) | ||
13 | RELEASE_URL = "https://release.immae.eu/{}".format(PROJECT) | ||
14 | GIT_URL = "https://git.immae.eu/perso/Immae/Projets/Cryptomonnaies/Cryptoportfolio/{0}.git" | ||
15 | SSH_KEY_PATH = "/var/lib/buildbot/buildbot_key" | ||
16 | LDAP_HOST = "ldap.immae.eu" | ||
17 | LDAP_DN = "cn=buildbot,ou=services,dc=immae,dc=eu" | ||
18 | LDAP_ROLES_BASE = "ou=roles,ou=hosts,dc=immae,dc=eu" | ||
19 | |||
20 | PUPPET_HOST = { | ||
21 | "production": "root@cryptoportfolio.immae.eu", | ||
22 | "integration": "root@cryptoportfolio-dev.immae.eu" | ||
23 | } | ||
24 | |||
25 | # master.cfg | ||
26 | SECRETS_FILE = os.getcwd() + "/secrets" | ||
27 | LDAP_URL = "ldaps://ldap.immae.eu:636" | ||
28 | LDAP_ADMIN_USER = "cn=buildbot,ou=services,dc=immae,dc=eu" | ||
29 | LDAP_BASE = "dc=immae,dc=eu" | ||
30 | LDAP_PATTERN = "(uid=%(username)s)" | ||
31 | LDAP_GROUP_PATTERN = "(&(memberOf=cn=groups,ou=cryptoportfolio,cn=buildbot,ou=services,dc=immae,dc=eu)(member=%(dn)s))" | ||
32 | TITLE_URL = "https://git.immae.eu" | ||
33 | TITLE = "Cryptoportfolio" | ||
34 | |||
35 | # eval .. dans .zshrc_local | ||
36 | # mkdir -p $BUILD/go | ||
37 | # export GOPATH=$BUILD/go | ||
38 | # go get -u github.com/golang/dep/cmd/dep | ||
39 | # export PATH=$PATH:$BUILD/go/bin | ||
40 | # go get git.immae.eu/Cryptoportfolio/Front.git | ||
41 | # cd $BUILD/go/src/git.immae.eu/Cryptoportfolio/Front.git | ||
42 | # git checkout dev | ||
43 | # dep ensure | ||
44 | def configure(c): | ||
45 | c["buildbotURL"] = E.BUILDBOT_URL | ||
46 | c["www"]["port"] = E.SOCKET | ||
47 | |||
48 | c['workers'].append(worker.LocalWorker("generic-worker")) | ||
49 | c['workers'].append(worker.LocalWorker("deploy-worker")) | ||
50 | |||
51 | c['schedulers'].append(hook_scheduler("Trader")) | ||
52 | c['schedulers'].append(hook_scheduler("Front")) | ||
53 | c['schedulers'].append(force_scheduler( | ||
54 | "force_cryptoportfolio", ["Trader_build", "Front_build"])) | ||
55 | c['schedulers'].append(deploy_scheduler("deploy_cryptoportfolio", | ||
56 | ["Trader_deploy", "Front_deploy"])) | ||
57 | |||
58 | c['builders'].append(factory("trader")) | ||
59 | c['builders'].append(factory("front", ignore_fails=True)) | ||
60 | |||
61 | c['builders'].append(deploy_factory("trader")) | ||
62 | c['builders'].append(deploy_factory("front")) | ||
63 | |||
64 | c['services'].append(SlackStatusPush( | ||
65 | name="slack_status_cryptoportfolio", | ||
66 | builders=["Front_build", "Trader_build", "Front_deploy", "Trader_deploy"], | ||
67 | serverUrl=open(E.SECRETS_FILE + "/slack_webhook", "r").read().rstrip())) | ||
68 | |||
69 | def factory(project, ignore_fails=False): | ||
70 | release_file = "{1}/{0}/{0}_%(kw:clean_branch)s.tar.gz" | ||
71 | |||
72 | url = E.GIT_URL.format(project.capitalize()) | ||
73 | |||
74 | package = util.Interpolate("{0}_%(kw:clean_branch)s.tar.gz".format(project), clean_branch=clean_branch) | ||
75 | package_dest = util.Interpolate(release_file.format(project, E.RELEASE_PATH), clean_branch=clean_branch) | ||
76 | package_url = util.Interpolate(release_file.format(project, E.RELEASE_URL), clean_branch=clean_branch) | ||
77 | |||
78 | factory = util.BuildFactory() | ||
79 | factory.addStep(steps.Git(logEnviron=False, repourl=url, | ||
80 | mode="full", method="copy")) | ||
81 | factory.addStep(steps.ShellCommand(name="make install", | ||
82 | logEnviron=False, haltOnFailure=(not ignore_fails), | ||
83 | warnOnFailure=ignore_fails, flunkOnFailure=(not ignore_fails), | ||
84 | command=["make", "install"])) | ||
85 | factory.addStep(steps.ShellCommand(name="make test", | ||
86 | logEnviron=False, haltOnFailure=(not ignore_fails), | ||
87 | warnOnFailure=ignore_fails, flunkOnFailure=(not ignore_fails), | ||
88 | command=["make", "test"])) | ||
89 | factory.addSteps(package_and_upload(package, package_dest, package_url)) | ||
90 | |||
91 | return util.BuilderConfig( | ||
92 | name="{}_build".format(project.capitalize()), | ||
93 | workernames=["generic-worker"], factory=factory) | ||
94 | |||
95 | def compute_build_infos(project): | ||
96 | @util.renderer | ||
97 | def compute(props): | ||
98 | import re, hashlib | ||
99 | build_file = props.getProperty("build") | ||
100 | package_dest = "{2}/{0}/{1}".format(project, build_file, E.RELEASE_PATH) | ||
101 | version = re.match(r"{0}_(.*).tar.gz".format(project), build_file).group(1) | ||
102 | with open(package_dest, "rb") as f: | ||
103 | sha = hashlib.sha256(f.read()).hexdigest() | ||
104 | return { | ||
105 | "build_version": version, | ||
106 | "build_hash": sha, | ||
107 | } | ||
108 | return compute | ||
109 | |||
110 | @util.renderer | ||
111 | def puppet_host(props): | ||
112 | environment = props["environment"] if props.hasProperty("environment") else "integration" | ||
113 | return E.PUPPET_HOST.get(environment, "host.invalid") | ||
114 | |||
115 | def deploy_factory(project): | ||
116 | package_dest = util.Interpolate("{1}/{0}/%(prop:build)s".format(project, E.RELEASE_PATH)) | ||
117 | |||
118 | factory = util.BuildFactory() | ||
119 | factory.addStep(steps.MasterShellCommand(command=["test", "-f", package_dest])) | ||
120 | factory.addStep(steps.SetProperties(properties=compute_build_infos(project))) | ||
121 | factory.addStep(LdapPush(environment=util.Property("environment"), | ||
122 | project=project, build_version=util.Property("build_version"), | ||
123 | build_hash=util.Property("build_hash"), ldap_password=util.Secret("ldap"))) | ||
124 | factory.addStep(steps.MasterShellCommand(command=[ | ||
125 | "ssh", "-o", "UserKnownHostsFile=/dev/null", "-o", "StrictHostKeyChecking=no", "-o", "CheckHostIP=no", "-i", E.SSH_KEY_PATH, puppet_host])) | ||
126 | return util.BuilderConfig(name="{}_deploy".format(project.capitalize()), workernames=["deploy-worker"], factory=factory) | ||
127 | |||
128 | from twisted.internet import defer | ||
129 | from buildbot.process.buildstep import FAILURE | ||
130 | from buildbot.process.buildstep import SUCCESS | ||
131 | from buildbot.process.buildstep import BuildStep | ||
132 | |||
133 | class LdapPush(BuildStep): | ||
134 | name = "LdapPush" | ||
135 | renderables = ["environment", "project", "build_version", "build_hash", "ldap_password"] | ||
136 | |||
137 | def __init__(self, **kwargs): | ||
138 | self.environment = kwargs.pop("environment") | ||
139 | self.project = kwargs.pop("project") | ||
140 | self.build_version = kwargs.pop("build_version") | ||
141 | self.build_hash = kwargs.pop("build_hash") | ||
142 | self.ldap_password = kwargs.pop("ldap_password") | ||
143 | self.ldap_host = kwargs.pop("ldap_host", E.LDAP_HOST) | ||
144 | super().__init__(**kwargs) | ||
145 | |||
146 | def run(self): | ||
147 | import json | ||
148 | from ldap3 import Reader, Writer, Server, Connection, ObjectDef | ||
149 | server = Server(self.ldap_host) | ||
150 | conn = Connection(server, | ||
151 | user=E.LDAP_DN, | ||
152 | password=self.ldap_password) | ||
153 | conn.bind() | ||
154 | obj = ObjectDef("immaePuppetClass", conn) | ||
155 | r = Reader(conn, obj, | ||
156 | "cn=cryptoportfolio.{},{}".format(self.environment, E.LDAP_ROLES_BASE)) | ||
157 | r.search() | ||
158 | if len(r) > 0: | ||
159 | w = Writer.from_cursor(r) | ||
160 | for value in w[0].immaePuppetJson.values: | ||
161 | config = json.loads(value) | ||
162 | if "role::cryptoportfolio::{}_version".format(self.project) in config: | ||
163 | config["role::cryptoportfolio::{}_version".format(self.project)] = self.build_version | ||
164 | config["role::cryptoportfolio::{}_sha256".format(self.project)] = self.build_hash | ||
165 | w[0].immaePuppetJson -= value | ||
166 | w[0].immaePuppetJson += json.dumps(config, indent=" ") | ||
167 | w.commit() | ||
168 | return defer.succeed(SUCCESS) | ||
169 | return defer.succeed(FAILURE) | ||
diff --git a/modules/private/buildbot/projects/denise/__init__.py b/modules/private/buildbot/projects/denise/__init__.py deleted file mode 100644 index abeba3c..0000000 --- a/modules/private/buildbot/projects/denise/__init__.py +++ /dev/null | |||
@@ -1,186 +0,0 @@ | |||
1 | from buildbot.plugins import * | ||
2 | from buildbot_common.build_helpers import * | ||
3 | import os | ||
4 | from buildbot.util import bytes2unicode | ||
5 | import json | ||
6 | |||
7 | __all__ = [ "configure", "E" ] | ||
8 | |||
9 | class E(): | ||
10 | PROJECT = "denise" | ||
11 | BUILDBOT_URL = "https://git.immae.eu/buildbot/{}/".format(PROJECT) | ||
12 | SOCKET = "unix:/run/buildbot/{}.sock".format(PROJECT) | ||
13 | PB_SOCKET = "unix:address=/run/buildbot/{}_pb.sock".format(PROJECT) | ||
14 | SSH_KEY_PATH = "/var/lib/buildbot/buildbot_key" | ||
15 | SSH_HOST_KEY = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIFbhFTl2A2RJn5L51yxJM4XfCS2ZaiSX/jo9jFSdghF" | ||
16 | |||
17 | BINGO_RELEASE_PATH = "/var/lib/buildbot/outputs/denise/bingo" | ||
18 | BINGO_BETA_PATH = "/var/lib/buildbot/outputs/denise/bingo_beta" | ||
19 | BINGO_GIT_URL = "gitolite@git.immae.eu:perso/Denise/bingo" | ||
20 | |||
21 | OMS_RELEASE_PATH = "/var/lib/buildbot/outputs/denise/oms" | ||
22 | OMS_BETA_PATH = "/var/lib/buildbot/outputs/denise/oms_beta" | ||
23 | OMS_GIT_URL = "gitolite@git.immae.eu:perso/Denise/oms" | ||
24 | |||
25 | AVENTURIERS_RELEASE_PATH = "/var/lib/buildbot/outputs/denise/aventuriers" | ||
26 | AVENTURIERS_GIT_URL = "https://git.immae.eu/perso/Denise/aventuriers.git" | ||
27 | |||
28 | # master.cfg | ||
29 | SECRETS_FILE = os.getcwd() + "/secrets" | ||
30 | LDAP_URL = "ldaps://ldap.immae.eu:636" | ||
31 | LDAP_ADMIN_USER = "cn=buildbot,ou=services,dc=immae,dc=eu" | ||
32 | LDAP_BASE = "dc=immae,dc=eu" | ||
33 | LDAP_PATTERN = "(uid=%(username)s)" | ||
34 | LDAP_GROUP_PATTERN = "(&(memberOf=cn=groups,ou=denise,cn=buildbot,ou=services,dc=immae,dc=eu)(member=%(dn)s))" | ||
35 | TITLE_URL = "https://oms.syanni.eu" | ||
36 | TITLE = "Syanni website" | ||
37 | |||
38 | class CustomBase(webhooks.base): | ||
39 | def getChanges(self, request): | ||
40 | try: | ||
41 | content = request.content.read() | ||
42 | args = json.loads(bytes2unicode(content)) | ||
43 | except Exception as e: | ||
44 | raise ValueError("Error loading JSON: " + str(e)) | ||
45 | |||
46 | args.setdefault("comments", "") | ||
47 | args.setdefault("repository", "") | ||
48 | args.setdefault("author", args.get("who", "unknown")) | ||
49 | |||
50 | return ([args], None) | ||
51 | |||
52 | def configure(c): | ||
53 | c["buildbotURL"] = E.BUILDBOT_URL | ||
54 | c["www"]["port"] = E.SOCKET | ||
55 | |||
56 | c["www"]["change_hook_dialects"]["base"] = { | ||
57 | "custom_class": CustomBase | ||
58 | } | ||
59 | c['workers'].append(worker.LocalWorker("generic-worker-denise")) | ||
60 | |||
61 | c['schedulers'].append(hook_scheduler("DeniseBingo", timer=1)) | ||
62 | c['schedulers'].append(hook_scheduler("DeniseOMS", timer=1)) | ||
63 | c['schedulers'].append(hook_scheduler("DeniseAventuriers", timer=1)) | ||
64 | c['schedulers'].append(force_scheduler("force_denise", [ | ||
65 | "DeniseBingo_build", "DeniseOMS_build", "DeniseAventuriers_build" | ||
66 | ])) | ||
67 | |||
68 | c['builders'].append(denise_oms_factory()) | ||
69 | c['builders'].append(denise_bingo_factory()) | ||
70 | c['builders'].append(denise_aventuriers_factory()) | ||
71 | |||
72 | def denise_bingo_factory(): | ||
73 | @util.renderer | ||
74 | def bingo_run_path(props): | ||
75 | if props.hasProperty("branch") and len(props["branch"]) > 0 and props["branch"] == "master": | ||
76 | return "/run/denise_bingo/gunicorn.pid" | ||
77 | else: | ||
78 | return "/run/denise_bingo_beta/gunicorn.pid" | ||
79 | |||
80 | @util.renderer | ||
81 | def bingo_systemd_service(props): | ||
82 | if props.hasProperty("branch") and len(props["branch"]) > 0 and props["branch"] == "master": | ||
83 | return "denise-bingo" | ||
84 | else: | ||
85 | return "denise-bingo-beta" | ||
86 | |||
87 | @util.renderer | ||
88 | def bingo_url(props): | ||
89 | if props.hasProperty("branch") and len(props["branch"]) > 0 and props["branch"] == "master": | ||
90 | return "https://bingo.syanni.eu" | ||
91 | else: | ||
92 | return "https://beta.bingo.syanni.eu" | ||
93 | |||
94 | @util.renderer | ||
95 | def bingo_path(props): | ||
96 | if props.hasProperty("branch") and len(props["branch"]) > 0 and props["branch"] == "master": | ||
97 | return E.BINGO_RELEASE_PATH | ||
98 | else: | ||
99 | return E.BINGO_BETA_PATH | ||
100 | |||
101 | factory = util.BuildFactory() | ||
102 | factory.addStep(steps.Git(logEnviron=False, repourl=E.BINGO_GIT_URL, | ||
103 | submodules=True, sshPrivateKey=open(E.SSH_KEY_PATH).read().rstrip(), | ||
104 | sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy")) | ||
105 | factory.addStep(steps.MasterShellCommand(command=util.Interpolate("rm -rf %(kw:bingo_path)s", bingo_path=bingo_path))) | ||
106 | factory.addStep(steps.DirectoryUpload(workersrc="../source", | ||
107 | masterdest=bingo_path, | ||
108 | url=bingo_url)) | ||
109 | factory.addStep(steps.MasterShellCommand(command=util.Interpolate("chmod -R a+rX %(kw:bingo_path)s", bingo_path=bingo_path))) | ||
110 | factory.addStep(steps.MasterShellCommand(command=util.Interpolate("/run/wrappers/bin/sudo systemctl restart %(kw:bingo_service)s.service", bingo_service=bingo_systemd_service))) | ||
111 | |||
112 | return util.BuilderConfig(name="DeniseBingo_build", workernames=["generic-worker-denise"], factory=factory) | ||
113 | |||
114 | def denise_oms_factory(): | ||
115 | @util.renderer | ||
116 | def oms_run_path(props): | ||
117 | if props.hasProperty("branch") and len(props["branch"]) > 0 and props["branch"] == "master": | ||
118 | return "/run/denise_oms/gunicorn.pid" | ||
119 | else: | ||
120 | return "/run/denise_oms_beta/gunicorn.pid" | ||
121 | |||
122 | @util.renderer | ||
123 | def oms_systemd_service(props): | ||
124 | if props.hasProperty("branch") and len(props["branch"]) > 0 and props["branch"] == "master": | ||
125 | return "denise-oms" | ||
126 | else: | ||
127 | return "denise-oms-beta" | ||
128 | |||
129 | @util.renderer | ||
130 | def oms_url(props): | ||
131 | if props.hasProperty("branch") and len(props["branch"]) > 0 and props["branch"] == "master": | ||
132 | return "https://oms.syanni.eu" | ||
133 | else: | ||
134 | return "https://beta.oms.syanni.eu" | ||
135 | |||
136 | @util.renderer | ||
137 | def oms_path(props): | ||
138 | if props.hasProperty("branch") and len(props["branch"]) > 0 and props["branch"] == "master": | ||
139 | return E.OMS_RELEASE_PATH | ||
140 | else: | ||
141 | return E.OMS_BETA_PATH | ||
142 | |||
143 | factory = util.BuildFactory() | ||
144 | factory.addStep(steps.Git(logEnviron=False, repourl=E.OMS_GIT_URL, | ||
145 | submodules=True, sshPrivateKey=open(E.SSH_KEY_PATH).read().rstrip(), | ||
146 | sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy")) | ||
147 | factory.addStep(steps.MasterShellCommand(command=util.Interpolate("rm -rf %(kw:oms_path)s", oms_path=oms_path))) | ||
148 | factory.addStep(steps.DirectoryUpload(workersrc="../source", | ||
149 | masterdest=oms_path, | ||
150 | url=oms_url)) | ||
151 | factory.addStep(steps.MasterShellCommand(command=util.Interpolate("chmod -R a+rX %(kw:oms_path)s", oms_path=oms_path))) | ||
152 | factory.addStep(steps.MasterShellCommand(command=util.Interpolate("/run/wrappers/bin/sudo systemctl restart %(kw:oms_service)s.service", oms_service=oms_systemd_service))) | ||
153 | |||
154 | return util.BuilderConfig(name="DeniseOMS_build", workernames=["generic-worker-denise"], factory=factory) | ||
155 | |||
156 | def denise_aventuriers_factory(): | ||
157 | path_env = { | ||
158 | "PATH": os.environ["BUILDBOT_PATH_Aventuriers"] + ":${PATH}", | ||
159 | "TZ": "Europe/Paris", | ||
160 | } | ||
161 | |||
162 | factory = util.BuildFactory() | ||
163 | factory.addStep(steps.Git(logEnviron=False, repourl=E.AVENTURIERS_GIT_URL, | ||
164 | submodules=True, mode="full", method="fresh")) | ||
165 | factory.addStep(steps.ShellCommand(name="build files", | ||
166 | logEnviron=False, haltOnFailure=True, workdir="build", | ||
167 | env=path_env, command=["make", "tout", "encyclo"])) | ||
168 | factory.addStep(steps.MasterShellCommand(command="rm -rf {}".format(E.AVENTURIERS_RELEASE_PATH))) | ||
169 | factory.addStep(steps.DirectoryUpload(workersrc="../build/html", | ||
170 | masterdest=E.AVENTURIERS_RELEASE_PATH, | ||
171 | url="https://aventuriers.syanni.eu")) | ||
172 | factory.addStep(steps.FileUpload(name="upload epub file", workersrc="aventuriers.epub", | ||
173 | workdir="build", masterdest=E.AVENTURIERS_RELEASE_PATH + "/aventuriers.epub", | ||
174 | url="https://aventuriers.syanni.eu/aventuriers.epub", mode=0o644)) | ||
175 | factory.addStep(steps.FileUpload(name="upload mobi file", workersrc="aventuriers.mobi", | ||
176 | workdir="build", masterdest=E.AVENTURIERS_RELEASE_PATH + "/aventuriers.mobi", | ||
177 | url="https://aventuriers.syanni.eu/aventuriers.mobi", mode=0o644)) | ||
178 | factory.addStep(steps.FileUpload(name="upload pdf file", workersrc="aventuriers.pdf", | ||
179 | workdir="build", masterdest=E.AVENTURIERS_RELEASE_PATH + "/aventuriers.pdf", | ||
180 | url="https://aventuriers.syanni.eu/aventuriers.pdf", mode=0o644)) | ||
181 | factory.addStep(steps.FileUpload(name="upload encyclo pdf file", workersrc="encyclo.pdf", | ||
182 | workdir="build", masterdest=E.AVENTURIERS_RELEASE_PATH + "/encyclo.pdf", | ||
183 | url="https://aventuriers.syanni.eu/encyclo.pdf", mode=0o644)) | ||
184 | factory.addStep(steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.AVENTURIERS_RELEASE_PATH))) | ||
185 | |||
186 | return util.BuilderConfig(name="DeniseAventuriers_build", workernames=["generic-worker-denise"], factory=factory) | ||
diff --git a/modules/private/buildbot/projects/immaeEu/__init__.py b/modules/private/buildbot/projects/immaeEu/__init__.py deleted file mode 100644 index 83265cd..0000000 --- a/modules/private/buildbot/projects/immaeEu/__init__.py +++ /dev/null | |||
@@ -1,314 +0,0 @@ | |||
1 | from buildbot.plugins import * | ||
2 | from buildbot_common.build_helpers import * | ||
3 | import os | ||
4 | from buildbot.util import bytes2unicode | ||
5 | import json | ||
6 | |||
7 | __all__ = [ "configure", "E" ] | ||
8 | |||
class E():
    """Namespace of constants for the immae.eu buildbot project."""
    PROJECT = "immaeEu"
    BUILDBOT_URL = "https://git.immae.eu/buildbot/{}/".format(PROJECT)
    # Unix sockets exposed by this master: web UI and worker (PB) protocol.
    SOCKET = "unix:/run/buildbot/{}.sock".format(PROJECT)
    PB_SOCKET = "unix:address=/run/buildbot/{}_pb.sock".format(PROJECT)
    # Deploy key and pinned host key used by the Git checkout steps.
    SSH_KEY_PATH = "/var/lib/buildbot/buildbot_key"
    SSH_HOST_KEY = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIFbhFTl2A2RJn5L51yxJM4XfCS2ZaiSX/jo9jFSdghF"
    XMPP_RECIPIENTS = os.environ["BUILDBOT_XMPP_RECIPIENTS"].split(" ")

    # Git repositories of the individual sites built by this project.
    BIP39_GIT_URL = "https://git.immae.eu/perso/Immae/Projets/Cryptomonnaies/BIP39.git"
    IMMAE_EU_GIT_URL = "gitolite@git.immae.eu:perso/Immae/Sites/Blog"
    HISTORY_GIT_URL = "gitolite@git.immae.eu:perso/Immae/Sites/History"
    RECETTES_GIT_URL = "gitolite@git.immae.eu:perso/Immae/Sites/Recettes"
    COURS_GIT_URL = "gitolite@git.immae.eu:perso/Immae/Sites/Cours"
    DOCS_GIT_URL = "gitolite@git.immae.eu:perso/Immae/Sites/Docs"
    NORMALESUP_GIT_URL = "gitolite@git.immae.eu:perso/Immae/Projets/Sites/Normalesup"

    # Where each build's output is published on the master.
    COURS_RELEASE_PATH = "/var/lib/buildbot/outputs/immae/cours"
    COURS_TARBALL_PATH = "/var/lib/ftp/release.immae.eu/cours"
    COURS_TARBALL_URL = "https://release.immae.eu/cours"
    BIP39_RELEASE_PATH = "/var/lib/buildbot/outputs/immae/bip39"
    HISTORY_RELEASE_PATH = "/var/lib/buildbot/outputs/immae/history"
    IMMAE_EU_RELEASE_PATH = "/var/lib/buildbot/outputs/immae/blog"
    DOCS_RELEASE_PATH = "/var/lib/buildbot/outputs/immae/docs"
    RECETTES_RELEASE_PATH = "/var/lib/buildbot/outputs/immae/recettes"
    NORMALESUP_RELEASE_PATH = "/var/lib/buildbot/outputs/immae/recherche"
    GSMCELLS_RELEASE_PATH = "/var/lib/ftp/release.immae.eu/gsm_cells"
    GSMCELLS_RELEASE_URL = "https://release.immae.eu/gsm_cells"

    # master.cfg
    SECRETS_FILE = os.getcwd() + "/secrets"
    LDAP_URL = "ldaps://ldap.immae.eu:636"
    LDAP_ADMIN_USER = "cn=buildbot,ou=services,dc=immae,dc=eu"
    LDAP_BASE = "dc=immae,dc=eu"
    LDAP_PATTERN = "(uid=%(username)s)"
    LDAP_GROUP_PATTERN = "(&(memberOf=cn=groups,ou=immaeEu,cn=buildbot,ou=services,dc=immae,dc=eu)(member=%(dn)s))"
    TITLE_URL = "https://www.immae.eu"
    TITLE = "Immae website"
class CustomBase(webhooks.base):
    """Webhook dialect that accepts a plain JSON payload and fills in defaults."""

    def getChanges(self, request):
        """Parse the request body as JSON and return it as a single change.

        Raises ValueError when the body cannot be decoded as JSON.
        """
        try:
            payload = json.loads(bytes2unicode(request.content.read()))
        except Exception as exc:
            raise ValueError("Error loading JSON: " + str(exc))

        # Normalize optional fields so downstream consumers always find them.
        for key, fallback in (("comments", ""), ("repository", "")):
            payload.setdefault(key, fallback)
        payload.setdefault("author", payload.get("who", "unknown"))

        return ([payload], None)
61 | |||
def configure(c):
    """Register the immae.eu workers, schedulers, builders and notifiers.

    Mutates the buildbot master config dict *c* in place.
    """
    c["buildbotURL"] = E.BUILDBOT_URL
    c["www"]["port"] = E.SOCKET

    c["www"]["change_hook_dialects"]["base"] = {
        "custom_class": CustomBase
    }
    c['workers'].append(worker.LocalWorker("generic-worker-immae-eu"))

    c['schedulers'].append(hook_scheduler("ImmaeEu", timer=1))
    c['schedulers'].append(hook_scheduler("Normalesup", timer=1))
    c['schedulers'].append(hook_scheduler("Cours", timer=1))
    c['schedulers'].append(hook_scheduler("Recettes", timer=1))
    c['schedulers'].append(hook_scheduler("Docs", timer=1))
    c['schedulers'].append(hook_scheduler("History", timer=1))
    c['schedulers'].append(hook_scheduler("BIP39", timer=1))
    # GSM cells data is refreshed weekly (Sunday, 03:00) rather than on push.
    c['schedulers'].append(schedulers.Nightly(name="GSMCells-weekly",
        builderNames=["GSMCells_build"], dayOfWeek=6, hour=3))
    c['schedulers'].append(force_scheduler("force_immae_eu", [
        "ImmaeEu_build", "Normalesup_build", "Cours_build", "Docs_build",
        "Recettes_build", "History_build", "BIP39_build"
        ]))
    c['schedulers'].append(schedulers.ForceScheduler(
        name="GSMCells-force", label="Force build",
        buttonName="Force build",
        reason=util.StringParameter(name="reason", label="Reason", default="Force build"),
        codebases=[
            util.CodebaseParameter("",
                branch=util.FixedParameter(name="branch", default=""),
                revision=util.FixedParameter(name="revision", default=""),
                repository=util.FixedParameter(name="repository", default=""),
                project=util.FixedParameter(name="project", default=""),
                ),
            ],
        username=util.FixedParameter(name="username", default="Web button"),
        builderNames=["GSMCells_build"]
        ))

    c['builders'].append(immae_eu_factory())
    c['builders'].append(normalesup_factory())
    c['builders'].append(cours_factory())
    c['builders'].append(gsm_cells_factory())
    c['builders'].append(recettes_factory())
    c['builders'].append(docs_factory())
    c['builders'].append(history_factory())
    c['builders'].append(bip39_factory())

    # Read secrets with context managers so the file handles are closed
    # (previously the handles from bare open() were leaked).
    with open(E.SECRETS_FILE + "/slack_webhook", "r") as f:
        slack_webhook = f.read().rstrip()
    with open(E.SECRETS_FILE + "/notify_xmpp_password", "r") as f:
        xmpp_password = f.read().rstrip()

    c['services'].append(SlackStatusPush(
        name="slack_status_immae_eu_project",
        builders=[
            "ImmaeEu_build", "Normalesup_build", "Cours_build", "Docs_build",
            "GSMCells_build", "Recettes_build", "History_build",
            "BIP39_build"
            ],
        serverUrl=slack_webhook))
    c['services'].append(XMPPStatusPush(
        name="xmpp_status_immae_eu_project",
        builders=[
            "ImmaeEu_build", "Normalesup_build", "Cours_build", "Docs_build",
            "GSMCells_build", "Recettes_build", "History_build",
            "BIP39_build"
            ],
        recipients=E.XMPP_RECIPIENTS,
        password=xmpp_password))
126 | |||
def history_factory():
    """Build the Jekyll 'History' site and publish it on the master.

    Returns a BuilderConfig named ``History_build``.
    """
    path_env = {
        "PATH": os.environ["BUILDBOT_PATH_History"] + ":${PATH}"
    }
    # Read the deploy key with a context manager: the bare open() previously
    # leaked the file handle.
    with open(E.SSH_KEY_PATH) as f:
        ssh_key = f.read().rstrip()
    factory = util.BuildFactory()
    factory.addStep(steps.Git(logEnviron=False, repourl=E.HISTORY_GIT_URL,
        submodules=True, sshPrivateKey=ssh_key,
        sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy"))
    factory.addStep(steps.ShellCommand(name="build website",
        logEnviron=False, haltOnFailure=True, workdir="source",
        env=path_env, command=["jekyll", "build"]))
    # Wipe the previous release before uploading the new one.
    factory.addStep(steps.MasterShellCommand(command="rm -rf {}".format(E.HISTORY_RELEASE_PATH)))
    factory.addStep(steps.DirectoryUpload(workersrc="../source/_site",
        masterdest=E.HISTORY_RELEASE_PATH,
        url="https://www.immae.eu/history"))
    factory.addStep(steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.HISTORY_RELEASE_PATH)))

    return util.BuilderConfig(name="History_build", workernames=["generic-worker-immae-eu"], factory=factory)
145 | |||
def docs_factory():
    """Build the Sphinx 'Docs' site and publish it on the master.

    Returns a BuilderConfig named ``Docs_build``.
    """
    path_env = {
        "PATH": os.environ["BUILDBOT_PATH_Docs"] + ":${PATH}"
    }
    # Read the deploy key with a context manager: the bare open() previously
    # leaked the file handle.
    with open(E.SSH_KEY_PATH) as f:
        ssh_key = f.read().rstrip()
    factory = util.BuildFactory()
    factory.addStep(steps.Git(logEnviron=False, repourl=E.DOCS_GIT_URL,
        submodules=True, sshPrivateKey=ssh_key,
        sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy"))
    factory.addStep(steps.ShellCommand(name="build website",
        logEnviron=False, haltOnFailure=True, workdir="source",
        env=path_env, command=["make", "clean", "html"]))
    # Wipe the previous release before uploading the new one.
    factory.addStep(steps.MasterShellCommand(command="rm -rf {}".format(E.DOCS_RELEASE_PATH)))
    factory.addStep(steps.DirectoryUpload(workersrc="../source/_build/html",
        masterdest=E.DOCS_RELEASE_PATH,
        url="https://www.immae.eu/docs"))
    factory.addStep(steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.DOCS_RELEASE_PATH)))

    return util.BuilderConfig(name="Docs_build", workernames=["generic-worker-immae-eu"], factory=factory)
164 | |||
def recettes_factory():
    """Build the Jekyll 'Recettes' site (inside nix-shell) and publish it.

    Returns a BuilderConfig named ``Recettes_build``.
    """
    path_env = {
        "PATH": os.environ["BUILDBOT_PATH_Recettes"] + ":${PATH}"
    }
    # Read the deploy key with a context manager: the bare open() previously
    # leaked the file handle.
    with open(E.SSH_KEY_PATH) as f:
        ssh_key = f.read().rstrip()
    factory = util.BuildFactory()
    factory.addStep(steps.Git(logEnviron=False, repourl=E.RECETTES_GIT_URL,
        submodules=True, sshPrivateKey=ssh_key,
        sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy"))
    factory.addStep(NixShellCommand(name="build website",
        logEnviron=False, haltOnFailure=True, workdir="source",
        env=path_env, command="jekyll build --trace --baseurl /recettes"))
    # Wipe the previous release before uploading the new one.
    factory.addStep(steps.MasterShellCommand(command="rm -rf {}".format(E.RECETTES_RELEASE_PATH)))
    factory.addStep(steps.DirectoryUpload(workersrc="../source/_site",
        masterdest=E.RECETTES_RELEASE_PATH,
        url="https://www.immae.eu/recettes"))
    factory.addStep(steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.RECETTES_RELEASE_PATH)))

    return util.BuilderConfig(name="Recettes_build", workernames=["generic-worker-immae-eu"], factory=factory)
183 | |||
def bip39_factory():
    """Build the standalone BIP39 page and release it as index.html."""
    build_env = {
        "PATH": os.environ["BUILDBOT_PATH_BIP39"] + ":${PATH}"
    }
    f = util.BuildFactory()
    # Public repository: no deploy key needed for the checkout.
    f.addStep(steps.Git(logEnviron=False, repourl=E.BIP39_GIT_URL,
        submodules=True, mode="full", method="copy"))
    f.addStep(steps.ShellCommand(name="build file",
        logEnviron=False, haltOnFailure=True, workdir="source",
        env=build_env, command=["python", "compile.py"]))
    f.addStep(steps.FileUpload(name="upload file", workersrc="bip39-standalone.html",
        workdir="source", masterdest=E.BIP39_RELEASE_PATH + "/index.html",
        url="https://tools.immae.eu/BIP39", mode=0o644))
    f.addStep(steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.BIP39_RELEASE_PATH)))

    return util.BuilderConfig(name="BIP39_build", workernames=["generic-worker-immae-eu"], factory=f)
200 | |||
def immae_eu_factory():
    """Build the main www.immae.eu blog and publish it on the master.

    Returns a BuilderConfig named ``ImmaeEu_build``.
    """
    path_env = {
        "PATH": os.environ["BUILDBOT_PATH_ImmaeEu"] + ":${PATH}"
    }
    # Read the deploy key with a context manager: the bare open() previously
    # leaked the file handle.
    with open(E.SSH_KEY_PATH) as f:
        ssh_key = f.read().rstrip()
    factory = util.BuildFactory()
    factory.addStep(steps.Git(logEnviron=False, repourl=E.IMMAE_EU_GIT_URL,
        submodules=True, sshPrivateKey=ssh_key,
        sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy"))
    factory.addStep(steps.ShellCommand(name="build website",
        logEnviron=False, haltOnFailure=True, workdir="source",
        env=path_env, command=["make", "html"]))
    # Wipe the previous release before uploading the new one.
    factory.addStep(steps.MasterShellCommand(command="rm -rf {}".format(E.IMMAE_EU_RELEASE_PATH)))
    factory.addStep(steps.DirectoryUpload(workersrc="../source/output",
        masterdest=E.IMMAE_EU_RELEASE_PATH,
        url="https://www.immae.eu"))
    factory.addStep(steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.IMMAE_EU_RELEASE_PATH)))

    return util.BuilderConfig(name="ImmaeEu_build", workernames=["generic-worker-immae-eu"], factory=factory)
219 | |||
def cours_factory():
    """Build the 'Cours' site, publish it, then build and release a pdf tarball.

    Returns a BuilderConfig named ``Cours_build``.
    """
    path_env = {
        "PATH": os.environ["BUILDBOT_PATH_Cours"] + ":${PATH}",
        "CI": "yes"
    }
    # Read the deploy key with a context manager: the bare open() previously
    # leaked the file handle.
    with open(E.SSH_KEY_PATH) as f:
        ssh_key = f.read().rstrip()
    factory = util.BuildFactory()
    factory.addStep(steps.Git(logEnviron=False, repourl=E.COURS_GIT_URL,
        submodules=True, sshPrivateKey=ssh_key,
        sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy"))
    factory.addStep(steps.ShellCommand(name="build website",
        logEnviron=False, haltOnFailure=True, workdir="source",
        command=["make", "build"], env=path_env))
    # Wipe the previous release before uploading the new one.
    factory.addStep(steps.MasterShellCommand(command="rm -rf {}".format(E.COURS_RELEASE_PATH)))
    factory.addStep(steps.DirectoryUpload(workersrc="../source/build",
        masterdest=E.COURS_RELEASE_PATH,
        url="https://www.immae.eu/cours"))
    factory.addStep(steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.COURS_RELEASE_PATH)))

    factory.addStep(steps.ShellCommand(name="build pdfs",
        logEnviron=False, haltOnFailure=True, workdir="source",
        command=["make", "pdfs"], env=path_env))

    # Tarball name carries the (sanitized) branch name.
    package = util.Interpolate("cours_%(kw:clean_branch)s.tar.gz", clean_branch=clean_branch)
    release_file = "{0}/cours_%(kw:clean_branch)s.tar.gz"
    package_dest = util.Interpolate(release_file.format(E.COURS_TARBALL_PATH), clean_branch=clean_branch)
    package_url = util.Interpolate(release_file.format(E.COURS_TARBALL_URL), clean_branch=clean_branch)
    factory.addStep(steps.ShellCommand(name="build pdf tarball",
        logEnviron=False, haltOnFailure=True, workdir="source",
        command=["tar", "-cvf", package, "-C", "pdfs", "mp", "mpsi"], env=path_env))
    factory.addStep(steps.FileUpload(name="upload package", workersrc=package,
        workdir="source", masterdest=package_dest,
        url=package_url, mode=0o644))

    return util.BuilderConfig(name="Cours_build", workernames=["generic-worker-immae-eu"], factory=factory)
254 | |||
def normalesup_factory():
    """Build the 'Normalesup' research site, rsync it to phare, and publish it.

    Returns a BuilderConfig named ``Normalesup_build``.
    """
    path_env = {
        "PATH": os.environ["BUILDBOT_PATH_Normalesup"] + ":${PATH}"
    }
    # Read the deploy key with a context manager: the bare open() previously
    # leaked the file handle.
    with open(E.SSH_KEY_PATH) as f:
        ssh_key = f.read().rstrip()
    factory = util.BuildFactory()
    factory.addStep(steps.Git(logEnviron=False, repourl=E.NORMALESUP_GIT_URL,
        submodules=True, sshPrivateKey=ssh_key,
        sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy"))
    factory.addStep(steps.ShellCommand(name="build website",
        logEnviron=False, haltOnFailure=True, workdir="source",
        command=["make", "build"], env=path_env))
    factory.addStep(steps.ShellCommand(name="give read access to all files",
        logEnviron=False, haltOnFailure=True, workdir="source",
        command="chmod -R a+rX build", env=path_env))
    # Mirror the build to the remote host; host-key checking is disabled on
    # purpose for this internal target.
    factory.addStep(steps.ShellCommand(name="synchronize with phare",
        logEnviron=False, haltOnFailure=True, workdir="source",
        env=path_env, command=[
            "rsync", "-av", "--delete",
            "-e", "ssh -i {} -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o CheckHostIP=no".format(E.SSH_KEY_PATH),
            "build/",
            os.environ["BUILDBOT_NORMALESUP_HOST"]
            ]))
    # Wipe the previous release before uploading the new one.
    factory.addStep(steps.MasterShellCommand(command="rm -rf {}".format(E.NORMALESUP_RELEASE_PATH)))
    factory.addStep(steps.DirectoryUpload(workersrc="../source/build", masterdest=E.NORMALESUP_RELEASE_PATH,
        url="https://www.immae.eu/recherche"))
    factory.addStep(steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.NORMALESUP_RELEASE_PATH)))

    return util.BuilderConfig(name="Normalesup_build", workernames=["generic-worker-immae-eu"], factory=factory)
283 | |||
def gsm_cells_factory():
    """Weekly job: download lacells.db via the helper script and publish it.

    Returns a BuilderConfig named ``GSMCells_build``.
    """
    path_env = {
        "PATH": os.environ["BUILDBOT_PATH_GSMCells"] + ":${PATH}",
        "IN_BUILDBOT": "yes",
    }
    master_env = {
        # Served by Apache next to the uploaded database; written below.
        "HTACCESS": '''
Options +FollowSymLinks
IndexIgnore *
'''
    }
    # Forward BUILDBOT_GSM_CELLS_* variables to the script, prefix stripped.
    for k, v in os.environ.items():
        if k.startswith("BUILDBOT_GSM_CELLS_"):
            path_env[k[len("BUILDBOT_GSM_CELLS_"):]] = v

    script = os.environ["BUILDBOT_PROJECT_DIR"] + "/scripts/lacells_download"
    factory = util.BuildFactory()
    factory.addStep(steps.ShellCommand(name="download files",
        logEnviron=False, haltOnFailure=True, command=[script], env=path_env))
    factory.addStep(steps.ShellCommand(name="give read access to all files",
        logEnviron=False, haltOnFailure=True,
        command="chmod a+r lacells.db", env=path_env))
    factory.addStep(steps.FileUpload(workersrc="lacells.db",
        masterdest=(E.GSMCELLS_RELEASE_PATH+"/lacells.db"), url=(E.GSMCELLS_RELEASE_URL+"/lacells.db")))
    # Marker file: presumably excludes this directory from duplicity backups
    # — confirm against the backup configuration.
    factory.addStep(steps.MasterShellCommand(command="touch {}/.duplicity-ignore".format(E.GSMCELLS_RELEASE_PATH)))
    factory.addStep(steps.MasterShellCommand(command='echo "$HTACCESS" > {}/.htaccess'.format(E.GSMCELLS_RELEASE_PATH),
        env=master_env))
    factory.addStep(steps.MasterShellCommand(command="ln -sf lacells.db {}/lacells.db.new".format(E.GSMCELLS_RELEASE_PATH)))
    factory.addStep(steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.GSMCELLS_RELEASE_PATH)))

    return util.BuilderConfig(name="GSMCells_build", workernames=["generic-worker-immae-eu"], factory=factory)
diff --git a/modules/private/buildbot/projects/immaeEu/scripts/lacells_download b/modules/private/buildbot/projects/immaeEu/scripts/lacells_download deleted file mode 100755 index 1193cf3..0000000 --- a/modules/private/buildbot/projects/immaeEu/scripts/lacells_download +++ /dev/null | |||
@@ -1,163 +0,0 @@ | |||
#!/usr/bin/env bash

# FLG - Fast Lacells.db Generator
#
# Simple script to quickly download and generate lacells.db for LocalGSMBackend by n76
# https://github.com/n76/Local-GSM-Backend
# Uses Mozilla Location Service, OpenCellID and radiocells.org databases as source
# Based on lacells-creator by wvengen and n76
#
# Licensed under GPLv3 or later
# (C)2016 Sebastian Obrusiewicz
# sobrus@o2.pl

# Under buildbot the settings below are injected through the environment
# instead of being hard-coded here.
if [ -z "$IN_BUILDBOT" ]; then
#DEFAULT_CONFIG_BEGIN
ENABLE_OCI=1 #enable OpenCellID data source
ENABLE_MOZ=1 #enable Mozilla Location Services (MLS) data source
ENABLE_RCO=0 #enable radiocells.org data source (it can be quite slow)

# See https://en.wikipedia.org/wiki/Mobile_country_code
# 208 France
MCC="" #country codes separated with "|", for example "260|262". Leave dot+asterisk ".*" for all countries
RCO_SRC_FILE="fr.sqlite" #radiocells.org source database file, set "openbmap.sqlite" for entire world database, see https://radiocells.org/downloads for smaller country specific files, for example "pl.sqlite" for Poland
RADIO="" #you can remove LTE if your phone does not support it
TOKEN="" #your OCID token, required to download from OpenCellID. Get your free token at https://opencellid.org/
fi
TMPDIR='.' #for temporary files only, use disk if you don't have enough RAM, AND remember to have enough disk space in /var/tmp for sqlite temporary files
KEEP_FILES=1 #whether to keep (1) or delete (0) the CSV files after processing

#do not edit following variables, unless you know what you're doing
EMPTY=',,,,,,,,,,,,,' #dummy empty file for disabled sources
OCI_FILE=$TMPDIR"/ocid.csv" #opencellid temporary file
MOZ_FILE=$TMPDIR"/mozilla.csv" #mozilla temporary file
RCO_FILE=$TMPDIR"/rco.csv" #radiocells.org temporary file
#DEFAULT_CONFIG_END

#USER_CONFIG_BEGIN
# Optional local overrides: source a "config" file next to this script.
BINDIR=$( dirname "$(readlink -f "$0")" ) #"
if [[ -f "${BINDIR}/config" ]]; then
. "${BINDIR}/config"
fi
#USER_CONFIG_END
43 | |||
function manage_backup
{
  # Keep a gzip backup of $1 when it has content (and KEEP_FILES=1);
  # otherwise, for .csv files, restore the file from an existing .gz backup.
  # Variables are quoted so paths with spaces cannot break word splitting.
  file="$1"
  if [ -s "$file" ]
  then
    if [ "$KEEP_FILES" == "1" ]
    then
      gzip -kf "$file"
    fi
  elif [ -s "$file.gz" ] && [ "${file##*.}" == "csv" ]
  then
    echo "Using $file.gz backup file"
    gzip -dkf "$file.gz"
  fi
}
59 | |||
60 | |||
function download_ocid
{
  # Fetch the full OpenCellID export (requires $TOKEN), keep only rows
  # matching the configured $RADIO/$MCC filters.  When the source is
  # disabled, write a dummy row so the later -s (non-empty) checks pass.
  URL="https://opencellid.org/ocid/downloads?token=${TOKEN}&type=full&file=cell_towers.csv.gz"
  if [ $ENABLE_OCI == "1" ]
  then
    wget -qO- "$URL" | gunzip | egrep "^($RADIO),($MCC)," > $OCI_FILE
    manage_backup $OCI_FILE
  else
    echo $EMPTY > $OCI_FILE
  fi
}
72 | |||
function download_mozilla
{
  # Fetch today's Mozilla Location Service full cell export and keep only
  # rows matching the $RADIO/$MCC filters.  When the source is disabled,
  # write a dummy row so the later -s (non-empty) checks pass.
  if [ $ENABLE_MOZ == "1" ]
  then
    # The export file name embeds today's UTC date.
    NW=`date -u "+%Y-%m-%d"`
    wget -qO- "https://d17pt8qph6ncyq.cloudfront.net/export/MLS-full-cell-export-${NW}T000000.csv.gz" | gunzip | egrep "^($RADIO),($MCC)," > $MOZ_FILE
    manage_backup $MOZ_FILE
  else
    echo $EMPTY > $MOZ_FILE
  fi
}
84 | |||
function download_radiocells
{
  # Fetch the radiocells.org sqlite database, export it to CSV in the same
  # column layout as the other sources, and filter by $RADIO/$MCC.  When the
  # source is disabled, write a dummy row so the later -s checks pass.
  if [ $ENABLE_RCO == "1" ]
  then
    # NULL columns pad the layout to match the OpenCellID/MLS CSV schema.
    RCO_SELECT="SELECT technology, mcc, mnc, area, cid, NULL, longitude, latitude, 1000 accuracy, measurements, NULL, NULL, NULL, NULL FROM cell_zone;"
    wget -qO- "https://cdn.radiocells.org/"$RCO_SRC_FILE > $TMPDIR"/"$RCO_SRC_FILE
    sqlite3 -header -csv $TMPDIR"/"$RCO_SRC_FILE "$RCO_SELECT" | egrep "^($RADIO),($MCC)," > $RCO_FILE
    rm $TMPDIR"/"$RCO_SRC_FILE
    manage_backup $RCO_FILE
  else
    echo $EMPTY > $RCO_FILE
  fi
}
98 | |||
echo "Downloading data"

# Run the three downloads concurrently and wait for each one.
download_ocid &
OP=$!
download_mozilla &
MO=$!
download_radiocells &
RO=$!

wait $OP
wait $MO
wait $RO

if [ -s "$MOZ_FILE" ] && [ -s "$OCI_FILE" ] && [ -s "$RCO_FILE" ]
then

	manage_backup lacells.db
	# -f: do not fail on the very first run, when no previous database exists.
	rm -f lacells.db

	echo "Generating database"

	# Import all three CSVs into one temp table, then aggregate duplicate
	# cells into sample-weighted averages and clamp accuracy to [500,100000].
	sqlite3 lacells.db <<-SQL
	PRAGMA synchronous = OFF;
	PRAGMA journal_mode = OFF;

	CREATE TEMP TABLE cells_import (radio TEXT,mcc INTEGER,mnc INTEGER,lac INTEGER,cid INTEGER,unit STRING,longitude NUMERIC,latitude NUMERIC,accuracy INTEGER,samples INTEGER,changeable BOOLEAN,created INTEGER,updated INTEGER, avgSignal INTEGER);
	CREATE TABLE cells (mcc INTEGER,mnc INTEGER,lac INTEGER,cid INTEGER,longitude REAL,latitude REAL,altitude REAL,accuracy REAL,samples INTEGER);

	.header on
	.mode csv

	.import "$OCI_FILE" cells_import
	.import "$MOZ_FILE" cells_import
	.import "$RCO_FILE" cells_import

	UPDATE cells_import SET samples=1 WHERE samples IS NULL OR samples < 1;

	INSERT INTO cells
	SELECT mcc, mnc, lac, cid,
	sum(longitude * samples) / sum(samples) as longitude,
	sum(latitude * samples) / sum(samples) as latitude,
	-1 as altitude,
	sum(accuracy * samples) / sum(samples) as accuracy,
	sum(samples) as samples
	FROM cells_import
	GROUP BY mcc, mnc, lac, cid;

	DROP TABLE cells_import;

	UPDATE cells SET accuracy=500 WHERE accuracy < 500;
	UPDATE cells SET accuracy=100000 WHERE accuracy > 100000;

	CREATE INDEX _idx1 ON cells (mcc, mnc, lac, cid);
	CREATE INDEX _idx2 ON cells (lac, cid);

	VACUUM;
	SQL

else
	echo "Download error"
fi

rm -f "$OCI_FILE"
rm -f "$MOZ_FILE"
rm -f "$RCO_FILE"
diff --git a/modules/private/buildbot/projects/test/__init__.py b/modules/private/buildbot/projects/test/__init__.py deleted file mode 100644 index e2f6f82..0000000 --- a/modules/private/buildbot/projects/test/__init__.py +++ /dev/null | |||
@@ -1,197 +0,0 @@ | |||
1 | from buildbot.plugins import * | ||
2 | from buildbot_common.build_helpers import * | ||
3 | import buildbot_common.libvirt as ilibvirt | ||
4 | import os | ||
5 | from buildbot.util import bytes2unicode | ||
6 | import json | ||
7 | |||
8 | __all__ = [ "configure", "E" ] | ||
9 | |||
class E():
    """Namespace of constants for the test buildbot project."""
    PROJECT = "test"
    BUILDBOT_URL = "https://git.immae.eu/buildbot/{}/".format(PROJECT)
    SOCKET = "unix:/run/buildbot/{}.sock".format(PROJECT)
    # Worker protocol endpoint: TCP port from the environment (libvirt
    # workers connect over the network, unlike the local-worker projects).
    PB_SOCKET = os.environ["BUILDBOT_WORKER_PORT"]
    WORKER_HOST = "{}:{}".format(os.environ["BUILDBOT_HOST"], PB_SOCKET)
    RELEASE_PATH = "/var/lib/ftp/release.immae.eu/{}".format(PROJECT)
    RELEASE_URL = "https://release.immae.eu/{}".format(PROJECT)
    GIT_URL = "https://git.immae.eu/perso/Immae/TestProject.git"
    SSH_KEY_PATH = "/var/lib/buildbot/buildbot_key"
    LIBVIRT_URL = os.environ["BUILDBOT_VIRT_URL"] + "?keyfile=" + SSH_KEY_PATH
    PUPPET_HOST = "root@backup-1.v.immae.eu"
    LDAP_HOST = "ldap.immae.eu"
    LDAP_DN = "cn=buildbot,ou=services,dc=immae,dc=eu"
    LDAP_ROLES_BASE = "ou=roles,ou=hosts,dc=immae,dc=eu"
    XMPP_RECIPIENTS = os.environ["BUILDBOT_XMPP_RECIPIENTS"].split(" ")

    # master.cfg
    SECRETS_FILE = os.getcwd() + "/secrets"
    LDAP_URL = "ldaps://ldap.immae.eu:636"
    LDAP_ADMIN_USER = "cn=buildbot,ou=services,dc=immae,dc=eu"
    LDAP_BASE = "dc=immae,dc=eu"
    LDAP_PATTERN = "(uid=%(username)s)"
    LDAP_GROUP_PATTERN = "(&(memberOf=cn=groups,ou=test,cn=buildbot,ou=services,dc=immae,dc=eu)(member=%(dn)s))"
    TITLE_URL = "https://git.immae.eu/?p=perso/Immae/TestProject.git;a=summary"
    TITLE = "Test project"
36 | |||
class CustomBase(webhooks.base):
    """Webhook dialect accepting plain JSON; maps deploy_webhook payloads."""

    def getChanges(self, request):
        """Parse the request body as JSON and normalize it into change dicts.

        Payloads with category "deploy_webhook" are rewritten into a fixed
        deploy change carrying environment/build properties.
        Raises ValueError when the body is not valid JSON.
        """
        try:
            content = request.content.read()
            args = json.loads(bytes2unicode(content))
        except Exception as e:
            raise ValueError("Error loading JSON: " + str(e))

        args.setdefault("comments", "")
        args.setdefault("repository", "")
        args.setdefault("author", args.get("who", "unknown"))

        # Use .get(): a payload without a "category" key previously raised
        # KeyError here instead of being passed through unchanged.
        if args.get("category") == "deploy_webhook":
            args = {
                "category": "deploy_webhook",
                "comments": "",
                "repository": "",
                "author": "unknown",
                "project": "TestProject",
                "properties": {
                    "environment": args.get("environment", "integration"),
                    "build": "test_{}.tar.gz".format(args.get("branch", "master"))
                }
            }

        return ([args], None)
63 | |||
def deploy_hook_scheduler(project, timer=1):
    """Scheduler fed by deploy_webhook changes for *project*'s deploy builder."""
    deploy_name = "{}_deploy".format(project)
    deploy_filter = util.ChangeFilter(category="deploy_webhook", project=project)
    return schedulers.AnyBranchScheduler(
        change_filter=deploy_filter,
        name=deploy_name, treeStableTimer=timer, builderNames=[deploy_name])
68 | |||
def configure(c):
    """Register workers, schedulers, builders and notifiers for the test project.

    Mutates the buildbot master config dict *c* in place.
    """
    c["buildbotURL"] = E.BUILDBOT_URL
    c["www"]["port"] = E.SOCKET

    c["www"]["change_hook_dialects"]["base"] = {
        "custom_class": CustomBase
    }

    # Read each secret once, with context managers: previously the worker
    # password file was opened twice and every handle was leaked.
    with open(E.SECRETS_FILE + "/worker_password", "r") as f:
        worker_password = f.read().rstrip()
    with open(E.SECRETS_FILE + "/slack_webhook", "r") as f:
        slack_webhook = f.read().rstrip()
    with open(E.SECRETS_FILE + "/notify_xmpp_password", "r") as f:
        xmpp_password = f.read().rstrip()

    c['workers'].append(ilibvirt.LibVirtWorker("test-build",
        worker_password,
        ilibvirt.Connection(E.LIBVIRT_URL),
        E.WORKER_HOST))
    c['workers'].append(ilibvirt.LibVirtWorker("test-deploy",
        worker_password,
        ilibvirt.Connection(E.LIBVIRT_URL),
        E.WORKER_HOST))

    c['schedulers'].append(hook_scheduler("TestProject", timer=1))
    c['schedulers'].append(force_scheduler("force_test", ["TestProject_build"]))
    c['schedulers'].append(deploy_scheduler("deploy_test", ["TestProject_deploy"]))
    c['schedulers'].append(deploy_hook_scheduler("TestProject", timer=1))

    c['builders'].append(factory())
    c['builders'].append(deploy_factory())

    c['services'].append(SlackStatusPush(
        name="slack_status_test_project",
        builders=["TestProject_build", "TestProject_deploy"],
        serverUrl=slack_webhook))
    c['services'].append(XMPPStatusPush(
        name="xmpp_status_test_project",
        builders=["TestProject_build", "TestProject_deploy"],
        recipients=E.XMPP_RECIPIENTS,
        password=xmpp_password))
102 | |||
def factory():
    """Builder that checks out TestProject, runs smoke commands and packages it."""
    tarball = util.Interpolate("test_%(kw:clean_branch)s.tar.gz", clean_branch=clean_branch)
    tarball_dest = util.Interpolate("{}/test_%(kw:clean_branch)s.tar.gz".format(E.RELEASE_PATH), clean_branch=clean_branch)
    tarball_url = util.Interpolate("{}/test_%(kw:clean_branch)s.tar.gz".format(E.RELEASE_URL), clean_branch=clean_branch)

    build = util.BuildFactory()
    build.addStep(steps.Git(logEnviron=False,
        repourl=E.GIT_URL, mode="full", method="copy"))
    # Simple smoke-test commands, added in a fixed order.
    for step_name, argv in (
            ("env", ["env"]),
            ("pwd", ["pwd"]),
            ("true", ["true"]),
            ("echo", ["echo", tarball])):
        build.addStep(steps.ShellCommand(name=step_name,
            logEnviron=False, command=argv))
    build.addSteps(package_and_upload(tarball, tarball_dest, tarball_url))

    return util.BuilderConfig(name="TestProject_build", workernames=["test-build"], factory=build)
122 | |||
123 | |||
def compute_build_infos():
    """Return a renderer yielding version and sha256 of the uploaded tarball.

    Reads the "build" property (a file name like ``test_<version>.tar.gz``)
    and hashes the corresponding file under E.RELEASE_PATH.
    """
    @util.renderer
    def compute(props):
        import re, hashlib
        build_file = props.getProperty("build")
        package_dest = "{}/{}".format(E.RELEASE_PATH, build_file)
        # Escape the dots: the previous pattern ("test_(.*).tar.gz") let "."
        # match any character, so e.g. "test_1XtarYgz" was accepted.
        version = re.match(r"test_(.*)\.tar\.gz", build_file).group(1)
        with open(package_dest, "rb") as f:
            sha = hashlib.sha256(f.read()).hexdigest()
        return {
            "build_version": version,
            "build_hash": sha,
        }
    return compute
138 | |||
@util.renderer
def puppet_host(props):
    """Renderer used by the deploy step; always the fixed puppet host."""
    return E.PUPPET_HOST
142 | |||
def deploy_factory():
    """Builder that validates an uploaded tarball and records it in LDAP.

    Returns a BuilderConfig named ``TestProject_deploy``.
    """
    package_dest = util.Interpolate("{}/%(prop:build)s".format(E.RELEASE_PATH))

    factory = util.BuildFactory()
    # Fail fast if the requested build tarball is absent on the master.
    factory.addStep(steps.MasterShellCommand(command=["test", "-f", package_dest]))
    # Derive build_version / build_hash properties from the tarball.
    factory.addStep(steps.SetProperties(properties=compute_build_infos()))
    factory.addStep(LdapPush(environment=util.Property("environment"),
        build_version=util.Property("build_version"),
        build_hash=util.Property("build_hash"),
        ldap_password=util.Secret("ldap")))
    # NOTE(review): this opens an SSH session to the puppet host with no
    # remote command — presumably the login itself triggers the deployment;
    # confirm against the host's forced-command configuration.
    factory.addStep(steps.MasterShellCommand(command=[
        "ssh", "-o", "UserKnownHostsFile=/dev/null", "-o", "StrictHostKeyChecking=no", "-o", "CheckHostIP=no", "-i", E.SSH_KEY_PATH, puppet_host]))
    return util.BuilderConfig(name="TestProject_deploy", workernames=["test-deploy"], factory=factory)
156 | |||
157 | from twisted.internet import defer | ||
158 | from buildbot.process.buildstep import FAILURE | ||
159 | from buildbot.process.buildstep import SUCCESS | ||
160 | from buildbot.process.buildstep import BuildStep | ||
161 | |||
class LdapPush(BuildStep):
    """Build step recording a deployed build's version and hash in LDAP.

    Rewrites the immaePuppetJson values of the role entry for the target
    environment so the configuration management picks up the new release.
    """
    name = "LdapPush"
    # These attributes are rendered (Properties/Secrets resolved) before run().
    renderables = ["environment", "build_version", "build_hash", "ldap_password"]

    def __init__(self, **kwargs):
        # Pop our own arguments before delegating the rest to BuildStep.
        self.environment = kwargs.pop("environment")
        self.build_version = kwargs.pop("build_version")
        self.build_hash = kwargs.pop("build_hash")
        self.ldap_password = kwargs.pop("ldap_password")
        self.ldap_host = kwargs.pop("ldap_host", E.LDAP_HOST)
        super().__init__(**kwargs)

    def run(self):
        import json
        from ldap3 import Reader, Writer, Server, Connection, ObjectDef
        # NOTE(review): this LDAP round-trip is synchronous inside the step;
        # presumably acceptable because deploys are rare — confirm.
        server = Server(self.ldap_host)
        conn = Connection(server,
            user=E.LDAP_DN,
            password=self.ldap_password)
        # NOTE(review): bind() result is not checked; a failed bind surfaces
        # later as a search/commit error.
        conn.bind()
        obj = ObjectDef("immaePuppetClass", conn)
        r = Reader(conn, obj,
            "cn=test.{},{}".format(self.environment, E.LDAP_ROLES_BASE))
        r.search()
        if len(r) > 0:
            w = Writer.from_cursor(r)
            # Replace the JSON blob(s) carrying a "test_version" key with
            # updated version/hash values; other blobs are left untouched.
            for value in w[0].immaePuppetJson.values:
                config = json.loads(value)
                if "test_version" in config:
                    config["test_version"] = self.build_version
                    config["test_sha256"] = self.build_hash
                    w[0].immaePuppetJson -= value
                    w[0].immaePuppetJson += json.dumps(config, indent=" ")
            w.commit()
            return defer.succeed(SUCCESS)
        return defer.succeed(FAILURE)