diff options
Diffstat (limited to 'modules/private/buildbot')
-rw-r--r-- | modules/private/buildbot/common/build_helpers.py | 277 | ||||
-rw-r--r-- | modules/private/buildbot/common/libvirt.py | 318 | ||||
-rw-r--r-- | modules/private/buildbot/common/master.cfg | 69 | ||||
-rw-r--r-- | modules/private/buildbot/default.nix | 244 | ||||
-rw-r--r-- | modules/private/buildbot/projects/caldance/__init__.py | 198 | ||||
-rw-r--r-- | modules/private/buildbot/projects/cryptoportfolio/__init__.py | 169 | ||||
-rw-r--r-- | modules/private/buildbot/projects/denise/__init__.py | 186 | ||||
-rw-r--r-- | modules/private/buildbot/projects/immaeEu/__init__.py | 314 | ||||
-rwxr-xr-x | modules/private/buildbot/projects/immaeEu/scripts/lacells_download | 163 | ||||
-rw-r--r-- | modules/private/buildbot/projects/test/__init__.py | 197 |
10 files changed, 0 insertions, 2135 deletions
diff --git a/modules/private/buildbot/common/build_helpers.py b/modules/private/buildbot/common/build_helpers.py deleted file mode 100644 index acea905..0000000 --- a/modules/private/buildbot/common/build_helpers.py +++ /dev/null | |||
@@ -1,277 +0,0 @@ | |||
1 | from buildbot.plugins import util, steps, schedulers | ||
2 | from buildbot_buildslist import BuildsList | ||
3 | from shutil import which | ||
4 | |||
5 | __all__ = [ | ||
6 | "force_scheduler", "deploy_scheduler", "hook_scheduler", | ||
7 | "clean_branch", "package_and_upload", "SlackStatusPush", | ||
8 | "XMPPStatusPush", "NixShellCommand" | ||
9 | ] | ||
10 | |||
11 | # Small helpers" | ||
12 | @util.renderer | ||
13 | def clean_branch(props): | ||
14 | if props.hasProperty("branch") and len(props["branch"]) > 0: | ||
15 | return props["branch"].replace("/", "_") | ||
16 | else: | ||
17 | return "HEAD" | ||
18 | |||
19 | def package_and_upload(package, package_dest, package_url): | ||
20 | return [ | ||
21 | steps.ShellCommand(name="build package", | ||
22 | logEnviron=False, haltOnFailure=True, workdir="source", | ||
23 | command=["git", "archive", "HEAD", "-o", package]), | ||
24 | |||
25 | steps.FileUpload(name="upload package", workersrc=package, | ||
26 | workdir="source", masterdest=package_dest, | ||
27 | url=package_url, mode=0o644), | ||
28 | |||
29 | steps.ShellCommand(name="cleanup package", logEnviron=False, | ||
30 | haltOnFailure=True, workdir="source", alwaysRun=True, | ||
31 | command=["rm", "-f", package]), | ||
32 | ] | ||
33 | |||
34 | # Steps | ||
35 | class NixShellCommand(steps.ShellCommand): | ||
36 | def __init__(self, command=None, pure=True, nixfile=None, **kwargs): | ||
37 | assert(isinstance(command, str)) | ||
38 | oldpath = kwargs.get("env", {}).get("PATH", None) | ||
39 | if which("nix-shell", path=oldpath) is None: | ||
40 | kwargs["env"] = kwargs.get("env", {}) | ||
41 | if isinstance(oldpath, str): | ||
42 | kwargs["env"]["PATH"] = "/run/current-system/sw/bin:" + oldpath | ||
43 | elif isinstance(oldpath, list): | ||
44 | kwargs["env"]["PATH"] = ["/run/current-system/sw/bin"] + oldpath | ||
45 | nixcommand = ["nix-shell"] | ||
46 | if pure: | ||
47 | nixcommand.append("--pure") | ||
48 | nixcommand.append("--run") | ||
49 | nixcommand.append(command) | ||
50 | if nixfile is not None: | ||
51 | nixcommand.append(nixfile) | ||
52 | super().__init__(command=nixcommand, **kwargs) | ||
53 | |||
54 | # Schedulers | ||
55 | def force_scheduler(name, builders): | ||
56 | return schedulers.ForceScheduler(name=name, | ||
57 | label="Force build", buttonName="Force build", | ||
58 | reason=util.StringParameter(name="reason", label="Reason", default="Force build"), | ||
59 | codebases=[ | ||
60 | util.CodebaseParameter("", | ||
61 | branch=util.StringParameter( | ||
62 | name="branch", label="Git reference (tag, branch)", required=True), | ||
63 | revision=util.FixedParameter(name="revision", default=""), | ||
64 | repository=util.FixedParameter(name="repository", default=""), | ||
65 | project=util.FixedParameter(name="project", default=""), | ||
66 | ), | ||
67 | ], | ||
68 | username=util.FixedParameter(name="username", default="Web button"), | ||
69 | builderNames=builders) | ||
70 | |||
71 | def deploy_scheduler(name, builders): | ||
72 | return schedulers.ForceScheduler(name=name, | ||
73 | builderNames=builders, | ||
74 | label="Deploy built package", buttonName="Deploy", | ||
75 | username=util.FixedParameter(name="username", default="Web button"), | ||
76 | codebases=[ | ||
77 | util.CodebaseParameter(codebase="", | ||
78 | branch=util.FixedParameter(name="branch", default=""), | ||
79 | revision=util.FixedParameter(name="revision", default=""), | ||
80 | repository=util.FixedParameter(name="repository", default=""), | ||
81 | project=util.FixedParameter(name="project", default=""))], | ||
82 | reason=util.FixedParameter(name="reason", default="Deploy"), | ||
83 | properties=[ | ||
84 | util.ChoiceStringParameter(label="Environment", | ||
85 | name="environment", default="integration", | ||
86 | choices=["integration", "production"]), | ||
87 | BuildsList(label="Build to deploy", name="build"), | ||
88 | ] | ||
89 | ) | ||
90 | |||
91 | def hook_scheduler(project, timer=10): | ||
92 | return schedulers.AnyBranchScheduler( | ||
93 | change_filter=util.ChangeFilter(category="hooks", project=project), | ||
94 | name=project, treeStableTimer=timer, builderNames=["{}_build".format(project)]) | ||
95 | |||
96 | # Slack/XMPP status push | ||
97 | from buildbot.reporters.http import HttpStatusPushBase | ||
98 | from twisted.internet import defer | ||
99 | from twisted.python import log | ||
100 | from buildbot.util import httpclientservice | ||
101 | from buildbot.reporters import utils | ||
102 | from buildbot.process import results | ||
103 | from twisted.words.protocols.jabber.jid import JID | ||
104 | from wokkel import client, xmppim | ||
105 | from functools import partial | ||
106 | |||
107 | class SlackStatusPush(HttpStatusPushBase): | ||
108 | name = "SlackStatusPush" | ||
109 | |||
110 | @defer.inlineCallbacks | ||
111 | def reconfigService(self, serverUrl, **kwargs): | ||
112 | yield HttpStatusPushBase.reconfigService(self, **kwargs) | ||
113 | self._http = yield httpclientservice.HTTPClientService.getService( | ||
114 | self.master, serverUrl) | ||
115 | |||
116 | @defer.inlineCallbacks | ||
117 | def send(self, build): | ||
118 | yield utils.getDetailsForBuild(self.master, build, wantProperties=True) | ||
119 | response = yield self._http.post("", json=self.format(build)) | ||
120 | if response.code != 200: | ||
121 | log.msg("%s: unable to upload status: %s" % | ||
122 | (response.code, response.content)) | ||
123 | |||
124 | def format(self, build): | ||
125 | colors = [ | ||
126 | "#36A64F", # success | ||
127 | "#F1E903", # warnings | ||
128 | "#DA0505", # failure | ||
129 | "#FFFFFF", # skipped | ||
130 | "#000000", # exception | ||
131 | "#FFFFFF", # retry | ||
132 | "#D02CA9", # cancelled | ||
133 | ] | ||
134 | |||
135 | if "environment" in build["properties"]: | ||
136 | msg = "{} environment".format(build["properties"]["environment"][0]) | ||
137 | if "build" in build["properties"]: | ||
138 | msg = "of archive {} in ".format(build["properties"]["build"][0]) + msg | ||
139 | elif len(build["buildset"]["sourcestamps"][0]["branch"] or []) > 0: | ||
140 | msg = "revision {}".format(build["buildset"]["sourcestamps"][0]["branch"]) | ||
141 | else: | ||
142 | msg = "build" | ||
143 | |||
144 | if build["complete"]: | ||
145 | timedelta = int((build["complete_at"] - build["started_at"]).total_seconds()) | ||
146 | hours, rest = divmod(timedelta, 3600) | ||
147 | minutes, seconds = divmod(rest, 60) | ||
148 | if hours > 0: | ||
149 | duration = "{}h {}min {}s".format(hours, minutes, seconds) | ||
150 | elif minutes > 0: | ||
151 | duration = "{}min {}s".format(minutes, seconds) | ||
152 | else: | ||
153 | duration = "{}s".format(seconds) | ||
154 | |||
155 | text = "Build <{}|{}> of {}'s {} was {} in {}.".format( | ||
156 | build["url"], build["buildid"], | ||
157 | build["builder"]["name"], | ||
158 | msg, | ||
159 | results.Results[build["results"]], | ||
160 | duration, | ||
161 | ) | ||
162 | fields = [ | ||
163 | { | ||
164 | "title": "Build", | ||
165 | "value": "<{}|{}>".format(build["url"], build["buildid"]), | ||
166 | "short": True, | ||
167 | }, | ||
168 | { | ||
169 | "title": "Project", | ||
170 | "value": build["builder"]["name"], | ||
171 | "short": True, | ||
172 | }, | ||
173 | { | ||
174 | "title": "Build status", | ||
175 | "value": results.Results[build["results"]], | ||
176 | "short": True, | ||
177 | }, | ||
178 | { | ||
179 | "title": "Build duration", | ||
180 | "value": duration, | ||
181 | "short": True, | ||
182 | }, | ||
183 | ] | ||
184 | if "environment" in build["properties"]: | ||
185 | fields.append({ | ||
186 | "title": "Environment", | ||
187 | "value": build["properties"]["environment"][0], | ||
188 | "short": True, | ||
189 | }) | ||
190 | if "build" in build["properties"]: | ||
191 | fields.append({ | ||
192 | "title": "Archive", | ||
193 | "value": build["properties"]["build"][0], | ||
194 | "short": True, | ||
195 | }) | ||
196 | attachments = [{ | ||
197 | "fallback": "", | ||
198 | "color": colors[build["results"]], | ||
199 | "fields": fields | ||
200 | }] | ||
201 | else: | ||
202 | text = "Build <{}|{}> of {}'s {} started.".format( | ||
203 | build["url"], build["buildid"], | ||
204 | build["builder"]["name"], | ||
205 | msg, | ||
206 | ) | ||
207 | attachments = [] | ||
208 | |||
209 | return { | ||
210 | "username": "Buildbot", | ||
211 | "icon_url": "http://docs.buildbot.net/current/_static/icon.png", | ||
212 | "text": text, | ||
213 | "attachments": attachments, | ||
214 | } | ||
215 | |||
216 | class XMPPStatusPush(HttpStatusPushBase): | ||
217 | name = "XMPPStatusPush" | ||
218 | |||
219 | @defer.inlineCallbacks | ||
220 | def reconfigService(self, password, recipients, **kwargs): | ||
221 | yield HttpStatusPushBase.reconfigService(self, **kwargs) | ||
222 | self.password = password | ||
223 | self.recipients = recipients | ||
224 | |||
225 | @defer.inlineCallbacks | ||
226 | def send(self, build): | ||
227 | yield utils.getDetailsForBuild(self.master, build, wantProperties=True) | ||
228 | body = self.format(build) | ||
229 | factory = client.DeferredClientFactory(JID("notify_bot@immae.fr/buildbot"), self.password) | ||
230 | d = client.clientCreator(factory) | ||
231 | def send_message(recipient, stream): | ||
232 | message = xmppim.Message(recipient=JID(recipient), body=body) | ||
233 | message.stanzaType = 'chat' | ||
234 | stream.send(message.toElement()) | ||
235 | # To allow chaining | ||
236 | return stream | ||
237 | for recipient in self.recipients: | ||
238 | d.addCallback(partial(send_message, recipient)) | ||
239 | d.addCallback(lambda _: factory.streamManager.xmlstream.sendFooter()) | ||
240 | d.addErrback(log.err) | ||
241 | |||
242 | def format(self, build): | ||
243 | if "environment" in build["properties"]: | ||
244 | msg = "{} environment".format(build["properties"]["environment"][0]) | ||
245 | if "build" in build["properties"]: | ||
246 | msg = "of archive {} in ".format(build["properties"]["build"][0]) + msg | ||
247 | elif len(build["buildset"]["sourcestamps"][0]["branch"] or []) > 0: | ||
248 | msg = "revision {}".format(build["buildset"]["sourcestamps"][0]["branch"]) | ||
249 | else: | ||
250 | msg = "build" | ||
251 | |||
252 | if build["complete"]: | ||
253 | timedelta = int((build["complete_at"] - build["started_at"]).total_seconds()) | ||
254 | hours, rest = divmod(timedelta, 3600) | ||
255 | minutes, seconds = divmod(rest, 60) | ||
256 | if hours > 0: | ||
257 | duration = "{}h {}min {}s".format(hours, minutes, seconds) | ||
258 | elif minutes > 0: | ||
259 | duration = "{}min {}s".format(minutes, seconds) | ||
260 | else: | ||
261 | duration = "{}s".format(seconds) | ||
262 | |||
263 | text = "Build {} ( {} ) of {}'s {} was {} in {}.".format( | ||
264 | build["buildid"], build["url"], | ||
265 | build["builder"]["name"], | ||
266 | msg, | ||
267 | results.Results[build["results"]], | ||
268 | duration, | ||
269 | ) | ||
270 | else: | ||
271 | text = "Build {} ( {} ) of {}'s {} started.".format( | ||
272 | build["buildid"], build["url"], | ||
273 | build["builder"]["name"], | ||
274 | msg, | ||
275 | ) | ||
276 | |||
277 | return text | ||
diff --git a/modules/private/buildbot/common/libvirt.py b/modules/private/buildbot/common/libvirt.py deleted file mode 100644 index e250627..0000000 --- a/modules/private/buildbot/common/libvirt.py +++ /dev/null | |||
@@ -1,318 +0,0 @@ | |||
1 | # This file was part of Buildbot. Buildbot is free software: you can | ||
2 | # redistribute it and/or modify it under the terms of the GNU General Public | ||
3 | # License as published by the Free Software Foundation, version 2. | ||
4 | # | ||
5 | # This program is distributed in the hope that it will be useful, but WITHOUT | ||
6 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS | ||
7 | # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more | ||
8 | # details. | ||
9 | # | ||
10 | # You should have received a copy of the GNU General Public License along with | ||
11 | # this program; if not, write to the Free Software Foundation, Inc., 51 | ||
12 | # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
13 | # | ||
14 | # Portions Copyright Buildbot Team Members | ||
15 | # Portions Copyright 2010 Isotoma Limited | ||
16 | |||
17 | |||
18 | import os | ||
19 | |||
20 | from twisted.internet import defer | ||
21 | from twisted.internet import threads | ||
22 | from twisted.internet import utils | ||
23 | from twisted.python import failure | ||
24 | from twisted.python import log | ||
25 | |||
26 | from buildbot import config | ||
27 | from buildbot.util.eventual import eventually | ||
28 | from buildbot.worker import AbstractLatentWorker | ||
29 | |||
30 | try: | ||
31 | import libvirt | ||
32 | except ImportError: | ||
33 | libvirt = None | ||
34 | |||
35 | import random | ||
36 | import string | ||
37 | |||
38 | def random_string_generator(): | ||
39 | chars = string.ascii_letters | ||
40 | return ''.join(random.choice(chars) for x in range(6)) | ||
41 | |||
42 | class WorkQueue: | ||
43 | |||
44 | """ | ||
45 | I am a class that turns parallel access into serial access. | ||
46 | |||
47 | I exist because we want to run libvirt access in threads as we don't | ||
48 | trust calls not to block, but under load libvirt doesn't seem to like | ||
49 | this kind of threaded use. | ||
50 | """ | ||
51 | |||
52 | def __init__(self): | ||
53 | self.queue = [] | ||
54 | |||
55 | def _process(self): | ||
56 | log.msg("Looking to start a piece of work now...") | ||
57 | |||
58 | # Is there anything to do? | ||
59 | if not self.queue: | ||
60 | log.msg("_process called when there is no work") | ||
61 | return | ||
62 | |||
63 | # Peek at the top of the stack - get a function to call and | ||
64 | # a deferred to fire when its all over | ||
65 | d, next_operation, args, kwargs = self.queue[0] | ||
66 | |||
67 | # Start doing some work - expects a deferred | ||
68 | try: | ||
69 | d2 = next_operation(*args, **kwargs) | ||
70 | except Exception: | ||
71 | d2 = defer.fail() | ||
72 | |||
73 | # Whenever a piece of work is done, whether it worked or not | ||
74 | # call this to schedule the next piece of work | ||
75 | @d2.addBoth | ||
76 | def _work_done(res): | ||
77 | log.msg("Completed a piece of work") | ||
78 | self.queue.pop(0) | ||
79 | if self.queue: | ||
80 | log.msg("Preparing next piece of work") | ||
81 | eventually(self._process) | ||
82 | return res | ||
83 | |||
84 | # When the work is done, trigger d | ||
85 | d2.chainDeferred(d) | ||
86 | |||
87 | def execute(self, cb, *args, **kwargs): | ||
88 | kickstart_processing = not self.queue | ||
89 | d = defer.Deferred() | ||
90 | self.queue.append((d, cb, args, kwargs)) | ||
91 | if kickstart_processing: | ||
92 | self._process() | ||
93 | return d | ||
94 | |||
95 | def executeInThread(self, cb, *args, **kwargs): | ||
96 | return self.execute(threads.deferToThread, cb, *args, **kwargs) | ||
97 | |||
98 | |||
99 | # A module is effectively a singleton class, so this is OK | ||
100 | queue = WorkQueue() | ||
101 | |||
102 | |||
103 | class Domain: | ||
104 | |||
105 | """ | ||
106 | I am a wrapper around a libvirt Domain object | ||
107 | """ | ||
108 | |||
109 | def __init__(self, connection, domain): | ||
110 | self.connection = connection | ||
111 | self.domain = domain | ||
112 | |||
113 | def name(self): | ||
114 | return queue.executeInThread(self.domain.name) | ||
115 | |||
116 | def create(self): | ||
117 | return queue.executeInThread(self.domain.create) | ||
118 | |||
119 | def shutdown(self): | ||
120 | return queue.executeInThread(self.domain.shutdown) | ||
121 | |||
122 | def destroy(self): | ||
123 | return queue.executeInThread(self.domain.destroy) | ||
124 | |||
125 | class Volume: | ||
126 | def __init__(self, connection, volume): | ||
127 | self.connection = connection | ||
128 | self.volume = volume | ||
129 | |||
130 | @defer.inlineCallbacks | ||
131 | def destroy(self): | ||
132 | yield queue.executeInThread(self.volume.wipe) | ||
133 | yield queue.executeInThread(self.volume.delete) | ||
134 | |||
135 | class Pool: | ||
136 | VolumeClass = Volume | ||
137 | def __init__(self, connection, pool): | ||
138 | self.connection = connection | ||
139 | self.pool = pool | ||
140 | |||
141 | @defer.inlineCallbacks | ||
142 | def create_volume(self, xml): | ||
143 | res = yield queue.executeInThread(self.pool.createXML, xml) | ||
144 | return self.VolumeClass(self.connection, res) | ||
145 | |||
146 | class Connection: | ||
147 | |||
148 | """ | ||
149 | I am a wrapper around a libvirt Connection object. | ||
150 | """ | ||
151 | |||
152 | DomainClass = Domain | ||
153 | PoolClass = Pool | ||
154 | |||
155 | def __init__(self, uri): | ||
156 | self.uri = uri | ||
157 | self._connection = None | ||
158 | |||
159 | @property | ||
160 | def connection(self): | ||
161 | if self._connection is not None: | ||
162 | try: | ||
163 | if not self._connection.isAlive(): | ||
164 | self._connection = None | ||
165 | except: | ||
166 | self._connection = None | ||
167 | if self._connection is None: | ||
168 | self._connection = libvirt.open(self.uri) | ||
169 | return self._connection | ||
170 | |||
171 | @defer.inlineCallbacks | ||
172 | def create(self, xml): | ||
173 | """ I take libvirt XML and start a new VM """ | ||
174 | res = yield queue.executeInThread(self.connection.createXML, xml, 0) | ||
175 | return self.DomainClass(self, res) | ||
176 | |||
177 | @defer.inlineCallbacks | ||
178 | def lookup_pool(self, name): | ||
179 | res = yield queue.executeInThread(self.connection.storagePoolLookupByName, name) | ||
180 | return self.PoolClass(self, res) | ||
181 | |||
182 | class LibVirtWorker(AbstractLatentWorker): | ||
183 | |||
184 | def __init__(self, name, password, connection, master_url, base_image=None, **kwargs): | ||
185 | super().__init__(name, password, **kwargs) | ||
186 | if not libvirt: | ||
187 | config.error( | ||
188 | "The python module 'libvirt' is needed to use a LibVirtWorker") | ||
189 | |||
190 | self.master_url = master_url | ||
191 | self.random_name = random_string_generator() | ||
192 | self.connection = connection | ||
193 | self.base_image = base_image | ||
194 | |||
195 | self.domain = None | ||
196 | self.domain_name = "buildbot-" + self.workername + "-" + self.random_name | ||
197 | self.volume = None | ||
198 | self.volume_name = "buildbot-" + self.workername + "-" + self.random_name | ||
199 | self.pool_name = "buildbot-disks" | ||
200 | |||
201 | def reconfigService(self, *args, **kwargs): | ||
202 | if 'build_wait_timeout' not in kwargs: | ||
203 | kwargs['build_wait_timeout'] = 0 | ||
204 | return super().reconfigService(*args, **kwargs) | ||
205 | |||
206 | def canStartBuild(self): | ||
207 | if self.domain and not self.isConnected(): | ||
208 | log.msg( | ||
209 | "Not accepting builds as existing domain but worker not connected") | ||
210 | return False | ||
211 | |||
212 | return super().canStartBuild() | ||
213 | |||
214 | @defer.inlineCallbacks | ||
215 | def _prepare_image(self): | ||
216 | log.msg("Creating temporary image {}".format(self.volume_name)) | ||
217 | pool = yield self.connection.lookup_pool(self.pool_name) | ||
218 | vol_xml = """ | ||
219 | <volume type='file'> | ||
220 | <name>{vol_name}</name> | ||
221 | <capacity unit='G'>10</capacity> | ||
222 | <target> | ||
223 | <format type='qcow2'/> | ||
224 | <permissions> | ||
225 | <mode>0600</mode> | ||
226 | <owner>0</owner> | ||
227 | <group>0</group> | ||
228 | </permissions> | ||
229 | </target> | ||
230 | <backingStore> | ||
231 | <path>/etc/libvirtd/base-images/buildbot.qcow2</path> | ||
232 | <format type='qcow2'/> | ||
233 | </backingStore> | ||
234 | </volume> | ||
235 | """.format(vol_name = self.volume_name) | ||
236 | self.volume = yield pool.create_volume(vol_xml) | ||
237 | |||
238 | @defer.inlineCallbacks | ||
239 | def start_instance(self, build): | ||
240 | """ | ||
241 | I start a new instance of a VM. | ||
242 | |||
243 | If a base_image is specified, I will make a clone of that otherwise i will | ||
244 | use image directly. | ||
245 | |||
246 | If i'm not given libvirt domain definition XML, I will look for my name | ||
247 | in the list of defined virtual machines and start that. | ||
248 | """ | ||
249 | domain_xml = """ | ||
250 | <domain type="kvm"> | ||
251 | <name>{domain_name}</name> | ||
252 | <memory unit="GiB">2</memory> | ||
253 | <vcpu>1</vcpu> | ||
254 | <sysinfo type='smbios'> | ||
255 | <oemStrings> | ||
256 | <entry>buildbot_master_url={master_url}</entry> | ||
257 | <entry>buildbot_worker_name={worker_name}</entry> | ||
258 | </oemStrings> | ||
259 | </sysinfo> | ||
260 | <os> | ||
261 | <type arch="x86_64">hvm</type> | ||
262 | <smbios mode='sysinfo'/> | ||
263 | </os> | ||
264 | <devices> | ||
265 | <emulator>/run/current-system/sw/bin/qemu-system-x86_64</emulator> | ||
266 | <disk type="volume" device="disk"> | ||
267 | <driver name='qemu' type='qcow2' /> | ||
268 | <source type="volume" pool="{pool_name}" volume="{volume_name}" /> | ||
269 | <backingStore type='volume'> | ||
270 | <format type='qcow2'/> | ||
271 | <source type="volume" pool="niximages" volume="buildbot.qcow2" /> | ||
272 | </backingStore> | ||
273 | <target dev="vda" bus="virtio"/> | ||
274 | </disk> | ||
275 | <input type="keyboard" bus="usb"/> | ||
276 | <graphics type="vnc" port="-1" autoport="yes"/> | ||
277 | <interface type="network"> | ||
278 | <source network="immae" /> | ||
279 | </interface> | ||
280 | </devices> | ||
281 | </domain> | ||
282 | """.format(volume_name = self.volume_name, master_url = self.master_url, pool_name = | ||
283 | self.pool_name, domain_name = self.domain_name, worker_name = self.workername) | ||
284 | |||
285 | yield self._prepare_image() | ||
286 | |||
287 | try: | ||
288 | self.domain = yield self.connection.create(domain_xml) | ||
289 | except Exception: | ||
290 | log.err(failure.Failure(), | ||
291 | ("Cannot start a VM ({}), failing gracefully and triggering" | ||
292 | "a new build check").format(self.workername)) | ||
293 | self.domain = None | ||
294 | return False | ||
295 | |||
296 | return [self.domain_name] | ||
297 | |||
298 | def stop_instance(self, fast=False): | ||
299 | """ | ||
300 | I attempt to stop a running VM. | ||
301 | I make sure any connection to the worker is removed. | ||
302 | If the VM was using a cloned image, I remove the clone | ||
303 | When everything is tidied up, I ask that bbot looks for work to do | ||
304 | """ | ||
305 | |||
306 | log.msg("Attempting to stop '{}'".format(self.workername)) | ||
307 | if self.domain is None: | ||
308 | log.msg("I don't think that domain is even running, aborting") | ||
309 | return defer.succeed(None) | ||
310 | |||
311 | domain = self.domain | ||
312 | self.domain = None | ||
313 | |||
314 | d = domain.destroy() | ||
315 | if self.volume is not None: | ||
316 | self.volume.destroy() | ||
317 | |||
318 | return d | ||
diff --git a/modules/private/buildbot/common/master.cfg b/modules/private/buildbot/common/master.cfg deleted file mode 100644 index abe08e0..0000000 --- a/modules/private/buildbot/common/master.cfg +++ /dev/null | |||
@@ -1,69 +0,0 @@ | |||
1 | # -*- python -*- | ||
2 | # ex: set filetype=python: | ||
3 | |||
4 | from buildbot.plugins import secrets, util, webhooks | ||
5 | from buildbot.util import bytes2unicode | ||
6 | import re | ||
7 | import os | ||
8 | from buildbot_config import E, configure | ||
9 | import json | ||
10 | |||
11 | class CustomBase(webhooks.base): | ||
12 | def getChanges(self, request): | ||
13 | try: | ||
14 | content = request.content.read() | ||
15 | args = json.loads(bytes2unicode(content)) | ||
16 | except Exception as e: | ||
17 | raise ValueError("Error loading JSON: " + str(e)) | ||
18 | |||
19 | args.setdefault("comments", "") | ||
20 | args.setdefault("repository", "") | ||
21 | args.setdefault("author", args.get("who")) | ||
22 | |||
23 | return ([args], None) | ||
24 | |||
25 | userInfoProvider = util.LdapUserInfo( | ||
26 | uri=E.LDAP_URL, | ||
27 | bindUser=E.LDAP_ADMIN_USER, | ||
28 | bindPw=open(E.SECRETS_FILE + "/ldap", "r").read().rstrip(), | ||
29 | accountBase=E.LDAP_BASE, | ||
30 | accountPattern=E.LDAP_PATTERN, | ||
31 | accountFullName='cn', | ||
32 | accountEmail='mail', | ||
33 | avatarData="jpegPhoto", | ||
34 | groupBase=E.LDAP_BASE, | ||
35 | groupName="cn", | ||
36 | groupMemberPattern=E.LDAP_GROUP_PATTERN, | ||
37 | ) | ||
38 | |||
39 | c = BuildmasterConfig = { | ||
40 | "title": E.TITLE, | ||
41 | "titleURL": E.TITLE_URL, | ||
42 | "db": { | ||
43 | "db_url": "sqlite:///state.sqlite" | ||
44 | }, | ||
45 | "protocols": { "pb": { "port": E.PB_SOCKET } }, | ||
46 | "workers": [], | ||
47 | "change_source": [], | ||
48 | "schedulers": [], | ||
49 | "builders": [], | ||
50 | "services": [], | ||
51 | "secretsProviders": [ | ||
52 | secrets.SecretInAFile(E.SECRETS_FILE), | ||
53 | ], | ||
54 | "www": { | ||
55 | "change_hook_dialects": { "base": { "custom_class": CustomBase } }, | ||
56 | "plugins": { | ||
57 | "waterfall_view": {}, | ||
58 | "console_view": {}, | ||
59 | "grid_view": {}, | ||
60 | "buildslist": {}, | ||
61 | }, | ||
62 | "auth": util.RemoteUserAuth( | ||
63 | header=b"X-Remote-User", | ||
64 | userInfoProvider=userInfoProvider, | ||
65 | headerRegex=re.compile(br"(?P<username>[^ @]+)")), | ||
66 | } | ||
67 | } | ||
68 | |||
69 | configure(c) | ||
diff --git a/modules/private/buildbot/default.nix b/modules/private/buildbot/default.nix deleted file mode 100644 index ec28b63..0000000 --- a/modules/private/buildbot/default.nix +++ /dev/null | |||
@@ -1,244 +0,0 @@ | |||
{ lib, pkgs, config, ... }:
let
  varDir = "/var/lib/buildbot";
  # Shared python helpers from ./common, packaged so every master can
  # import buildbot_common from its PYTHONPATH.
  buildbot_common = pkgs.python3Packages.buildPythonPackage rec {
    name = "buildbot_common";
    src = ./common;
    format = "other";
    installPhase = ''
      mkdir -p $out/${pkgs.python3.pythonForBuild.sitePackages}
      cp -a $src $out/${pkgs.python3.pythonForBuild.sitePackages}/buildbot_common
    '';
  };
  buildbot = pkgs.python3Packages.buildbot-full;
in
{
  options = {
    myServices.buildbot.enable = lib.mkOption {
      type = lib.types.bool;
      default = false;
      description = ''
        Whether to enable buildbot.
      '';
    };
  };

  config = lib.mkIf config.myServices.buildbot.enable {
    # Fixed uid/gid come from the environment description so they are
    # stable across hosts.
    ids.uids.buildbot = config.myEnv.buildbot.user.uid;
    ids.gids.buildbot = config.myEnv.buildbot.user.gid;

    users.groups.buildbot.gid = config.ids.gids.buildbot;
    users.users.buildbot = {
      name = "buildbot";
      uid = config.ids.uids.buildbot;
      group = "buildbot";
      description = "Buildbot user";
      home = varDir;
      extraGroups = [ "keys" ];
    };

    # Reload the web frontend whenever a project's webhook ACL changes.
    services.websites.env.tools.watchPaths = lib.attrsets.mapAttrsToList
      (k: project: config.secrets.fullPaths."buildbot/${project.name}/webhook-httpd-include")
      config.myEnv.buildbot.projects;

    # One reverse-proxy stanza per project: websocket + HTTP over the
    # per-project unix socket, UI behind LDAP, change hook reachable
    # either locally, by LDAP group, or by access-key token.
    services.websites.env.tools.vhostConfs.git.extraConfig = lib.attrsets.mapAttrsToList (k: project: ''
      RedirectMatch permanent "^/buildbot/${project.name}$" "/buildbot/${project.name}/"
      RewriteEngine On
      RewriteRule ^/buildbot/${project.name}/ws(.*)$ unix:///run/buildbot/${project.name}.sock|ws://git.immae.eu/ws$1 [P,NE,QSA,L]
      ProxyPass /buildbot/${project.name}/ unix:///run/buildbot/${project.name}.sock|http://${project.name}-git.immae.eu/
      ProxyPassReverse /buildbot/${project.name}/ unix:///run/buildbot/${project.name}.sock|http://${project.name}-git.immae.eu/
      <Location /buildbot/${project.name}/>
        Use LDAPConnect
        Require ldap-group cn=users,ou=${project.name},cn=buildbot,ou=services,dc=immae,dc=eu

        SetEnvIf X-Url-Scheme https HTTPS=1
        ProxyPreserveHost On
      </Location>
      <Location /buildbot/${project.name}/change_hook/base>
        <RequireAny>
          Require local
          Require ldap-group cn=users,ou=${project.name},cn=buildbot,ou=services,dc=immae,dc=eu
          Include ${config.secrets.fullPaths."buildbot/${project.name}/webhook-httpd-include"}
        </RequireAny>
      </Location>
    '') config.myEnv.buildbot.projects;

    system.activationScripts = lib.attrsets.mapAttrs' (k: project: lib.attrsets.nameValuePair "buildbot-${project.name}" {
      deps = [ "users" "wrappers" ];
      text = ''
        install -m 755 -o buildbot -g buildbot -d ${varDir}/${project.name}

        ${project.activationScript}
      '';
    }) config.myEnv.buildbot.projects;

    # Secret files: per-project declared secrets, the webhook ACL (owned
    # by the web server), and a generated environment file, plus three
    # shared secrets (ldap password, worker password, ssh key).
    secrets.keys = lib.listToAttrs (
      lib.lists.flatten (
        lib.attrsets.mapAttrsToList (k: project:
          lib.attrsets.mapAttrsToList (k: v:
            (lib.nameValuePair "buildbot/${project.name}/${k}" {
              permissions = "0600";
              user = "buildbot";
              group = "buildbot";
              text = v;
            })
          ) project.secrets
          ++ [
            (lib.nameValuePair "buildbot/${project.name}/webhook-httpd-include" {
              permissions = "0600";
              user = "wwwrun";
              group = "wwwrun";
              text = lib.optionalString (project.webhookTokens != null) ''
                Require expr "req('Access-Key') in { ${builtins.concatStringsSep ", " (map (x: "'${x}'") project.webhookTokens)} }"
              '';
            })
            (lib.nameValuePair "buildbot/${project.name}/environment_file" {
              permissions = "0600";
              user = "buildbot";
              group = "buildbot";
              text = let
                project_env = with lib.attrsets;
                  mapAttrs' (k: v: nameValuePair "BUILDBOT_${k}" v) project.environment //
                  mapAttrs' (k: v: nameValuePair "BUILDBOT_PATH_${k}" (v pkgs)) (attrByPath ["builderPaths"] {} project) //
                  {
                    BUILDBOT_PROJECT_DIR = ./projects + "/${project.name}";
                    BUILDBOT_WORKER_PORT = builtins.toString project.workerPort;
                    BUILDBOT_HOST = config.hostEnv.fqdn;
                    BUILDBOT_VIRT_URL = "qemu+ssh://libvirt@dilion.immae.eu/system";
                  };
              in builtins.concatStringsSep "\n"
                (lib.mapAttrsToList (envK: envV: "${envK}=${envV}") project_env);
            })
          ]
        ) config.myEnv.buildbot.projects
      )
    ) // {
      "buildbot/ldap" = {
        permissions = "0600";
        user = "buildbot";
        group = "buildbot";
        text = config.myEnv.buildbot.ldap.password;
      };
      "buildbot/worker_password" = {
        permissions = "0600";
        user = "buildbot";
        group = "buildbot";
        text = config.myEnv.buildbot.workerPassword;
      };
      "buildbot/ssh_key" = {
        permissions = "0600";
        user = "buildbot";
        group = "buildbot";
        text = config.myEnv.buildbot.ssh_key.private;
      };
    };

    # Restart a project's master whenever any of its secrets change.
    services.filesWatcher = lib.attrsets.mapAttrs' (k: project: lib.attrsets.nameValuePair "buildbot-${project.name}" {
      restart = true;
      paths = [
        config.secrets.fullPaths."buildbot/ldap"
        config.secrets.fullPaths."buildbot/worker_password"
        config.secrets.fullPaths."buildbot/ssh_key"
        config.secrets.fullPaths."buildbot/${project.name}/environment_file"
      ] ++ lib.attrsets.mapAttrsToList (k: v: config.secrets.fullPaths."buildbot/${project.name}/${k}") project.secrets;
    }) config.myEnv.buildbot.projects;

    systemd.slices.buildbot = {
      description = "buildbot slice";
    };

    networking.firewall.allowedTCPPorts = lib.attrsets.mapAttrsToList (k: v: v.workerPort) config.myEnv.buildbot.projects;
    # One systemd service per project, each with its own basedir, tac
    # file and PYTHONPATH built from the project's declared packages.
    systemd.services = lib.attrsets.mapAttrs' (k: project: lib.attrsets.nameValuePair "buildbot-${project.name}" {
      description = "Buildbot Continuous Integration Server ${project.name}.";
      after = [ "network-online.target" ];
      wantedBy = [ "multi-user.target" ];
      path = project.packages pkgs ++ (project.pythonPackages buildbot.pythonModule pkgs);
      preStart = let
        master-cfg = "${buildbot_common}/${pkgs.python3.pythonForBuild.sitePackages}/buildbot_common/master.cfg";
        tac_file = pkgs.writeText "buildbot.tac" ''
          import os

          from twisted.application import service
          from buildbot.master import BuildMaster

          basedir = '${varDir}/${project.name}'
          rotateLength = 10000000
          maxRotatedFiles = 10
          configfile = '${master-cfg}'

          # Default umask for server
          umask = None

          # if this is a relocatable tac file, get the directory containing the TAC
          if basedir == '.':
              import os
              basedir = os.path.abspath(os.path.dirname(__file__))

          # note: this line is matched against to check that this is a buildmaster
          # directory; do not edit it.
          application = service.Application('buildmaster')
          from twisted.python.logfile import LogFile
          from twisted.python.log import ILogObserver, FileLogObserver
          logfile = LogFile.fromFullPath(os.path.join(basedir, "twistd.log"), rotateLength=rotateLength,
                                         maxRotatedFiles=maxRotatedFiles)
          application.setComponent(ILogObserver, FileLogObserver(logfile).emit)

          m = BuildMaster(basedir, configfile, umask)
          m.setServiceParent(application)
          m.log_rotation.rotateLength = rotateLength
          m.log_rotation.maxRotatedFiles = maxRotatedFiles
        '';
      in ''
        if [ ! -f ${varDir}/${project.name}/buildbot.tac ]; then
          ${buildbot}/bin/buildbot create-master -c "${master-cfg}" "${varDir}/${project.name}"
          rm -f ${varDir}/${project.name}/master.cfg.sample
          rm -f ${varDir}/${project.name}/buildbot.tac
        fi
        ln -sf ${tac_file} ${varDir}/${project.name}/buildbot.tac
        # different buildbots may be trying that simultaneously, add the || true to avoid complaining in case of race
        install -Dm600 -o buildbot -g buildbot -T ${config.secrets.fullPaths."buildbot/ssh_key"} ${varDir}/buildbot_key || true
        buildbot_secrets=${varDir}/${project.name}/secrets
        install -m 0700 -o buildbot -g buildbot -d $buildbot_secrets
        install -Dm600 -o buildbot -g buildbot -T ${config.secrets.fullPaths."buildbot/ldap"} $buildbot_secrets/ldap
        install -Dm600 -o buildbot -g buildbot -T ${config.secrets.fullPaths."buildbot/worker_password"} $buildbot_secrets/worker_password
        ${builtins.concatStringsSep "\n" (lib.attrsets.mapAttrsToList
          (k: v: "install -Dm600 -o buildbot -g buildbot -T ${config.secrets.fullPaths."buildbot/${project.name}/${k}"} $buildbot_secrets/${k}") project.secrets
        )}
        ${buildbot}/bin/buildbot upgrade-master ${varDir}/${project.name}
      '';
      environment = let
        buildbot_config = pkgs.python3Packages.buildPythonPackage (rec {
          name = "buildbot_config-${project.name}";
          src = ./projects + "/${project.name}";
          format = "other";
          installPhase = ''
            mkdir -p $out/${pkgs.python3.pythonForBuild.sitePackages}
            cp -a $src $out/${pkgs.python3.pythonForBuild.sitePackages}/buildbot_config
          '';
        });
        HOME = "${varDir}/${project.name}";
        PYTHONPATH = "${buildbot.pythonModule.withPackages (self: project.pythonPackages self pkgs ++ [
          pkgs.python3Packages.libvirt
          pkgs.python3Packages.wokkel
          pkgs.python3Packages.treq pkgs.python3Packages.ldap3 buildbot
          pkgs.python3Packages.buildbot-worker
          buildbot_common buildbot_config
        ])}/${buildbot.pythonModule.sitePackages}${if project.pythonPathHome then ":${varDir}/${project.name}/.local/${pkgs.python3.pythonForBuild.sitePackages}" else ""}";
      in { inherit PYTHONPATH HOME; };

      serviceConfig = {
        Slice = "buildbot.slice";
        Type = "forking";
        User = "buildbot";
        Group = "buildbot";
        RuntimeDirectory = "buildbot";
        RuntimeDirectoryPreserve = "yes";
        StateDirectory = "buildbot";
        SupplementaryGroups = "keys";
        WorkingDirectory = "${varDir}/${project.name}";
        ExecStart = "${buildbot}/bin/buildbot start";
        EnvironmentFile = config.secrets.fullPaths."buildbot/${project.name}/environment_file";
      };
    }) config.myEnv.buildbot.projects;
  };
}
diff --git a/modules/private/buildbot/projects/caldance/__init__.py b/modules/private/buildbot/projects/caldance/__init__.py deleted file mode 100644 index 2074d9e..0000000 --- a/modules/private/buildbot/projects/caldance/__init__.py +++ /dev/null | |||
@@ -1,198 +0,0 @@ | |||
1 | from buildbot.plugins import * | ||
2 | from buildbot_common.build_helpers import * | ||
3 | import os | ||
4 | from buildbot.util import bytes2unicode | ||
5 | import json | ||
6 | |||
7 | __all__ = [ "configure", "E" ] | ||
8 | |||
class E():
    """Static configuration constants for the caldance buildbot master."""
    PROJECT = "caldance"
    BUILDBOT_URL = "https://git.immae.eu/buildbot/{}/".format(PROJECT)
    # Unix sockets for the web UI and the worker (pb) protocol.
    SOCKET = "unix:/run/buildbot/{}.sock".format(PROJECT)
    PB_SOCKET = "unix:address=/run/buildbot/{}_pb.sock".format(PROJECT)
    RELEASE_PATH = "/var/lib/ftp/release.immae.eu/{}".format(PROJECT)
    RELEASE_URL = "https://release.immae.eu/{}".format(PROJECT)
    GIT_URL = "gitolite@git.immae.eu:perso/simon_descarpentries/www.cal-dance.com"
    SSH_KEY_PATH = "/var/lib/buildbot/buildbot_key"
    SSH_HOST_KEY = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIFbhFTl2A2RJn5L51yxJM4XfCS2ZaiSX/jo9jFSdghF"
    LDAP_HOST = "ldap.immae.eu"
    LDAP_DN = "cn=buildbot,ou=services,dc=immae,dc=eu"
    LDAP_ROLES_BASE = "ou=roles,ou=hosts,dc=immae,dc=eu"
    # Space-separated JIDs to notify; read from the service's environment
    # file at import time (raises KeyError if the variable is unset).
    XMPP_RECIPIENTS = os.environ["BUILDBOT_XMPP_RECIPIENTS"].split(" ")

    # ssh targets for the deploy step, keyed by environment name.
    PUPPET_HOST = {
        "integration": [ "-p8022", "root@caldance.cs.immae.dev"],
    }

    # master.cfg
    SECRETS_FILE = os.getcwd() + "/secrets"
    LDAP_URL = "ldaps://ldap.immae.eu:636"
    LDAP_ADMIN_USER = "cn=buildbot,ou=services,dc=immae,dc=eu"
    LDAP_BASE = "dc=immae,dc=eu"
    LDAP_PATTERN = "(uid=%(username)s)"
    LDAP_GROUP_PATTERN = "(&(memberOf=cn=groups,ou=caldance,cn=buildbot,ou=services,dc=immae,dc=eu)(member=%(dn)s))"
    TITLE_URL = "https://caldance.cs.immae.dev"
    TITLE = "Caldance"
37 | |||
class CustomBase(webhooks.base):
    """Change-hook dialect that accepts a raw JSON change payload.

    An ordinary payload is forwarded as a change dict with sensible
    defaults filled in.  A payload whose "category" is "deploy_webhook"
    is rewritten into a synthetic deploy change carrying the target
    environment and tarball name as build properties.
    """
    def getChanges(self, request):
        try:
            content = request.content.read()
            args = json.loads(bytes2unicode(content))
        except Exception as e:
            raise ValueError("Error loading JSON: " + str(e))

        args.setdefault("comments", "")
        args.setdefault("repository", "")
        args.setdefault("author", args.get("who", "unknown"))

        # Use .get(): a payload without a "category" key used to raise
        # KeyError here and turn the webhook request into a 500.
        if args.get("category") == "deploy_webhook":
            args = {
                "category": "deploy_webhook",
                "comments": "",
                "repository": "",
                "author": "webhook",
                "project": "Caldance",
                "properties": {
                    "environment": args.get("environment", "integration"),
                    "build": "caldance_{}.tar.gz".format(args.get("build", "master"))
                }
            }

        return ([args], None)
64 | |||
def deploy_hook_scheduler(project, timer=1):
    """Scheduler reacting to deploy_webhook changes for *project*."""
    deploy_name = "{}_deploy".format(project)
    deploy_filter = util.ChangeFilter(category="deploy_webhook", project=project)
    return schedulers.AnyBranchScheduler(
        name=deploy_name,
        change_filter=deploy_filter,
        treeStableTimer=timer,
        builderNames=[deploy_name])
69 | |||
def configure(c):
    """Fill the skeleton BuildmasterConfig *c* for the caldance project.

    Registers workers, schedulers, the build and deploy builders (which
    share a master lock so a deploy never overlaps a build), and the
    Slack/XMPP notifiers.  Reads notification secrets from disk at
    configuration-load time.
    """
    c["buildbotURL"] = E.BUILDBOT_URL
    c["www"]["port"] = E.SOCKET

    c["www"]["change_hook_dialects"]["base"] = {
        "custom_class": CustomBase
    }
    c['workers'].append(worker.LocalWorker("generic-worker"))
    c['workers'].append(worker.LocalWorker("deploy-worker"))

    # Exclusive lock shared by the build and deploy builders below.
    db_lock = util.MasterLock("deploy_after_build")

    c['schedulers'].append(hook_scheduler("Caldance", timer=1))
    c['schedulers'].append(force_scheduler("force_caldance", ["Caldance_build"]))
    c['schedulers'].append(deploy_scheduler("deploy_caldance", ["Caldance_deploy"]))
    c['schedulers'].append(deploy_hook_scheduler("Caldance", timer=1))

    c['builders'].append(factory("caldance", locks=[db_lock.access('exclusive')]))

    c['builders'].append(deploy_factory("caldance", locks=[db_lock.access('exclusive')]))

    c['services'].append(SlackStatusPush(
        name="slack_status_caldance",
        builders=["Caldance_build", "Caldance_deploy"],
        serverUrl=open(E.SECRETS_FILE + "/slack_webhook", "r").read().rstrip()))
    c['services'].append(XMPPStatusPush(
        name="xmpp_status_caldance",
        builders=["Caldance_build", "Caldance_deploy"],
        recipients=E.XMPP_RECIPIENTS,
        password=open(E.SECRETS_FILE + "/notify_xmpp_password", "r").read().rstrip()))
100 | |||
def factory(project, locks=[], ignore_fails=False):
    """Builder that checks out the project and uploads a release tarball.

    The tarball name embeds the sanitized branch name.  *locks* is
    forwarded to the BuilderConfig.  *ignore_fails* is accepted for
    signature parity with the other projects but is unused here.
    """
    release_file = "{1}/{0}_%(kw:clean_branch)s.tar.gz"

    package = util.Interpolate("{0}_%(kw:clean_branch)s.tar.gz".format(project), clean_branch=clean_branch)
    package_dest = util.Interpolate(release_file.format(project, E.RELEASE_PATH), clean_branch=clean_branch)
    package_url = util.Interpolate(release_file.format(project, E.RELEASE_URL), clean_branch=clean_branch)

    # Read the deploy key through a context manager so the file handle
    # is closed (the original open(...).read() leaked it).
    with open(E.SSH_KEY_PATH) as key_file:
        ssh_private_key = key_file.read().rstrip()

    factory = util.BuildFactory()
    factory.addStep(steps.Git(logEnviron=False, repourl=E.GIT_URL,
        sshPrivateKey=ssh_private_key,
        sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy"))
    factory.addSteps(package_and_upload(package, package_dest, package_url))

    return util.BuilderConfig(
        name="{}_build".format(project.capitalize()),
        locks=locks,
        workernames=["generic-worker"], factory=factory)
118 | |||
def compute_build_infos(project):
    """Return a renderer computing version and sha256 of prop:build."""
    @util.renderer
    def compute(props):
        import re, hashlib
        tarball_name = props.getProperty("build")
        tarball_path = "{1}/{0}".format(tarball_name, E.RELEASE_PATH)
        match = re.match(r"{0}_(.*).tar.gz".format(project), tarball_name)
        with open(tarball_path, "rb") as fh:
            digest = hashlib.sha256(fh.read()).hexdigest()
        return {
            "build_version": match.group(1),
            "build_hash": digest,
        }
    return compute
133 | |||
@util.renderer
def puppet_ssh_command(props):
    """Render the ssh command line targeting the deploy environment."""
    if props.hasProperty("environment"):
        environment = props["environment"]
    else:
        environment = "integration"
    base_command = [
        "ssh",
        "-o", "UserKnownHostsFile=/dev/null",
        "-o", "StrictHostKeyChecking=no",
        "-o", "CheckHostIP=no",
        "-i", E.SSH_KEY_PATH,
    ]
    return base_command + E.PUPPET_HOST.get(environment, ["host.invalid"])
141 | |||
def deploy_factory(project, locks=[]):
    """Builder that deploys a previously-built tarball via LDAP + puppet."""
    package_dest = util.Interpolate("{0}/%(prop:build)s".format(E.RELEASE_PATH))

    deploy = util.BuildFactory()
    deploy_steps = (
        steps.MasterShellCommand(command=["test", "-f", package_dest]),
        steps.SetProperties(properties=compute_build_infos(project)),
        LdapPush(environment=util.Property("environment"),
                 project=project,
                 build_version=util.Property("build_version"),
                 build_hash=util.Property("build_hash"),
                 ldap_password=util.Secret("ldap")),
        steps.MasterShellCommand(command=puppet_ssh_command),
    )
    for deploy_step in deploy_steps:
        deploy.addStep(deploy_step)

    return util.BuilderConfig(
        name="{}_deploy".format(project.capitalize()),
        locks=locks,
        workernames=["deploy-worker"],
        factory=deploy)
156 | |||
157 | from twisted.internet import defer | ||
158 | from buildbot.process.buildstep import FAILURE | ||
159 | from buildbot.process.buildstep import SUCCESS | ||
160 | from buildbot.process.buildstep import BuildStep | ||
161 | |||
class LdapPush(BuildStep):
    """Build step that records the new build version/hash in LDAP.

    Rewrites the immaePuppetJson attribute of the project's role entry
    (cn=caldance.<environment>,<roles base>) so puppet picks up the new
    release on its next run.  Succeeds only if the role entry exists.
    """
    name = "LdapPush"
    # All constructor arguments are renderables so Properties/Secrets
    # are resolved at step-execution time.
    renderables = ["environment", "project", "build_version", "build_hash", "ldap_password"]

    def __init__(self, **kwargs):
        # Pop our own arguments so BuildStep only sees its own kwargs.
        self.environment = kwargs.pop("environment")
        self.project = kwargs.pop("project")
        self.build_version = kwargs.pop("build_version")
        self.build_hash = kwargs.pop("build_hash")
        self.ldap_password = kwargs.pop("ldap_password")
        self.ldap_host = kwargs.pop("ldap_host", E.LDAP_HOST)
        super().__init__(**kwargs)

    def run(self):
        import json
        from ldap3 import Reader, Writer, Server, Connection, ObjectDef
        server = Server(self.ldap_host)
        conn = Connection(server,
            user=E.LDAP_DN,
            password=self.ldap_password)
        conn.bind()
        obj = ObjectDef("immaePuppetClass", conn)
        r = Reader(conn, obj,
            "cn=caldance.{},{}".format(self.environment, E.LDAP_ROLES_BASE))
        r.search()
        if len(r) > 0:
            w = Writer.from_cursor(r)
            # The attribute may hold several JSON documents; only the one
            # containing this project's version key is replaced.
            for value in w[0].immaePuppetJson.values:
                config = json.loads(value)
                if "role::caldance::{}_version".format(self.project) in config:
                    config["role::caldance::{}_version".format(self.project)] = self.build_version
                    config["role::caldance::{}_sha256".format(self.project)] = self.build_hash
                    w[0].immaePuppetJson -= value
                    w[0].immaePuppetJson += json.dumps(config, indent="  ")
                    w.commit()
                    return defer.succeed(SUCCESS)
        return defer.succeed(FAILURE)
diff --git a/modules/private/buildbot/projects/cryptoportfolio/__init__.py b/modules/private/buildbot/projects/cryptoportfolio/__init__.py deleted file mode 100644 index 5d70f95..0000000 --- a/modules/private/buildbot/projects/cryptoportfolio/__init__.py +++ /dev/null | |||
@@ -1,169 +0,0 @@ | |||
1 | from buildbot.plugins import * | ||
2 | from buildbot_common.build_helpers import * | ||
3 | import os | ||
4 | |||
5 | __all__ = [ "configure", "E" ] | ||
6 | |||
class E():
    """Static configuration constants for the cryptoportfolio master."""
    PROJECT = "cryptoportfolio"
    BUILDBOT_URL = "https://git.immae.eu/buildbot/{}/".format(PROJECT)
    # Unix sockets for the web UI and the worker (pb) protocol.
    SOCKET = "unix:/run/buildbot/{}.sock".format(PROJECT)
    PB_SOCKET = "unix:address=/run/buildbot/{}_pb.sock".format(PROJECT)
    RELEASE_PATH = "/var/lib/ftp/release.immae.eu/{}".format(PROJECT)
    RELEASE_URL = "https://release.immae.eu/{}".format(PROJECT)
    # Template: {0} is the capitalized sub-project name (Trader/Front).
    GIT_URL = "https://git.immae.eu/perso/Immae/Projets/Cryptomonnaies/Cryptoportfolio/{0}.git"
    SSH_KEY_PATH = "/var/lib/buildbot/buildbot_key"
    LDAP_HOST = "ldap.immae.eu"
    LDAP_DN = "cn=buildbot,ou=services,dc=immae,dc=eu"
    LDAP_ROLES_BASE = "ou=roles,ou=hosts,dc=immae,dc=eu"

    # ssh targets for the deploy step, keyed by environment name.
    PUPPET_HOST = {
        "production": "root@cryptoportfolio.immae.eu",
        "integration": "root@cryptoportfolio-dev.immae.eu"
    }

    # master.cfg
    SECRETS_FILE = os.getcwd() + "/secrets"
    LDAP_URL = "ldaps://ldap.immae.eu:636"
    LDAP_ADMIN_USER = "cn=buildbot,ou=services,dc=immae,dc=eu"
    LDAP_BASE = "dc=immae,dc=eu"
    LDAP_PATTERN = "(uid=%(username)s)"
    LDAP_GROUP_PATTERN = "(&(memberOf=cn=groups,ou=cryptoportfolio,cn=buildbot,ou=services,dc=immae,dc=eu)(member=%(dn)s))"
    TITLE_URL = "https://git.immae.eu"
    TITLE = "Cryptoportfolio"
34 | |||
# One-time worker setup notes (run manually; `eval ..` in .zshrc_local):
# mkdir -p $BUILD/go
# export GOPATH=$BUILD/go
# go get -u github.com/golang/dep/cmd/dep
# export PATH=$PATH:$BUILD/go/bin
# go get git.immae.eu/Cryptoportfolio/Front.git
# cd $BUILD/go/src/git.immae.eu/Cryptoportfolio/Front.git
# git checkout dev
# dep ensure
43 | # dep ensure | ||
def configure(c):
    """Fill the skeleton BuildmasterConfig *c* for cryptoportfolio.

    Registers workers, the per-repository hook/force/deploy schedulers,
    the Trader and Front build/deploy builders (Front failures are
    warnings only), and the Slack notifier.  Reads the Slack webhook
    secret from disk at configuration-load time.
    """
    c["buildbotURL"] = E.BUILDBOT_URL
    c["www"]["port"] = E.SOCKET

    c['workers'].append(worker.LocalWorker("generic-worker"))
    c['workers'].append(worker.LocalWorker("deploy-worker"))

    c['schedulers'].append(hook_scheduler("Trader"))
    c['schedulers'].append(hook_scheduler("Front"))
    c['schedulers'].append(force_scheduler(
        "force_cryptoportfolio", ["Trader_build", "Front_build"]))
    c['schedulers'].append(deploy_scheduler("deploy_cryptoportfolio",
        ["Trader_deploy", "Front_deploy"]))

    c['builders'].append(factory("trader"))
    c['builders'].append(factory("front", ignore_fails=True))

    c['builders'].append(deploy_factory("trader"))
    c['builders'].append(deploy_factory("front"))

    c['services'].append(SlackStatusPush(
        name="slack_status_cryptoportfolio",
        builders=["Front_build", "Trader_build", "Front_deploy", "Trader_deploy"],
        serverUrl=open(E.SECRETS_FILE + "/slack_webhook", "r").read().rstrip()))
68 | |||
def factory(project, ignore_fails=False):
    """Builder that checks out, builds, tests and packages *project*.

    When *ignore_fails* is true, failing make steps only warn instead of
    halting and flunking the build.
    """
    release_file = "{1}/{0}/{0}_%(kw:clean_branch)s.tar.gz"

    url = E.GIT_URL.format(project.capitalize())

    package = util.Interpolate("{0}_%(kw:clean_branch)s.tar.gz".format(project), clean_branch=clean_branch)
    package_dest = util.Interpolate(release_file.format(project, E.RELEASE_PATH), clean_branch=clean_branch)
    package_url = util.Interpolate(release_file.format(project, E.RELEASE_URL), clean_branch=clean_branch)

    fatal = not ignore_fails
    build = util.BuildFactory()
    build.addStep(steps.Git(logEnviron=False, repourl=url,
        mode="full", method="copy"))
    for target in ("install", "test"):
        build.addStep(steps.ShellCommand(
            name="make " + target,
            logEnviron=False,
            haltOnFailure=fatal,
            warnOnFailure=ignore_fails,
            flunkOnFailure=fatal,
            command=["make", target]))
    build.addSteps(package_and_upload(package, package_dest, package_url))

    return util.BuilderConfig(
        name="{}_build".format(project.capitalize()),
        workernames=["generic-worker"],
        factory=build)
94 | |||
def compute_build_infos(project):
    """Return a renderer computing version and sha256 of prop:build.

    The renderer parses the version out of the tarball name
    (<project>_<version>.tar.gz) and hashes the tarball found under
    RELEASE_PATH/<project>/.
    """
    @util.renderer
    def compute(props):
        import re, hashlib
        build_file = props.getProperty("build")
        package_dest = "{2}/{0}/{1}".format(project, build_file, E.RELEASE_PATH)
        version = re.match(r"{0}_(.*).tar.gz".format(project), build_file).group(1)
        with open(package_dest, "rb") as f:
            sha = hashlib.sha256(f.read()).hexdigest()
        return {
            "build_version": version,
            "build_hash": sha,
        }
    return compute
109 | |||
@util.renderer
def puppet_host(props):
    """Map prop:environment to its puppet ssh target (default: integration)."""
    if props.hasProperty("environment"):
        environment = props["environment"]
    else:
        environment = "integration"
    return E.PUPPET_HOST.get(environment, "host.invalid")
114 | |||
def deploy_factory(project):
    """Builder that deploys a previously-built tarball via LDAP + puppet.

    Checks the tarball exists, derives version/hash properties, pushes
    them to LDAP, then triggers puppet on the target host over ssh.
    """
    package_dest = util.Interpolate("{1}/{0}/%(prop:build)s".format(project, E.RELEASE_PATH))

    factory = util.BuildFactory()
    factory.addStep(steps.MasterShellCommand(command=["test", "-f", package_dest]))
    factory.addStep(steps.SetProperties(properties=compute_build_infos(project)))
    factory.addStep(LdapPush(environment=util.Property("environment"),
        project=project, build_version=util.Property("build_version"),
        build_hash=util.Property("build_hash"), ldap_password=util.Secret("ldap")))
    factory.addStep(steps.MasterShellCommand(command=[
        "ssh", "-o", "UserKnownHostsFile=/dev/null", "-o", "StrictHostKeyChecking=no", "-o", "CheckHostIP=no", "-i", E.SSH_KEY_PATH, puppet_host]))
    return util.BuilderConfig(name="{}_deploy".format(project.capitalize()), workernames=["deploy-worker"], factory=factory)
127 | |||
128 | from twisted.internet import defer | ||
129 | from buildbot.process.buildstep import FAILURE | ||
130 | from buildbot.process.buildstep import SUCCESS | ||
131 | from buildbot.process.buildstep import BuildStep | ||
132 | |||
class LdapPush(BuildStep):
    """Build step that records the new build version/hash in LDAP.

    Rewrites the immaePuppetJson attribute of the project's role entry
    (cn=cryptoportfolio.<environment>,<roles base>) so puppet picks up
    the new release on its next run.  Succeeds only if the role entry
    exists.
    """
    name = "LdapPush"
    # All constructor arguments are renderables so Properties/Secrets
    # are resolved at step-execution time.
    renderables = ["environment", "project", "build_version", "build_hash", "ldap_password"]

    def __init__(self, **kwargs):
        # Pop our own arguments so BuildStep only sees its own kwargs.
        self.environment = kwargs.pop("environment")
        self.project = kwargs.pop("project")
        self.build_version = kwargs.pop("build_version")
        self.build_hash = kwargs.pop("build_hash")
        self.ldap_password = kwargs.pop("ldap_password")
        self.ldap_host = kwargs.pop("ldap_host", E.LDAP_HOST)
        super().__init__(**kwargs)

    def run(self):
        import json
        from ldap3 import Reader, Writer, Server, Connection, ObjectDef
        server = Server(self.ldap_host)
        conn = Connection(server,
            user=E.LDAP_DN,
            password=self.ldap_password)
        conn.bind()
        obj = ObjectDef("immaePuppetClass", conn)
        r = Reader(conn, obj,
            "cn=cryptoportfolio.{},{}".format(self.environment, E.LDAP_ROLES_BASE))
        r.search()
        if len(r) > 0:
            w = Writer.from_cursor(r)
            # The attribute may hold several JSON documents; only the one
            # containing this project's version key is replaced.
            for value in w[0].immaePuppetJson.values:
                config = json.loads(value)
                if "role::cryptoportfolio::{}_version".format(self.project) in config:
                    config["role::cryptoportfolio::{}_version".format(self.project)] = self.build_version
                    config["role::cryptoportfolio::{}_sha256".format(self.project)] = self.build_hash
                    w[0].immaePuppetJson -= value
                    w[0].immaePuppetJson += json.dumps(config, indent="  ")
                    w.commit()
                    return defer.succeed(SUCCESS)
        return defer.succeed(FAILURE)
diff --git a/modules/private/buildbot/projects/denise/__init__.py b/modules/private/buildbot/projects/denise/__init__.py deleted file mode 100644 index abeba3c..0000000 --- a/modules/private/buildbot/projects/denise/__init__.py +++ /dev/null | |||
@@ -1,186 +0,0 @@ | |||
1 | from buildbot.plugins import * | ||
2 | from buildbot_common.build_helpers import * | ||
3 | import os | ||
4 | from buildbot.util import bytes2unicode | ||
5 | import json | ||
6 | |||
7 | __all__ = [ "configure", "E" ] | ||
8 | |||
class E():
    """Static configuration for the denise buildbot project: listen
    sockets, git sources, deployment paths and LDAP/auth settings."""
    PROJECT = "denise"
    BUILDBOT_URL = "https://git.immae.eu/buildbot/" + PROJECT + "/"
    SOCKET = "unix:/run/buildbot/" + PROJECT + ".sock"
    PB_SOCKET = "unix:address=/run/buildbot/" + PROJECT + "_pb.sock"
    SSH_KEY_PATH = "/var/lib/buildbot/buildbot_key"
    SSH_HOST_KEY = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIFbhFTl2A2RJn5L51yxJM4XfCS2ZaiSX/jo9jFSdghF"

    # Per-subproject git sources and master-side output directories
    # (release vs. beta targets are selected on the build branch).
    BINGO_RELEASE_PATH = "/var/lib/buildbot/outputs/denise/bingo"
    BINGO_BETA_PATH = "/var/lib/buildbot/outputs/denise/bingo_beta"
    BINGO_GIT_URL = "gitolite@git.immae.eu:perso/Denise/bingo"

    OMS_RELEASE_PATH = "/var/lib/buildbot/outputs/denise/oms"
    OMS_BETA_PATH = "/var/lib/buildbot/outputs/denise/oms_beta"
    OMS_GIT_URL = "gitolite@git.immae.eu:perso/Denise/oms"

    AVENTURIERS_RELEASE_PATH = "/var/lib/buildbot/outputs/denise/aventuriers"
    AVENTURIERS_GIT_URL = "https://git.immae.eu/perso/Denise/aventuriers.git"

    # Values consumed by the shared master.cfg
    SECRETS_FILE = os.getcwd() + "/secrets"
    LDAP_URL = "ldaps://ldap.immae.eu:636"
    LDAP_ADMIN_USER = "cn=buildbot,ou=services,dc=immae,dc=eu"
    LDAP_BASE = "dc=immae,dc=eu"
    LDAP_PATTERN = "(uid=%(username)s)"
    LDAP_GROUP_PATTERN = "(&(memberOf=cn=groups,ou=denise,cn=buildbot,ou=services,dc=immae,dc=eu)(member=%(dn)s))"
    TITLE_URL = "https://oms.syanni.eu"
    TITLE = "Syanni website"
37 | |||
class CustomBase(webhooks.base):
    """Change-hook dialect accepting a raw JSON change payload."""
    def getChanges(self, request):
        # Any parse/read problem is surfaced as a ValueError so buildbot
        # reports a bad request instead of crashing the hook.
        try:
            change = json.loads(bytes2unicode(request.content.read()))
        except Exception as e:
            raise ValueError("Error loading JSON: " + str(e))

        # Fill in the fields buildbot requires when the sender omits them.
        for key, fallback in (
                ("comments", ""),
                ("repository", ""),
                ("author", change.get("who", "unknown"))):
            change.setdefault(key, fallback)

        return ([change], None)
51 | |||
def configure(c):
    """Wire the denise project into the shared buildbot master config *c*:
    web UI socket, change hook, local worker, schedulers and builders."""
    c["buildbotURL"] = E.BUILDBOT_URL
    c["www"]["port"] = E.SOCKET

    c["www"]["change_hook_dialects"]["base"] = {
        "custom_class": CustomBase
    }
    c["workers"].append(worker.LocalWorker("generic-worker-denise"))

    # One hook-triggered scheduler per sub-project, plus a manual trigger.
    for project in ("DeniseBingo", "DeniseOMS", "DeniseAventuriers"):
        c["schedulers"].append(hook_scheduler(project, timer=1))
    c["schedulers"].append(force_scheduler("force_denise", [
        "DeniseBingo_build", "DeniseOMS_build", "DeniseAventuriers_build"
    ]))

    for make_builder in (denise_oms_factory, denise_bingo_factory,
            denise_aventuriers_factory):
        c["builders"].append(make_builder())
71 | |||
def denise_bingo_factory():
    """Builder for the Denise bingo site.

    Deploys the checked-out tree to the release target and restarts the
    matching systemd unit; the ``master`` branch goes to production, any
    other branch to the beta instance.

    Fixes over the previous version: the unused ``bingo_run_path``
    renderer (dead code) is removed, and the repeated "is this the master
    branch?" predicate is factored into one helper.
    """
    def _is_master(props):
        # Branch property present, non-empty and exactly "master".
        return props.hasProperty("branch") and len(props["branch"]) > 0 and props["branch"] == "master"

    @util.renderer
    def bingo_systemd_service(props):
        return "denise-bingo" if _is_master(props) else "denise-bingo-beta"

    @util.renderer
    def bingo_url(props):
        return "https://bingo.syanni.eu" if _is_master(props) else "https://beta.bingo.syanni.eu"

    @util.renderer
    def bingo_path(props):
        return E.BINGO_RELEASE_PATH if _is_master(props) else E.BINGO_BETA_PATH

    factory = util.BuildFactory()
    # Fresh checkout (copy mode) using the deploy key.
    factory.addStep(steps.Git(logEnviron=False, repourl=E.BINGO_GIT_URL,
        submodules=True, sshPrivateKey=open(E.SSH_KEY_PATH).read().rstrip(),
        sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy"))
    # Replace the previous deployment wholesale on the master.
    factory.addStep(steps.MasterShellCommand(command=util.Interpolate("rm -rf %(kw:bingo_path)s", bingo_path=bingo_path)))
    factory.addStep(steps.DirectoryUpload(workersrc="../source",
        masterdest=bingo_path,
        url=bingo_url))
    factory.addStep(steps.MasterShellCommand(command=util.Interpolate("chmod -R a+rX %(kw:bingo_path)s", bingo_path=bingo_path)))
    factory.addStep(steps.MasterShellCommand(command=util.Interpolate("/run/wrappers/bin/sudo systemctl restart %(kw:bingo_service)s.service", bingo_service=bingo_systemd_service)))

    return util.BuilderConfig(name="DeniseBingo_build", workernames=["generic-worker-denise"], factory=factory)
113 | |||
def denise_oms_factory():
    """Builder for the Denise OMS site.

    Deploys the checked-out tree to the release target and restarts the
    matching systemd unit; the ``master`` branch goes to production, any
    other branch to the beta instance.

    Fixes over the previous version: the unused ``oms_run_path`` renderer
    (dead code) is removed, and the repeated "is this the master branch?"
    predicate is factored into one helper.
    """
    def _is_master(props):
        # Branch property present, non-empty and exactly "master".
        return props.hasProperty("branch") and len(props["branch"]) > 0 and props["branch"] == "master"

    @util.renderer
    def oms_systemd_service(props):
        return "denise-oms" if _is_master(props) else "denise-oms-beta"

    @util.renderer
    def oms_url(props):
        return "https://oms.syanni.eu" if _is_master(props) else "https://beta.oms.syanni.eu"

    @util.renderer
    def oms_path(props):
        return E.OMS_RELEASE_PATH if _is_master(props) else E.OMS_BETA_PATH

    factory = util.BuildFactory()
    # Fresh checkout (copy mode) using the deploy key.
    factory.addStep(steps.Git(logEnviron=False, repourl=E.OMS_GIT_URL,
        submodules=True, sshPrivateKey=open(E.SSH_KEY_PATH).read().rstrip(),
        sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy"))
    # Replace the previous deployment wholesale on the master.
    factory.addStep(steps.MasterShellCommand(command=util.Interpolate("rm -rf %(kw:oms_path)s", oms_path=oms_path)))
    factory.addStep(steps.DirectoryUpload(workersrc="../source",
        masterdest=oms_path,
        url=oms_url))
    factory.addStep(steps.MasterShellCommand(command=util.Interpolate("chmod -R a+rX %(kw:oms_path)s", oms_path=oms_path)))
    factory.addStep(steps.MasterShellCommand(command=util.Interpolate("/run/wrappers/bin/sudo systemctl restart %(kw:oms_service)s.service", oms_service=oms_systemd_service)))

    return util.BuilderConfig(name="DeniseOMS_build", workernames=["generic-worker-denise"], factory=factory)
155 | |||
def denise_aventuriers_factory():
    """Builder for the aventuriers book: renders the HTML tree plus the
    epub/mobi/pdf artifacts and publishes everything on the master."""
    path_env = {
        "PATH": os.environ["BUILDBOT_PATH_Aventuriers"] + ":${PATH}",
        "TZ": "Europe/Paris",
    }

    factory = util.BuildFactory()
    factory.addStep(steps.Git(logEnviron=False, repourl=E.AVENTURIERS_GIT_URL,
        submodules=True, mode="full", method="fresh"))
    factory.addStep(steps.ShellCommand(name="build files",
        logEnviron=False, haltOnFailure=True, workdir="build",
        env=path_env, command=["make", "tout", "encyclo"]))
    # Replace the previous release wholesale, then upload the site and the
    # individual book artifacts.
    factory.addStep(steps.MasterShellCommand(command="rm -rf {}".format(E.AVENTURIERS_RELEASE_PATH)))
    factory.addStep(steps.DirectoryUpload(workersrc="../build/html",
        masterdest=E.AVENTURIERS_RELEASE_PATH,
        url="https://aventuriers.syanni.eu"))
    for label, artifact in (
            ("epub", "aventuriers.epub"),
            ("mobi", "aventuriers.mobi"),
            ("pdf", "aventuriers.pdf"),
            ("encyclo pdf", "encyclo.pdf")):
        factory.addStep(steps.FileUpload(name="upload {} file".format(label),
            workersrc=artifact, workdir="build",
            masterdest=E.AVENTURIERS_RELEASE_PATH + "/" + artifact,
            url="https://aventuriers.syanni.eu/" + artifact, mode=0o644))
    factory.addStep(steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.AVENTURIERS_RELEASE_PATH)))

    return util.BuilderConfig(name="DeniseAventuriers_build", workernames=["generic-worker-denise"], factory=factory)
diff --git a/modules/private/buildbot/projects/immaeEu/__init__.py b/modules/private/buildbot/projects/immaeEu/__init__.py deleted file mode 100644 index 83265cd..0000000 --- a/modules/private/buildbot/projects/immaeEu/__init__.py +++ /dev/null | |||
@@ -1,314 +0,0 @@ | |||
1 | from buildbot.plugins import * | ||
2 | from buildbot_common.build_helpers import * | ||
3 | import os | ||
4 | from buildbot.util import bytes2unicode | ||
5 | import json | ||
6 | |||
7 | __all__ = [ "configure", "E" ] | ||
8 | |||
class E():
    """Static configuration for the immaeEu buildbot project: listen
    sockets, git sources, deployment paths and LDAP/auth settings."""
    PROJECT = "immaeEu"
    BUILDBOT_URL = "https://git.immae.eu/buildbot/" + PROJECT + "/"
    SOCKET = "unix:/run/buildbot/" + PROJECT + ".sock"
    PB_SOCKET = "unix:address=/run/buildbot/" + PROJECT + "_pb.sock"
    SSH_KEY_PATH = "/var/lib/buildbot/buildbot_key"
    SSH_HOST_KEY = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIFbhFTl2A2RJn5L51yxJM4XfCS2ZaiSX/jo9jFSdghF"
    # Space-separated list of JIDs to notify, injected by the environment.
    XMPP_RECIPIENTS = os.environ["BUILDBOT_XMPP_RECIPIENTS"].split(" ")

    # Git sources of the individual sites.
    BIP39_GIT_URL = "https://git.immae.eu/perso/Immae/Projets/Cryptomonnaies/BIP39.git"
    IMMAE_EU_GIT_URL = "gitolite@git.immae.eu:perso/Immae/Sites/Blog"
    HISTORY_GIT_URL = "gitolite@git.immae.eu:perso/Immae/Sites/History"
    RECETTES_GIT_URL = "gitolite@git.immae.eu:perso/Immae/Sites/Recettes"
    COURS_GIT_URL = "gitolite@git.immae.eu:perso/Immae/Sites/Cours"
    DOCS_GIT_URL = "gitolite@git.immae.eu:perso/Immae/Sites/Docs"
    NORMALESUP_GIT_URL = "gitolite@git.immae.eu:perso/Immae/Projets/Sites/Normalesup"

    # Master-side output directories and public download URLs.
    COURS_RELEASE_PATH = "/var/lib/buildbot/outputs/immae/cours"
    COURS_TARBALL_PATH = "/var/lib/ftp/release.immae.eu/cours"
    COURS_TARBALL_URL = "https://release.immae.eu/cours"
    BIP39_RELEASE_PATH = "/var/lib/buildbot/outputs/immae/bip39"
    HISTORY_RELEASE_PATH = "/var/lib/buildbot/outputs/immae/history"
    IMMAE_EU_RELEASE_PATH = "/var/lib/buildbot/outputs/immae/blog"
    DOCS_RELEASE_PATH = "/var/lib/buildbot/outputs/immae/docs"
    RECETTES_RELEASE_PATH = "/var/lib/buildbot/outputs/immae/recettes"
    NORMALESUP_RELEASE_PATH = "/var/lib/buildbot/outputs/immae/recherche"
    GSMCELLS_RELEASE_PATH = "/var/lib/ftp/release.immae.eu/gsm_cells"
    GSMCELLS_RELEASE_URL = "https://release.immae.eu/gsm_cells"

    # Values consumed by the shared master.cfg
    SECRETS_FILE = os.getcwd() + "/secrets"
    LDAP_URL = "ldaps://ldap.immae.eu:636"
    LDAP_ADMIN_USER = "cn=buildbot,ou=services,dc=immae,dc=eu"
    LDAP_BASE = "dc=immae,dc=eu"
    LDAP_PATTERN = "(uid=%(username)s)"
    LDAP_GROUP_PATTERN = "(&(memberOf=cn=groups,ou=immaeEu,cn=buildbot,ou=services,dc=immae,dc=eu)(member=%(dn)s))"
    TITLE_URL = "https://www.immae.eu"
    TITLE = "Immae website"
47 | |||
class CustomBase(webhooks.base):
    """Change-hook dialect accepting a raw JSON change payload."""
    def getChanges(self, request):
        # Any parse/read problem is surfaced as a ValueError so buildbot
        # reports a bad request instead of crashing the hook.
        try:
            change = json.loads(bytes2unicode(request.content.read()))
        except Exception as e:
            raise ValueError("Error loading JSON: " + str(e))

        # Fill in the fields buildbot requires when the sender omits them.
        for key, fallback in (
                ("comments", ""),
                ("repository", ""),
                ("author", change.get("who", "unknown"))):
            change.setdefault(key, fallback)

        return ([change], None)
61 | |||
def configure(c):
    """Wire the immaeEu project into the shared buildbot master config *c*:
    web UI socket, change hook, worker, schedulers, builders and the
    Slack/XMPP build-status notifiers."""
    c["buildbotURL"] = E.BUILDBOT_URL
    c["www"]["port"] = E.SOCKET

    c["www"]["change_hook_dialects"]["base"] = {
        "custom_class": CustomBase
    }
    c["workers"].append(worker.LocalWorker("generic-worker-immae-eu"))

    # One hook-triggered scheduler per site.
    for name in ("ImmaeEu", "Normalesup", "Cours", "Recettes", "Docs",
            "History", "BIP39"):
        c["schedulers"].append(hook_scheduler(name, timer=1))
    # The GSM cells database is refreshed weekly (Sunday, 3am) instead.
    c["schedulers"].append(schedulers.Nightly(name="GSMCells-weekly",
        builderNames=["GSMCells_build"], dayOfWeek=6, hour=3))
    c["schedulers"].append(force_scheduler("force_immae_eu", [
        "ImmaeEu_build", "Normalesup_build", "Cours_build", "Docs_build",
        "Recettes_build", "History_build", "BIP39_build"
    ]))
    # GSMCells has no meaningful codebase, so its force scheduler pins
    # every codebase field to an empty fixed value.
    c["schedulers"].append(schedulers.ForceScheduler(
        name="GSMCells-force", label="Force build",
        buttonName="Force build",
        reason=util.StringParameter(name="reason", label="Reason", default="Force build"),
        codebases=[
            util.CodebaseParameter("",
                branch=util.FixedParameter(name="branch", default=""),
                revision=util.FixedParameter(name="revision", default=""),
                repository=util.FixedParameter(name="repository", default=""),
                project=util.FixedParameter(name="project", default=""),
            ),
        ],
        username=util.FixedParameter(name="username", default="Web button"),
        builderNames=["GSMCells_build"]
    ))

    for make_builder in (immae_eu_factory, normalesup_factory, cours_factory,
            gsm_cells_factory, recettes_factory, docs_factory,
            history_factory, bip39_factory):
        c["builders"].append(make_builder())

    # Both notifiers watch the same set of builders.
    notified = (
        "ImmaeEu_build", "Normalesup_build", "Cours_build", "Docs_build",
        "GSMCells_build", "Recettes_build", "History_build",
        "BIP39_build"
    )
    c["services"].append(SlackStatusPush(
        name="slack_status_immae_eu_project",
        builders=list(notified),
        serverUrl=open(E.SECRETS_FILE + "/slack_webhook", "r").read().rstrip()))
    c["services"].append(XMPPStatusPush(
        name="xmpp_status_immae_eu_project",
        builders=list(notified),
        recipients=E.XMPP_RECIPIENTS,
        password=open(E.SECRETS_FILE + "/notify_xmpp_password", "r").read().rstrip()))
126 | |||
def history_factory():
    """Builder that renders the History jekyll site and publishes it under
    https://www.immae.eu/history."""
    env = {
        "PATH": os.environ["BUILDBOT_PATH_History"] + ":${PATH}"
    }
    build_steps = [
        # Fresh checkout (copy mode) using the deploy key.
        steps.Git(logEnviron=False, repourl=E.HISTORY_GIT_URL,
            submodules=True, sshPrivateKey=open(E.SSH_KEY_PATH).read().rstrip(),
            sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy"),
        steps.ShellCommand(name="build website",
            logEnviron=False, haltOnFailure=True, workdir="source",
            env=env, command=["jekyll", "build"]),
        # Replace the previous release wholesale on the master.
        steps.MasterShellCommand(command="rm -rf {}".format(E.HISTORY_RELEASE_PATH)),
        steps.DirectoryUpload(workersrc="../source/_site",
            masterdest=E.HISTORY_RELEASE_PATH,
            url="https://www.immae.eu/history"),
        steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.HISTORY_RELEASE_PATH)),
    ]
    return util.BuilderConfig(name="History_build",
        workernames=["generic-worker-immae-eu"],
        factory=util.BuildFactory(build_steps))
145 | |||
def docs_factory():
    """Builder that renders the Docs sphinx-style site ("make clean html")
    and publishes it under https://www.immae.eu/docs."""
    env = {
        "PATH": os.environ["BUILDBOT_PATH_Docs"] + ":${PATH}"
    }
    build_steps = [
        # Fresh checkout (copy mode) using the deploy key.
        steps.Git(logEnviron=False, repourl=E.DOCS_GIT_URL,
            submodules=True, sshPrivateKey=open(E.SSH_KEY_PATH).read().rstrip(),
            sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy"),
        steps.ShellCommand(name="build website",
            logEnviron=False, haltOnFailure=True, workdir="source",
            env=env, command=["make", "clean", "html"]),
        # Replace the previous release wholesale on the master.
        steps.MasterShellCommand(command="rm -rf {}".format(E.DOCS_RELEASE_PATH)),
        steps.DirectoryUpload(workersrc="../source/_build/html",
            masterdest=E.DOCS_RELEASE_PATH,
            url="https://www.immae.eu/docs"),
        steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.DOCS_RELEASE_PATH)),
    ]
    return util.BuilderConfig(name="Docs_build",
        workernames=["generic-worker-immae-eu"],
        factory=util.BuildFactory(build_steps))
164 | |||
def recettes_factory():
    """Builder that renders the Recettes jekyll site inside a nix shell and
    publishes it under https://www.immae.eu/recettes."""
    env = {
        "PATH": os.environ["BUILDBOT_PATH_Recettes"] + ":${PATH}"
    }
    build_steps = [
        # Fresh checkout (copy mode) using the deploy key.
        steps.Git(logEnviron=False, repourl=E.RECETTES_GIT_URL,
            submodules=True, sshPrivateKey=open(E.SSH_KEY_PATH).read().rstrip(),
            sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy"),
        # Built via NixShellCommand so the jekyll environment comes from
        # the repository's nix expression.
        NixShellCommand(name="build website",
            logEnviron=False, haltOnFailure=True, workdir="source",
            env=env, command="jekyll build --trace --baseurl /recettes"),
        # Replace the previous release wholesale on the master.
        steps.MasterShellCommand(command="rm -rf {}".format(E.RECETTES_RELEASE_PATH)),
        steps.DirectoryUpload(workersrc="../source/_site",
            masterdest=E.RECETTES_RELEASE_PATH,
            url="https://www.immae.eu/recettes"),
        steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.RECETTES_RELEASE_PATH)),
    ]
    return util.BuilderConfig(name="Recettes_build",
        workernames=["generic-worker-immae-eu"],
        factory=util.BuildFactory(build_steps))
183 | |||
def bip39_factory():
    """Builder that compiles the standalone BIP39 HTML page and publishes
    it as https://tools.immae.eu/BIP39."""
    env = {
        "PATH": os.environ["BUILDBOT_PATH_BIP39"] + ":${PATH}"
    }
    build_steps = [
        # Public repository: no deploy key needed here.
        steps.Git(logEnviron=False, repourl=E.BIP39_GIT_URL,
            submodules=True, mode="full", method="copy"),
        steps.ShellCommand(name="build file",
            logEnviron=False, haltOnFailure=True, workdir="source",
            env=env, command=["python", "compile.py"]),
        steps.FileUpload(name="upload file", workersrc="bip39-standalone.html",
            workdir="source", masterdest=E.BIP39_RELEASE_PATH + "/index.html",
            url="https://tools.immae.eu/BIP39", mode=0o644),
        steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.BIP39_RELEASE_PATH)),
    ]
    return util.BuilderConfig(name="BIP39_build",
        workernames=["generic-worker-immae-eu"],
        factory=util.BuildFactory(build_steps))
200 | |||
def immae_eu_factory():
    """Builder that renders the main blog ("make html") and publishes it
    as https://www.immae.eu."""
    env = {
        "PATH": os.environ["BUILDBOT_PATH_ImmaeEu"] + ":${PATH}"
    }
    build_steps = [
        # Fresh checkout (copy mode) using the deploy key.
        steps.Git(logEnviron=False, repourl=E.IMMAE_EU_GIT_URL,
            submodules=True, sshPrivateKey=open(E.SSH_KEY_PATH).read().rstrip(),
            sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy"),
        steps.ShellCommand(name="build website",
            logEnviron=False, haltOnFailure=True, workdir="source",
            env=env, command=["make", "html"]),
        # Replace the previous release wholesale on the master.
        steps.MasterShellCommand(command="rm -rf {}".format(E.IMMAE_EU_RELEASE_PATH)),
        steps.DirectoryUpload(workersrc="../source/output",
            masterdest=E.IMMAE_EU_RELEASE_PATH,
            url="https://www.immae.eu"),
        steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.IMMAE_EU_RELEASE_PATH)),
    ]
    return util.BuilderConfig(name="ImmaeEu_build",
        workernames=["generic-worker-immae-eu"],
        factory=util.BuildFactory(build_steps))
219 | |||
def cours_factory():
    """Builder for the Cours site: publishes the HTML tree under
    https://www.immae.eu/cours and a per-branch tarball of the generated
    pdfs on the release host."""
    env = {
        "PATH": os.environ["BUILDBOT_PATH_Cours"] + ":${PATH}",
        "CI": "yes"
    }
    # Tarball name/destination/URL all share the branch-derived suffix.
    tarball = util.Interpolate("cours_%(kw:clean_branch)s.tar.gz", clean_branch=clean_branch)
    dest_template = "{0}/cours_%(kw:clean_branch)s.tar.gz"
    tarball_dest = util.Interpolate(dest_template.format(E.COURS_TARBALL_PATH), clean_branch=clean_branch)
    tarball_url = util.Interpolate(dest_template.format(E.COURS_TARBALL_URL), clean_branch=clean_branch)

    build_steps = [
        # Fresh checkout (copy mode) using the deploy key.
        steps.Git(logEnviron=False, repourl=E.COURS_GIT_URL,
            submodules=True, sshPrivateKey=open(E.SSH_KEY_PATH).read().rstrip(),
            sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy"),
        steps.ShellCommand(name="build website",
            logEnviron=False, haltOnFailure=True, workdir="source",
            command=["make", "build"], env=env),
        # Replace the previous release wholesale on the master.
        steps.MasterShellCommand(command="rm -rf {}".format(E.COURS_RELEASE_PATH)),
        steps.DirectoryUpload(workersrc="../source/build",
            masterdest=E.COURS_RELEASE_PATH,
            url="https://www.immae.eu/cours"),
        steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.COURS_RELEASE_PATH)),
        # Second phase: build the pdfs and bundle them per branch.
        steps.ShellCommand(name="build pdfs",
            logEnviron=False, haltOnFailure=True, workdir="source",
            command=["make", "pdfs"], env=env),
        steps.ShellCommand(name="build pdf tarball",
            logEnviron=False, haltOnFailure=True, workdir="source",
            command=["tar", "-cvf", tarball, "-C", "pdfs", "mp", "mpsi"], env=env),
        steps.FileUpload(name="upload package", workersrc=tarball,
            workdir="source", masterdest=tarball_dest,
            url=tarball_url, mode=0o644),
    ]
    return util.BuilderConfig(name="Cours_build",
        workernames=["generic-worker-immae-eu"],
        factory=util.BuildFactory(build_steps))
254 | |||
def normalesup_factory():
    """Builder for the research site: builds it, rsyncs the result to the
    phare host and keeps a copy at https://www.immae.eu/recherche."""
    env = {
        "PATH": os.environ["BUILDBOT_PATH_Normalesup"] + ":${PATH}"
    }
    # Host key checking is disabled on purpose: the target host is
    # addressed by IP/alias and uses the buildbot deploy key.
    rsync_rsh = "ssh -i {} -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o CheckHostIP=no".format(E.SSH_KEY_PATH)

    build_steps = [
        steps.Git(logEnviron=False, repourl=E.NORMALESUP_GIT_URL,
            submodules=True, sshPrivateKey=open(E.SSH_KEY_PATH).read().rstrip(),
            sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy"),
        steps.ShellCommand(name="build website",
            logEnviron=False, haltOnFailure=True, workdir="source",
            command=["make", "build"], env=env),
        steps.ShellCommand(name="give read access to all files",
            logEnviron=False, haltOnFailure=True, workdir="source",
            command="chmod -R a+rX build", env=env),
        steps.ShellCommand(name="synchronize with phare",
            logEnviron=False, haltOnFailure=True, workdir="source",
            env=env, command=[
                "rsync", "-av", "--delete",
                "-e", rsync_rsh,
                "build/",
                os.environ["BUILDBOT_NORMALESUP_HOST"]
            ]),
        steps.MasterShellCommand(command="rm -rf {}".format(E.NORMALESUP_RELEASE_PATH)),
        steps.DirectoryUpload(workersrc="../source/build",
            masterdest=E.NORMALESUP_RELEASE_PATH,
            url="https://www.immae.eu/recherche"),
        steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.NORMALESUP_RELEASE_PATH)),
    ]
    return util.BuilderConfig(name="Normalesup_build",
        workernames=["generic-worker-immae-eu"],
        factory=util.BuildFactory(build_steps))
283 | |||
def gsm_cells_factory():
    """Weekly builder that regenerates the lacells.db GSM cell database via
    the lacells_download script and publishes it on the release host."""
    env = {
        "PATH": os.environ["BUILDBOT_PATH_GSMCells"] + ":${PATH}",
        "IN_BUILDBOT": "yes",
    }
    # Forward every BUILDBOT_GSM_CELLS_* variable to the script with the
    # prefix stripped (download tokens, country filters, ...).
    prefix = "BUILDBOT_GSM_CELLS_"
    env.update({k[len(prefix):]: v
        for k, v in os.environ.items() if k.startswith(prefix)})

    master_env = {
        "HTACCESS": '''
Options +FollowSymLinks
IndexIgnore *
'''
    }

    script = os.environ["BUILDBOT_PROJECT_DIR"] + "/scripts/lacells_download"
    build_steps = [
        steps.ShellCommand(name="download files",
            logEnviron=False, haltOnFailure=True, command=[script], env=env),
        steps.ShellCommand(name="give read access to all files",
            logEnviron=False, haltOnFailure=True,
            command="chmod a+r lacells.db", env=env),
        steps.FileUpload(workersrc="lacells.db",
            masterdest=(E.GSMCELLS_RELEASE_PATH+"/lacells.db"), url=(E.GSMCELLS_RELEASE_URL+"/lacells.db")),
        # Keep the release out of backups and hide the directory listing.
        steps.MasterShellCommand(command="touch {}/.duplicity-ignore".format(E.GSMCELLS_RELEASE_PATH)),
        steps.MasterShellCommand(command='echo "$HTACCESS" > {}/.htaccess'.format(E.GSMCELLS_RELEASE_PATH),
            env=master_env),
        steps.MasterShellCommand(command="ln -sf lacells.db {}/lacells.db.new".format(E.GSMCELLS_RELEASE_PATH)),
        steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.GSMCELLS_RELEASE_PATH)),
    ]
    return util.BuilderConfig(name="GSMCells_build",
        workernames=["generic-worker-immae-eu"],
        factory=util.BuildFactory(build_steps))
diff --git a/modules/private/buildbot/projects/immaeEu/scripts/lacells_download b/modules/private/buildbot/projects/immaeEu/scripts/lacells_download deleted file mode 100755 index 1193cf3..0000000 --- a/modules/private/buildbot/projects/immaeEu/scripts/lacells_download +++ /dev/null | |||
@@ -1,163 +0,0 @@ | |||
1 | #!/usr/bin/env bash | ||
2 | |||
3 | # FLG - Fast Lacells.db Generator | ||
4 | # | ||
5 | # Simple script to quickly download and generate lacells.db for LocalGSMBackend by n76 | ||
6 | # https://github.com/n76/Local-GSM-Backend | ||
7 | # Uses Mozilla Location Service, OpenCellID and radiocells.org databases as source | ||
8 | # Based on lacells-creator by wvengen and n76 | ||
9 | # | ||
10 | # Licensed under GPLv3 or later | ||
11 | # (C)2016 Sebastian Obrusiewicz | ||
12 | # sobrus@o2.pl | ||
13 | |||
# Configuration: when run from buildbot ($IN_BUILDBOT set), the data-source
# switches below are expected to be provided through the environment instead.
if [ -z "$IN_BUILDBOT" ]; then
#DEFAULT_CONFIG_BEGIN
ENABLE_OCI=1 #enable OpenCellID data source
ENABLE_MOZ=1 #enable Mozilla Location Services (MLS) data source
ENABLE_RCO=0 #enable radiocells.org data source (it can be quite slow)

# See https://en.wikipedia.org/wiki/Mobile_country_code
# 208 France
MCC="" #country codes separated with "|", for example "260|262". Leave dot+asterisk ".*" for all countries
RCO_SRC_FILE="fr.sqlite" #radiocells.org source database file, set "openbmap.sqlite" for entire world database, see https://radiocells.org/downloads for smaller country specific files, for example "pl.sqlite" for Poland
RADIO="" #you can remove LTE if your phone does not support it
TOKEN="" #your OCID token, required to download from OpenCellID. Get your free token at https://opencellid.org/
fi
TMPDIR='.' #for temporary files only, use disk if you don't have enough RAM, AND remember to have enough disk space in /var/tmp for sqlite temporary files
KEEP_FILES=1 #whether to keep (1) or delete (0) the CSV files after processing

#do not edit following variables, unless you know what you're doing
EMPTY=',,,,,,,,,,,,,' #dummy empty file for disabled sources
OCI_FILE=$TMPDIR"/ocid.csv" #opencellid temporary file
MOZ_FILE=$TMPDIR"/mozilla.csv" #mozilla temporary file
RCO_FILE=$TMPDIR"/rco.csv" #radiocells.org temporary file
#DEFAULT_CONFIG_END

# Optional local overrides: source a "config" file next to this script.
#USER_CONFIG_BEGIN
BINDIR=$( dirname "$(readlink -f "$0")" ) #"
if [[ -f "${BINDIR}/config" ]]; then
. "${BINDIR}/config"
fi
#USER_CONFIG_END
43 | |||
# manage_backup FILE
# Keep a gzip backup of a freshly generated FILE (when KEEP_FILES=1), or —
# when FILE is an empty/missing .csv but a FILE.gz backup exists — restore
# it from that backup so a failed download can fall back to the last run.
# Fix over the previous version: all $file expansions are quoted so paths
# containing spaces or glob characters cannot word-split.
function manage_backup
{
  file=$1
  if [ -s "$file" ]
  then
    if [ "$KEEP_FILES" == "1" ]
    then
      gzip -kf "$file"
    fi
  elif [ -s "$file.gz" ] && [ "${file##*.}" == "csv" ]
  then
    echo "Using $file.gz backup file"
    gzip -dkf "$file.gz"
  fi
}
59 | |||
60 | |||
# Download the OpenCellID full export (needs $TOKEN), filtered to the
# configured $RADIO/$MCC, into $OCI_FILE; writes a dummy empty record when
# the source is disabled so later steps still see a non-empty file.
function download_ocid
{
URL="https://opencellid.org/ocid/downloads?token=${TOKEN}&type=full&file=cell_towers.csv.gz"
if [ $ENABLE_OCI == "1" ]
then
wget -qO- "$URL" | gunzip | egrep "^($RADIO),($MCC)," > $OCI_FILE
manage_backup $OCI_FILE
else
echo $EMPTY > $OCI_FILE
fi
}
72 | |||
# Download today's Mozilla Location Services full cell export, filtered to
# the configured $RADIO/$MCC, into $MOZ_FILE; writes a dummy empty record
# when the source is disabled.
function download_mozilla
{
if [ $ENABLE_MOZ == "1" ]
then
# The MLS export is published under a date-stamped name (UTC).
NW=`date -u "+%Y-%m-%d"`
wget -qO- "https://d17pt8qph6ncyq.cloudfront.net/export/MLS-full-cell-export-${NW}T000000.csv.gz" | gunzip | egrep "^($RADIO),($MCC)," > $MOZ_FILE
manage_backup $MOZ_FILE
else
echo $EMPTY > $MOZ_FILE
fi
}
84 | |||
# Download the radiocells.org sqlite database ($RCO_SRC_FILE), convert it to
# the common CSV layout via sqlite3, filter to $RADIO/$MCC and store it in
# $RCO_FILE; writes a dummy empty record when the source is disabled.
function download_radiocells
{
if [ $ENABLE_RCO == "1" ]
then
# Project the sqlite schema onto the same 14-column CSV layout as the
# other sources (missing columns filled with NULL, accuracy fixed at 1000).
RCO_SELECT="SELECT technology, mcc, mnc, area, cid, NULL, longitude, latitude, 1000 accuracy, measurements, NULL, NULL, NULL, NULL FROM cell_zone;"
wget -qO- "https://cdn.radiocells.org/"$RCO_SRC_FILE > $TMPDIR"/"$RCO_SRC_FILE
sqlite3 -header -csv $TMPDIR"/"$RCO_SRC_FILE "$RCO_SELECT" | egrep "^($RADIO),($MCC)," > $RCO_FILE
rm $TMPDIR"/"$RCO_SRC_FILE
manage_backup $RCO_FILE
else
echo $EMPTY > $RCO_FILE
fi
}
98 | |||
echo "Downloading data"

# Fetch the three sources in parallel and wait for all of them.
download_ocid &
OP=$!
download_mozilla &
MO=$!
download_radiocells &
RO=$!

wait $OP
wait $MO
wait $RO

# Only rebuild the database when every source produced a non-empty file
# (disabled sources contain the dummy $EMPTY record, so they pass too).
if [ -s $MOZ_FILE ] && [ -s $OCI_FILE ] && [ -s $RCO_FILE ]
then

# Back up the previous database before regenerating it from scratch.
manage_backup lacells.db
rm lacells.db

echo "Generating database"

# Import all CSVs into a temporary table, then merge duplicate cells by
# (mcc, mnc, lac, cid) using sample-count-weighted averages of position
# and accuracy, clamp accuracy to [500, 100000] and index the result.
sqlite3 lacells.db <<-SQL
PRAGMA synchronous = OFF;
PRAGMA journal_mode = OFF;

CREATE TEMP TABLE cells_import (radio TEXT,mcc INTEGER,mnc INTEGER,lac INTEGER,cid INTEGER,unit STRING,longitude NUMERIC,latitude NUMERIC,accuracy INTEGER,samples INTEGER,changeable BOOLEAN,created INTEGER,updated INTEGER, avgSignal INTEGER);
CREATE TABLE cells (mcc INTEGER,mnc INTEGER,lac INTEGER,cid INTEGER,longitude REAL,latitude REAL,altitude REAL,accuracy REAL,samples INTEGER);

.header on
.mode csv

.import "$OCI_FILE" cells_import
.import "$MOZ_FILE" cells_import
.import "$RCO_FILE" cells_import

UPDATE cells_import SET samples=1 WHERE samples IS NULL OR samples < 1;

INSERT INTO cells
SELECT mcc, mnc, lac, cid,
sum(longitude * samples) / sum(samples) as longitude,
sum(latitude * samples) / sum(samples) as latitude,
-1 as altitude,
sum(accuracy * samples) / sum(samples) as accuracy,
sum(samples) as samples
FROM cells_import
GROUP BY mcc, mnc, lac, cid;

DROP TABLE cells_import;

UPDATE cells SET accuracy=500 WHERE accuracy < 500;
UPDATE cells SET accuracy=100000 WHERE accuracy > 100000;

CREATE INDEX _idx1 ON cells (mcc, mnc, lac, cid);
CREATE INDEX _idx2 ON cells (lac, cid);

VACUUM;
SQL

else
echo "Download error"
fi

# Always clean up the intermediate CSVs (backups were made above).
rm $OCI_FILE
rm $MOZ_FILE
rm $RCO_FILE
diff --git a/modules/private/buildbot/projects/test/__init__.py b/modules/private/buildbot/projects/test/__init__.py deleted file mode 100644 index e2f6f82..0000000 --- a/modules/private/buildbot/projects/test/__init__.py +++ /dev/null | |||
@@ -1,197 +0,0 @@ | |||
1 | from buildbot.plugins import * | ||
2 | from buildbot_common.build_helpers import * | ||
3 | import buildbot_common.libvirt as ilibvirt | ||
4 | import os | ||
5 | from buildbot.util import bytes2unicode | ||
6 | import json | ||
7 | |||
8 | __all__ = [ "configure", "E" ] | ||
9 | |||
class E():
    """Static configuration for the "test" buildbot project.

    Groups every external endpoint, path and credential location used by
    this project's builders, schedulers and notifiers.  Values that differ
    per deployment are pulled from the environment at import time.
    """
    PROJECT = "test"
    BUILDBOT_URL = f"https://git.immae.eu/buildbot/{PROJECT}/"
    SOCKET = f"unix:/run/buildbot/{PROJECT}.sock"
    # Worker-facing PB endpoint, provided by the environment.
    PB_SOCKET = os.environ["BUILDBOT_WORKER_PORT"]
    WORKER_HOST = f"{os.environ['BUILDBOT_HOST']}:{PB_SOCKET}"
    RELEASE_PATH = f"/var/lib/ftp/release.immae.eu/{PROJECT}"
    RELEASE_URL = f"https://release.immae.eu/{PROJECT}"
    GIT_URL = "https://git.immae.eu/perso/Immae/TestProject.git"
    SSH_KEY_PATH = "/var/lib/buildbot/buildbot_key"
    LIBVIRT_URL = os.environ["BUILDBOT_VIRT_URL"] + "?keyfile=" + SSH_KEY_PATH
    PUPPET_HOST = "root@backup-1.v.immae.eu"
    LDAP_HOST = "ldap.immae.eu"
    LDAP_DN = "cn=buildbot,ou=services,dc=immae,dc=eu"
    LDAP_ROLES_BASE = "ou=roles,ou=hosts,dc=immae,dc=eu"
    XMPP_RECIPIENTS = os.environ["BUILDBOT_XMPP_RECIPIENTS"].split(" ")

    # master.cfg
    SECRETS_FILE = os.getcwd() + "/secrets"
    LDAP_URL = "ldaps://ldap.immae.eu:636"
    LDAP_ADMIN_USER = "cn=buildbot,ou=services,dc=immae,dc=eu"
    LDAP_BASE = "dc=immae,dc=eu"
    LDAP_PATTERN = "(uid=%(username)s)"
    LDAP_GROUP_PATTERN = "(&(memberOf=cn=groups,ou=test,cn=buildbot,ou=services,dc=immae,dc=eu)(member=%(dn)s))"
    TITLE_URL = "https://git.immae.eu/?p=perso/Immae/TestProject.git;a=summary"
    TITLE = "Test project"
36 | |||
class CustomBase(webhooks.base):
    """Change-hook dialect that accepts a plain JSON POST body.

    Fills in defaults for the optional ``comments``/``repository``/``author``
    fields, and rewrites ``deploy_webhook`` payloads into a fixed synthetic
    change carrying the target environment and build archive name.
    """
    def getChanges(self, request):
        try:
            content = request.content.read()
            args = json.loads(bytes2unicode(content))
        except Exception as e:
            raise ValueError("Error loading JSON: " + str(e))

        args.setdefault("comments", "")
        args.setdefault("repository", "")
        args.setdefault("author", args.get("who", "unknown"))

        # Bug fix: use .get() — a payload without a "category" key used to
        # raise KeyError here instead of being treated as a regular change.
        if args.get("category") == "deploy_webhook":
            args = {
                "category": "deploy_webhook",
                "comments": "",
                "repository": "",
                "author": "unknown",
                "project": "TestProject",
                "properties": {
                    "environment": args.get("environment", "integration"),
                    "build": "test_{}.tar.gz".format(args.get("branch", "master"))
                    }
                }

        return ([args], None)
63 | |||
def deploy_hook_scheduler(project, timer=1):
    """Return a scheduler firing the ``<project>_deploy`` builder.

    It reacts only to changes injected by the ``deploy_webhook`` category
    for the given project, waiting *timer* seconds of tree stability.
    """
    deploy_name = "{}_deploy".format(project)
    deploy_filter = util.ChangeFilter(category="deploy_webhook", project=project)
    return schedulers.AnyBranchScheduler(
        change_filter=deploy_filter,
        name=deploy_name,
        treeStableTimer=timer,
        builderNames=[deploy_name])
68 | |||
def configure(c):
    """Wire the test project into the buildbot master configuration *c*.

    Registers the web UI socket and change hook, two libvirt-backed workers
    (build and deploy), the build/deploy schedulers, both builders, and the
    Slack and XMPP status notifiers.
    """
    def read_secret(name):
        # Secrets are single-line files under E.SECRETS_FILE.  Use a context
        # manager so the handle is closed (the previous open().read() calls
        # leaked file descriptors).
        with open(E.SECRETS_FILE + "/" + name, "r") as f:
            return f.read().rstrip()

    c["buildbotURL"] = E.BUILDBOT_URL
    c["www"]["port"] = E.SOCKET

    c["www"]["change_hook_dialects"]["base"] = {
            "custom_class": CustomBase
            }

    # Both workers share the same password and libvirt hypervisor; read the
    # secret once instead of twice.
    worker_password = read_secret("worker_password")
    for worker_name in ("test-build", "test-deploy"):
        c['workers'].append(ilibvirt.LibVirtWorker(worker_name,
            worker_password,
            ilibvirt.Connection(E.LIBVIRT_URL),
            E.WORKER_HOST))

    c['schedulers'].append(hook_scheduler("TestProject", timer=1))
    c['schedulers'].append(force_scheduler("force_test", ["TestProject_build"]))
    c['schedulers'].append(deploy_scheduler("deploy_test", ["TestProject_deploy"]))
    c['schedulers'].append(deploy_hook_scheduler("TestProject", timer=1))

    c['builders'].append(factory())
    c['builders'].append(deploy_factory())

    c['services'].append(SlackStatusPush(
        name="slack_status_test_project",
        builders=["TestProject_build", "TestProject_deploy"],
        serverUrl=read_secret("slack_webhook")))
    c['services'].append(XMPPStatusPush(
        name="xmpp_status_test_project",
        builders=["TestProject_build", "TestProject_deploy"],
        recipients=E.XMPP_RECIPIENTS,
        password=read_secret("notify_xmpp_password")))
102 | |||
def factory():
    """Build factory for ``TestProject_build``.

    Checks out the repository, runs a few cheap diagnostic steps, then
    packages the tree and uploads it to the release area.
    """
    package = util.Interpolate("test_%(kw:clean_branch)s.tar.gz", clean_branch=clean_branch)
    package_dest = util.Interpolate(f"{E.RELEASE_PATH}/test_%(kw:clean_branch)s.tar.gz", clean_branch=clean_branch)
    package_url = util.Interpolate(f"{E.RELEASE_URL}/test_%(kw:clean_branch)s.tar.gz", clean_branch=clean_branch)

    build = util.BuildFactory()
    build.addStep(steps.Git(logEnviron=False,
        repourl=E.GIT_URL, mode="full", method="copy"))
    # Diagnostic shell steps, kept in declaration order.
    for step_name, step_command in (
            ("env", ["env"]),
            ("pwd", ["pwd"]),
            ("true", ["true"]),
            ("echo", ["echo", package]),
            ):
        build.addStep(steps.ShellCommand(name=step_name,
            logEnviron=False, command=step_command))
    build.addSteps(package_and_upload(package, package_dest, package_url))

    return util.BuilderConfig(name="TestProject_build", workernames=["test-build"], factory=build)
122 | |||
123 | |||
def compute_build_infos():
    """Return a renderer yielding version and sha256 of the uploaded artifact.

    The renderer reads the ``build`` property (an archive named
    ``test_<version>.tar.gz`` in the release directory), extracts the
    version from the file name and hashes the file contents.
    """
    @util.renderer
    def compute(props):
        import hashlib
        import re
        build_file = props.getProperty("build")
        artifact_path = "{}/{}".format(E.RELEASE_PATH, build_file)
        version = re.match(r"test_(.*).tar.gz", build_file).group(1)
        with open(artifact_path, "rb") as fh:
            digest = hashlib.sha256(fh.read()).hexdigest()
        return {
                "build_version": version,
                "build_hash": digest,
                }
    return compute
138 | |||
@util.renderer
def puppet_host(props):
    """Render the (constant) puppet deployment target host."""
    return E.PUPPET_HOST
142 | |||
def deploy_factory():
    """Build factory for ``TestProject_deploy``.

    Verifies the requested artifact exists on the master, computes its
    version/hash, pushes them to LDAP, then triggers puppet over ssh.
    """
    package_dest = util.Interpolate(f"{E.RELEASE_PATH}/%(prop:build)s")

    deploy = util.BuildFactory()
    # Fail fast if the requested build archive is absent from the release dir.
    deploy.addStep(steps.MasterShellCommand(command=["test", "-f", package_dest]))
    deploy.addStep(steps.SetProperties(properties=compute_build_infos()))
    deploy.addStep(LdapPush(
        environment=util.Property("environment"),
        build_version=util.Property("build_version"),
        build_hash=util.Property("build_hash"),
        ldap_password=util.Secret("ldap")))
    # Host key checks are disabled: the target VM is reinstalled regularly.
    ssh_options = [
        "-o", "UserKnownHostsFile=/dev/null",
        "-o", "StrictHostKeyChecking=no",
        "-o", "CheckHostIP=no",
        ]
    deploy.addStep(steps.MasterShellCommand(
        command=["ssh"] + ssh_options + ["-i", E.SSH_KEY_PATH, puppet_host]))
    return util.BuilderConfig(name="TestProject_deploy", workernames=["test-deploy"], factory=deploy)
156 | |||
157 | from twisted.internet import defer | ||
158 | from buildbot.process.buildstep import FAILURE | ||
159 | from buildbot.process.buildstep import SUCCESS | ||
160 | from buildbot.process.buildstep import BuildStep | ||
161 | |||
class LdapPush(BuildStep):
    """Build step that records a deployed build's version and hash in LDAP.

    Looks up the puppet role entry ``cn=test.<environment>`` under
    E.LDAP_ROLES_BASE and rewrites the JSON blob stored in its
    ``immaePuppetJson`` attribute so that ``test_version`` and
    ``test_sha256`` match the build being deployed.
    """
    name = "LdapPush"
    # All four inputs may be Properties/Secrets; buildbot renders them
    # before run() is called.
    renderables = ["environment", "build_version", "build_hash", "ldap_password"]

    def __init__(self, **kwargs):
        # Pop our own arguments before handing the rest to BuildStep.
        self.environment = kwargs.pop("environment")
        self.build_version = kwargs.pop("build_version")
        self.build_hash = kwargs.pop("build_hash")
        self.ldap_password = kwargs.pop("ldap_password")
        # Overridable target host; defaults to the project-wide LDAP server.
        self.ldap_host = kwargs.pop("ldap_host", E.LDAP_HOST)
        super().__init__(**kwargs)

    def run(self):
        import json
        from ldap3 import Reader, Writer, Server, Connection, ObjectDef
        server = Server(self.ldap_host)
        conn = Connection(server,
                user=E.LDAP_DN,
                password=self.ldap_password)
        # NOTE(review): bind() result is not checked and the connection is
        # never unbound — an auth failure surfaces later as an ldap3 error.
        conn.bind()
        obj = ObjectDef("immaePuppetClass", conn)
        r = Reader(conn, obj,
                "cn=test.{},{}".format(self.environment, E.LDAP_ROLES_BASE))
        r.search()
        if len(r) > 0:
            w = Writer.from_cursor(r)
            # Each attribute value is an independent JSON document; only the
            # one containing a "test_version" key is replaced in place
            # (remove old value, add updated dump) and committed.
            for value in w[0].immaePuppetJson.values:
                config = json.loads(value)
                if "test_version" in config:
                    config["test_version"] = self.build_version
                    config["test_sha256"] = self.build_hash
                    w[0].immaePuppetJson -= value
                    w[0].immaePuppetJson += json.dumps(config, indent=" ")
                    w.commit()
            # Success as long as the role entry exists, even if no value
            # carried a "test_version" key.
            return defer.succeed(SUCCESS)
        return defer.succeed(FAILURE)