from shutil import which
__all__ = [
- "force_scheduler", "deploy_scheduler", "hook_scheduler",
+ "force_scheduler", "deploy_scheduler", "git_hook_scheduler",
"clean_branch", "package_and_upload", "SlackStatusPush",
- "XMPPStatusPush", "NixShellCommand"
+ "XMPPStatusPush", "LdapEdit", "NixShellCommand",
+ "all_builder_names", "compute_build_infos", "deploy_ssh_command",
+ "configure_slack_push", "configure_xmpp_push", "deploy_hook_scheduler",
]
# Small helpers
def package_and_upload(package, package_dest, package_url):
return [
steps.ShellCommand(name="build package",
- logEnviron=False, haltOnFailure=True, workdir="source",
+ logEnviron=False, haltOnFailure=True,
command=["git", "archive", "HEAD", "-o", package]),
steps.FileUpload(name="upload package", workersrc=package,
- workdir="source", masterdest=package_dest,
+ masterdest=package_dest,
url=package_url, mode=0o644),
steps.ShellCommand(name="cleanup package", logEnviron=False,
- haltOnFailure=True, workdir="source", alwaysRun=True,
+ haltOnFailure=True, alwaysRun=True,
command=["rm", "-f", package]),
]
# Steps
class NixShellCommand(steps.ShellCommand):
def __init__(self, command=None, pure=True, nixfile=None, **kwargs):
- assert(isinstance(command, str))
oldpath = kwargs.get("env", {}).get("PATH", None)
if which("nix-shell", path=oldpath) is None:
kwargs["env"] = kwargs.get("env", {})
super().__init__(command=nixcommand, **kwargs)
# Schedulers
-def force_scheduler(name, builders):
+def force_scheduler(name, builders, nobranch=False):
+ if nobranch:
+ branch = util.FixedParameter(name="branch", default="")
+ else:
+ branch=util.StringParameter(name="branch", label="Git reference (tag, branch)", required=True)
+
return schedulers.ForceScheduler(name=name,
label="Force build", buttonName="Force build",
reason=util.StringParameter(name="reason", label="Reason", default="Force build"),
codebases=[
util.CodebaseParameter("",
- branch=util.StringParameter(
- name="branch", label="Git reference (tag, branch)", required=True),
+ branch=branch,
revision=util.FixedParameter(name="revision", default=""),
repository=util.FixedParameter(name="repository", default=""),
project=util.FixedParameter(name="project", default=""),
]
)
-def hook_scheduler(project, timer=10):
+def git_hook_scheduler(project, builders=None, timer=1):
+    """Schedule the given builders whenever a gitolite hook reports a change
+    for *project*; defaults to the single "<project>_build" builder.
+    """
+    # None instead of a mutable [] default: a shared default list would leak
+    # state across calls if it were ever mutated.  An empty/None argument
+    # behaves exactly like the old len(builders) == 0 check.
+    if not builders:
+        builders = ["{}_build".format(project)]
+    return schedulers.AnyBranchScheduler(
+        change_filter=util.ChangeFilter(category="gitolite-hooks", project=project),
+        name="{}_git_hook".format(project), treeStableTimer=timer, builderNames=builders)
+
+def deploy_hook_scheduler(project, builders, timer=1):
return schedulers.AnyBranchScheduler(
- change_filter=util.ChangeFilter(category="hooks", project=project),
- name=project, treeStableTimer=timer, builderNames=["{}_build".format(project)])
+ change_filter=util.ChangeFilter(category="deploy_webhook", project=project),
+ name="{}_deploy".format(project), treeStableTimer=timer, builderNames=builders)
+
+# Builders
+def all_builder_names(c):
+    """Return the name of every builder registered so far in config *c*."""
+    return [builder.name for builder in c['builders']]
# Slack/XMPP status push
from buildbot.reporters.http import HttpStatusPushBase
"attachments": attachments,
}
+def configure_slack_push(c, secrets_file, builders):
+    """Register a SlackStatusPush service for *builders*, reading the webhook
+    URL from "<secrets_file>/slack_webhook".
+    """
+    # Use a context manager so the secrets file handle is closed promptly
+    # instead of being left to garbage collection.
+    with open(secrets_file + "/slack_webhook", "r") as f:
+        server_url = f.read().rstrip()
+    c['services'].append(SlackStatusPush(
+        name="slack_status", builders=builders, serverUrl=server_url))
+
class XMPPStatusPush(HttpStatusPushBase):
name = "XMPPStatusPush"
)
return text
+
+def configure_xmpp_push(c, secrets_file, builders, recipients):
+    """Register an XMPPStatusPush service for *builders*, reading the account
+    password from "<secrets_file>/notify_xmpp_password".
+    """
+    # Context manager avoids leaking the secrets file handle.
+    with open(secrets_file + "/notify_xmpp_password", "r") as f:
+        password = f.read().rstrip()
+    c['services'].append(XMPPStatusPush(
+        name="xmpp_status", builders=builders, recipients=recipients,
+        password=password))
+
+# LDAP edit
+from buildbot.process.buildstep import FAILURE
+from buildbot.process.buildstep import SUCCESS
+from buildbot.process.buildstep import BuildStep
+
+class LdapEdit(BuildStep):
+    """Buildbot step that records a build's version and sha256 in LDAP.
+
+    Looks up the role entry "cn=<ldap_cn_template.format(environment)>,
+    <ldap_roles_base>" (objectClass immaePuppetClass) and rewrites the
+    immaePuppetJson attribute value whose JSON contains "<config_key>_version",
+    setting "<config_key>_version" and "<config_key>_sha256".
+    """
+    name = "LdapEdit"
+    # Rendered at build time so util.Property/util.Secret arguments resolve.
+    renderables = ["environment", "build_version", "build_hash", "ldap_password"]
+
+    def __init__(self, **kwargs):
+        # Pop our own configuration keys before delegating to BuildStep,
+        # which would reject unknown keyword arguments.
+        self.environment = kwargs.pop("environment")
+        self.build_version = kwargs.pop("build_version")
+        self.build_hash = kwargs.pop("build_hash")
+        self.ldap_password = kwargs.pop("ldap_password")
+        self.ldap_host = kwargs.pop("ldap_host")
+        self.ldap_dn = kwargs.pop("ldap_dn")
+        self.ldap_roles_base = kwargs.pop("ldap_roles_base")
+        self.ldap_cn_template = kwargs.pop("ldap_cn_template")
+        self.config_key = kwargs.pop("config_key")
+        super().__init__(**kwargs)
+
+    def run(self):
+        # Imported lazily so the master config can be loaded even where
+        # ldap3 is not installed — TODO confirm this is the intent.
+        import json
+        from ldap3 import Reader, Writer, Server, Connection, ObjectDef
+        server = Server(self.ldap_host)
+        conn = Connection(server,
+            user=self.ldap_dn,
+            password=self.ldap_password)
+        conn.bind()
+        obj = ObjectDef("immaePuppetClass", conn)
+        r = Reader(conn, obj,
+            "cn={},{}".format(self.ldap_cn_template.format(self.environment), self.ldap_roles_base))
+        r.search()
+        if len(r) > 0:
+            w = Writer.from_cursor(r)
+            for value in w[0].immaePuppetJson.values:
+                config = json.loads(value)
+                # Only touch the JSON blob that already tracks this component.
+                if "{}_version".format(self.config_key) in config:
+                    config["{}_version".format(self.config_key)] = self.build_version
+                    config["{}_sha256".format(self.config_key)] = self.build_hash
+                    # Replace the attribute value in place: drop the old
+                    # serialization, add the updated one, then commit.
+                    w[0].immaePuppetJson -= value
+                    w[0].immaePuppetJson += json.dumps(config, indent="  ")
+                    w.commit()
+                    return defer.succeed(SUCCESS)
+        # No matching entry or value: fail the step.
+        # NOTE(review): `defer` (twisted.internet.defer) is not imported in the
+        # import block added above, while the old per-file
+        # `from twisted.internet import defer` lines are removed elsewhere in
+        # this patch — confirm defer is imported at the top of this file.
+        return defer.succeed(FAILURE)
+
+def compute_build_infos(prefix, release_path):
+    """Return a renderer yielding the version and sha256 of the uploaded package.
+
+    The build's "build" property must hold a file named
+    "<prefix>_<version>.tar.gz" located directly under *release_path*.
+    """
+    @util.renderer
+    def compute(props):
+        import re, hashlib
+        build_file = props.getProperty("build")
+        package_dest = "{}/{}".format(release_path, build_file)
+        # re.escape the prefix and the literal dots: in the previous pattern
+        # "." matched any character, so e.g. "foo_1-tar-gz" style names could
+        # match "{0}_(.*).tar.gz" accidentally.
+        pattern = r"{0}_(.*)\.tar\.gz".format(re.escape(prefix))
+        version = re.match(pattern, build_file).group(1)
+        with open(package_dest, "rb") as f:
+            sha = hashlib.sha256(f.read()).hexdigest()
+        return {
+            "build_version": version,
+            "build_hash": sha,
+        }
+    return compute
+
+def deploy_ssh_command(ssh_key_path, deploy_hosts):
+    """Return a renderer producing the ssh command line used for deployment.
+
+    *deploy_hosts* maps an environment name to the extra ssh arguments
+    (options and user@host); unknown environments fall back to the
+    unroutable "host.invalid" so the step fails visibly.
+    """
+    @util.renderer
+    def compute(props):
+        # Default to the integration environment when no property is set.
+        environment = props["environment"] if props.hasProperty("environment") else "integration"
+        # Host key checking is deliberately disabled here — presumably the
+        # deploy targets' keys are not tracked in known_hosts; confirm this
+        # is acceptable for the production hosts as well.
+        ssh_command = [
+            "ssh", "-o", "UserKnownHostsFile=/dev/null", "-o", "StrictHostKeyChecking=no", "-o", "CheckHostIP=no",
+            "-i", ssh_key_path ]
+        return ssh_command + deploy_hosts.get(environment, ["host.invalid"])
+    return compute
};
config = lib.mkIf config.myServices.buildbot.enable {
+    nixpkgs.overlays = [
+      (self: super: {
+        # Helper script: wait until the given systemd unit starts a NEW
+        # invocation (its InvocationID changes from $2), then stream that
+        # invocation's journal until the unit has no pending job left.
+        follow-systemd-unit = self.writeScriptBin "follow-systemd-unit" ''
+          #!${self.stdenv.shell}
+
+          set -euo pipefail
+
+          service=$1
+          before_invocation_id=$2
+
+          get_id() {
+            systemctl show -p InvocationID --value "$service"
+          }
+
+          while [ "$(get_id)" = "$before_invocation_id" ]; do sleep 1; done
+
+          invocation_id="$(get_id)"
+          cursor="$(mktemp)"
+          trap "rm -f $cursor" EXIT
+
+          get_logs() {
+            journalctl --quiet --cursor-file=$cursor INVOCATION_ID=$invocation_id + _SYSTEMD_INVOCATION_ID=$invocation_id
+          }
+
+          while [ -n "$(systemctl show -p Job --value "$service")" ]; do
+            get_logs
+          done
+          get_logs
+        '';
+      })
+    ];
ids.uids.buildbot = config.myEnv.buildbot.user.uid;
ids.gids.buildbot = config.myEnv.buildbot.user.gid;
group = "buildbot";
description = "Buildbot user";
home = varDir;
- extraGroups = [ "keys" ];
+ extraGroups = [ "keys" "systemd-journal" ];
+ useDefaultShell = true;
+ openssh.authorizedKeys.keys = [ config.myEnv.buildbot.ssh_key.public ];
};
services.websites.env.tools.watchPaths = lib.attrsets.mapAttrsToList
permissions = "0600";
user = "buildbot";
group = "buildbot";
- text = if builtins.isFunction v then v pkgs else v;
+ text = if builtins.isFunction v then v pkgs config else v;
})
) project.secrets
++ [
LDAP_ROLES_BASE = "ou=roles,ou=hosts,dc=immae,dc=eu"
XMPP_RECIPIENTS = os.environ["BUILDBOT_XMPP_RECIPIENTS"].split(" ")
- PUPPET_HOST = {
+ DEPLOY_HOSTS = {
"integration": [ "-p8022", "root@caldance.cs.immae.dev"],
}
return ([args], None)
-def deploy_hook_scheduler(project, timer=1):
- return schedulers.AnyBranchScheduler(
- change_filter=util.ChangeFilter(category="deploy_webhook", project=project),
- name="{}_deploy".format(project), treeStableTimer=timer, builderNames=["{}_deploy".format(project)])
-
def configure(c):
c["buildbotURL"] = E.BUILDBOT_URL
c["www"]["port"] = E.SOCKET
- c["www"]["change_hook_dialects"]["base"] = {
- "custom_class": CustomBase
- }
- c['workers'].append(worker.LocalWorker("generic-worker"))
- c['workers'].append(worker.LocalWorker("deploy-worker"))
+ c["www"]["change_hook_dialects"]["base"] = { "custom_class": CustomBase }
db_lock = util.MasterLock("deploy_after_build")
- c['schedulers'].append(hook_scheduler("Caldance", timer=1))
- c['schedulers'].append(force_scheduler("force_caldance", ["Caldance_build"]))
- c['schedulers'].append(deploy_scheduler("deploy_caldance", ["Caldance_deploy"]))
- c['schedulers'].append(deploy_hook_scheduler("Caldance", timer=1))
+ configure_build(c, db_lock.access('exclusive'))
+ configure_deploy(c, db_lock.access('exclusive'))
+
+ configure_slack_push(c, E.SECRETS_FILE, all_builder_names(c))
+ configure_xmpp_push(c, E.SECRETS_FILE, all_builder_names(c), E.XMPP_RECIPIENTS)
+
+def configure_build(c, lock):
+ builder_name = "Caldance_build"
+ worker_name = "caldance-build"
- c['builders'].append(factory("caldance", locks=[db_lock.access('exclusive')]))
+ c['schedulers'].append(force_scheduler("force_caldance", [builder_name]))
+ c['schedulers'].append(git_hook_scheduler("Caldance", [builder_name]))
+ c['workers'].append(worker.LocalWorker(worker_name))
+ c['builders'].append(util.BuilderConfig(name=builder_name, locks=[lock], workernames=[worker_name], factory=build_factory()))
- c['builders'].append(deploy_factory("caldance", locks=[db_lock.access('exclusive')]))
+def configure_deploy(c, lock):
+ builder_name = "Caldance_deploy"
+ worker_name = "caldance-deploy"
- c['services'].append(SlackStatusPush(
- name="slack_status_caldance",
- builders=["Caldance_build", "Caldance_deploy"],
- serverUrl=open(E.SECRETS_FILE + "/slack_webhook", "r").read().rstrip()))
- c['services'].append(XMPPStatusPush(
- name="xmpp_status_caldance",
- builders=["Caldance_build", "Caldance_deploy"],
- recipients=E.XMPP_RECIPIENTS,
- password=open(E.SECRETS_FILE + "/notify_xmpp_password", "r").read().rstrip()))
+ c['schedulers'].append(deploy_hook_scheduler("Caldance", [builder_name]))
+ c['schedulers'].append(deploy_scheduler("deploy_caldance", [builder_name]))
+ c['workers'].append(worker.LocalWorker(worker_name))
+ c['builders'].append(util.BuilderConfig(name=builder_name, locks=[lock], workernames=[worker_name], factory=deploy_factory()))
-def factory(project, locks=[], ignore_fails=False):
+def build_factory():
+ project = "caldance"
release_file = "{1}/{0}_%(kw:clean_branch)s.tar.gz"
package = util.Interpolate("{0}_%(kw:clean_branch)s.tar.gz".format(project), clean_branch=clean_branch)
factory = util.BuildFactory()
factory.addStep(steps.Git(logEnviron=False, repourl=E.GIT_URL,
sshPrivateKey=open(E.SSH_KEY_PATH).read().rstrip(),
- sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy"))
+ sshHostKey=E.SSH_HOST_KEY, mode="full", method="fresh"))
factory.addSteps(package_and_upload(package, package_dest, package_url))
+ return factory
- return util.BuilderConfig(
- name="{}_build".format(project.capitalize()),
- locks=locks,
- workernames=["generic-worker"], factory=factory)
-
-def compute_build_infos(project):
- @util.renderer
- def compute(props):
- import re, hashlib
- build_file = props.getProperty("build")
- package_dest = "{1}/{0}".format(build_file, E.RELEASE_PATH)
- version = re.match(r"{0}_(.*).tar.gz".format(project), build_file).group(1)
- with open(package_dest, "rb") as f:
- sha = hashlib.sha256(f.read()).hexdigest()
- return {
- "build_version": version,
- "build_hash": sha,
- }
- return compute
-
-@util.renderer
-def puppet_ssh_command(props):
- environment = props["environment"] if props.hasProperty("environment") else "integration"
- ssh_command = [
- "ssh", "-o", "UserKnownHostsFile=/dev/null", "-o", "StrictHostKeyChecking=no", "-o", "CheckHostIP=no",
- "-i", E.SSH_KEY_PATH ]
- return ssh_command + E.PUPPET_HOST.get(environment, ["host.invalid"])
-
-def deploy_factory(project, locks=[]):
+def deploy_factory():
+ project = "caldance"
package_dest = util.Interpolate("{0}/%(prop:build)s".format(E.RELEASE_PATH))
factory = util.BuildFactory()
factory.addStep(steps.MasterShellCommand(command=["test", "-f", package_dest]))
- factory.addStep(steps.SetProperties(properties=compute_build_infos(project)))
- factory.addStep(LdapPush(environment=util.Property("environment"),
- project=project, build_version=util.Property("build_version"),
- build_hash=util.Property("build_hash"), ldap_password=util.Secret("ldap")))
- factory.addStep(steps.MasterShellCommand(command=puppet_ssh_command))
- return util.BuilderConfig(
- name="{}_deploy".format(project.capitalize()),
- locks=locks,
- workernames=["deploy-worker"], factory=factory)
-
-from twisted.internet import defer
-from buildbot.process.buildstep import FAILURE
-from buildbot.process.buildstep import SUCCESS
-from buildbot.process.buildstep import BuildStep
-
-class LdapPush(BuildStep):
- name = "LdapPush"
- renderables = ["environment", "project", "build_version", "build_hash", "ldap_password"]
-
- def __init__(self, **kwargs):
- self.environment = kwargs.pop("environment")
- self.project = kwargs.pop("project")
- self.build_version = kwargs.pop("build_version")
- self.build_hash = kwargs.pop("build_hash")
- self.ldap_password = kwargs.pop("ldap_password")
- self.ldap_host = kwargs.pop("ldap_host", E.LDAP_HOST)
- super().__init__(**kwargs)
-
- def run(self):
- import json
- from ldap3 import Reader, Writer, Server, Connection, ObjectDef
- server = Server(self.ldap_host)
- conn = Connection(server,
- user=E.LDAP_DN,
- password=self.ldap_password)
- conn.bind()
- obj = ObjectDef("immaePuppetClass", conn)
- r = Reader(conn, obj,
- "cn=caldance.{},{}".format(self.environment, E.LDAP_ROLES_BASE))
- r.search()
- if len(r) > 0:
- w = Writer.from_cursor(r)
- for value in w[0].immaePuppetJson.values:
- config = json.loads(value)
- if "role::caldance::{}_version".format(self.project) in config:
- config["role::caldance::{}_version".format(self.project)] = self.build_version
- config["role::caldance::{}_sha256".format(self.project)] = self.build_hash
- w[0].immaePuppetJson -= value
- w[0].immaePuppetJson += json.dumps(config, indent=" ")
- w.commit()
- return defer.succeed(SUCCESS)
- return defer.succeed(FAILURE)
+ factory.addStep(steps.SetProperties(properties=compute_build_infos(project, E.RELEASE_PATH)))
+ factory.addStep(LdapEdit(environment=util.Property("environment"),
+ build_version=util.Property("build_version"),
+ build_hash=util.Property("build_hash"),
+ config_key="role::caldance::{}".format(project),
+ ldap_host=E.LDAP_HOST, ldap_roles_base=E.LDAP_ROLES_BASE, ldap_dn=E.LDAP_DN,
+ ldap_cn_template="caldance.{}", ldap_password=util.Secret("ldap")))
+ factory.addStep(steps.MasterShellCommand(command=deploy_ssh_command(E.SSH_KEY_PATH, E.DEPLOY_HOSTS)))
+ return factory
LDAP_DN = "cn=buildbot,ou=services,dc=immae,dc=eu"
LDAP_ROLES_BASE = "ou=roles,ou=hosts,dc=immae,dc=eu"
- PUPPET_HOST = {
+ DEPLOY_HOSTS = {
"production": "root@cryptoportfolio.immae.eu",
"integration": "root@cryptoportfolio-dev.immae.eu"
}
c["buildbotURL"] = E.BUILDBOT_URL
c["www"]["port"] = E.SOCKET
- c['workers'].append(worker.LocalWorker("generic-worker"))
- c['workers'].append(worker.LocalWorker("deploy-worker"))
+ configure_build(c)
+ configure_deploy(c)
+ configure_slack_push(c, E.SECRETS_FILE, all_builder_names(c))
- c['schedulers'].append(hook_scheduler("Trader"))
- c['schedulers'].append(hook_scheduler("Front"))
- c['schedulers'].append(force_scheduler(
- "force_cryptoportfolio", ["Trader_build", "Front_build"]))
- c['schedulers'].append(deploy_scheduler("deploy_cryptoportfolio",
- ["Trader_deploy", "Front_deploy"]))
+def configure_build(c):
+ front_builder_name = "Front_build"
+ trader_builder_name = "Trader_build"
+ worker_name = "cryptoportfolio-build"
- c['builders'].append(factory("trader"))
- c['builders'].append(factory("front", ignore_fails=True))
+ c['schedulers'].append(force_scheduler("force_cryptoportfolio", [front_builder_name,trader_builder_name]))
+ c['schedulers'].append(git_hook_scheduler("Trader", [trader_builder_name]))
+ c['schedulers'].append(git_hook_scheduler("Front", [front_builder_name]))
- c['builders'].append(deploy_factory("trader"))
- c['builders'].append(deploy_factory("front"))
+ c['workers'].append(worker.LocalWorker(worker_name))
- c['services'].append(SlackStatusPush(
- name="slack_status_cryptoportfolio",
- builders=["Front_build", "Trader_build", "Front_deploy", "Trader_deploy"],
- serverUrl=open(E.SECRETS_FILE + "/slack_webhook", "r").read().rstrip()))
+ c['builders'].append(util.BuilderConfig(name=trader_builder_name, workernames=[worker_name], factory=build_factory("trader")))
+ c['builders'].append(util.BuilderConfig(name=front_builder_name, workernames=[worker_name], factory=build_factory("front", ignore_fails=True)))
-def factory(project, ignore_fails=False):
+def configure_deploy(c):
+ front_builder_name = "Front_deploy"
+ trader_builder_name = "Trader_deploy"
+ worker_name = "cryptoportfolio-deploy"
+
+ c['schedulers'].append(deploy_scheduler("deploy_cryptoportfolio", [front_builder_name, trader_builder_name]))
+
+ c['workers'].append(worker.LocalWorker(worker_name))
+
+ c['builders'].append(util.BuilderConfig(name=front_builder_name, workernames=[worker_name], factory=deploy_factory("front")))
+ c['builders'].append(util.BuilderConfig(name=trader_builder_name, workernames=[worker_name], factory=deploy_factory("trader")))
+
+def build_factory(project, ignore_fails=False):
release_file = "{1}/{0}/{0}_%(kw:clean_branch)s.tar.gz"
url = E.GIT_URL.format(project.capitalize())
factory = util.BuildFactory()
factory.addStep(steps.Git(logEnviron=False, repourl=url,
- mode="full", method="copy"))
+ mode="full", method="fresh"))
factory.addStep(steps.ShellCommand(name="make install",
logEnviron=False, haltOnFailure=(not ignore_fails),
warnOnFailure=ignore_fails, flunkOnFailure=(not ignore_fails),
command=["make", "test"]))
factory.addSteps(package_and_upload(package, package_dest, package_url))
- return util.BuilderConfig(
- name="{}_build".format(project.capitalize()),
- workernames=["generic-worker"], factory=factory)
-
-def compute_build_infos(project):
- @util.renderer
- def compute(props):
- import re, hashlib
- build_file = props.getProperty("build")
- package_dest = "{2}/{0}/{1}".format(project, build_file, E.RELEASE_PATH)
- version = re.match(r"{0}_(.*).tar.gz".format(project), build_file).group(1)
- with open(package_dest, "rb") as f:
- sha = hashlib.sha256(f.read()).hexdigest()
- return {
- "build_version": version,
- "build_hash": sha,
- }
- return compute
-
-@util.renderer
-def puppet_host(props):
- environment = props["environment"] if props.hasProperty("environment") else "integration"
- return E.PUPPET_HOST.get(environment, "host.invalid")
+ return factory
def deploy_factory(project):
package_dest = util.Interpolate("{1}/{0}/%(prop:build)s".format(project, E.RELEASE_PATH))
factory = util.BuildFactory()
factory.addStep(steps.MasterShellCommand(command=["test", "-f", package_dest]))
- factory.addStep(steps.SetProperties(properties=compute_build_infos(project)))
- factory.addStep(LdapPush(environment=util.Property("environment"),
- project=project, build_version=util.Property("build_version"),
- build_hash=util.Property("build_hash"), ldap_password=util.Secret("ldap")))
- factory.addStep(steps.MasterShellCommand(command=[
- "ssh", "-o", "UserKnownHostsFile=/dev/null", "-o", "StrictHostKeyChecking=no", "-o", "CheckHostIP=no", "-i", E.SSH_KEY_PATH, puppet_host]))
- return util.BuilderConfig(name="{}_deploy".format(project.capitalize()), workernames=["deploy-worker"], factory=factory)
-
-from twisted.internet import defer
-from buildbot.process.buildstep import FAILURE
-from buildbot.process.buildstep import SUCCESS
-from buildbot.process.buildstep import BuildStep
-
-class LdapPush(BuildStep):
- name = "LdapPush"
- renderables = ["environment", "project", "build_version", "build_hash", "ldap_password"]
-
- def __init__(self, **kwargs):
- self.environment = kwargs.pop("environment")
- self.project = kwargs.pop("project")
- self.build_version = kwargs.pop("build_version")
- self.build_hash = kwargs.pop("build_hash")
- self.ldap_password = kwargs.pop("ldap_password")
- self.ldap_host = kwargs.pop("ldap_host", E.LDAP_HOST)
- super().__init__(**kwargs)
-
- def run(self):
- import json
- from ldap3 import Reader, Writer, Server, Connection, ObjectDef
- server = Server(self.ldap_host)
- conn = Connection(server,
- user=E.LDAP_DN,
- password=self.ldap_password)
- conn.bind()
- obj = ObjectDef("immaePuppetClass", conn)
- r = Reader(conn, obj,
- "cn=cryptoportfolio.{},{}".format(self.environment, E.LDAP_ROLES_BASE))
- r.search()
- if len(r) > 0:
- w = Writer.from_cursor(r)
- for value in w[0].immaePuppetJson.values:
- config = json.loads(value)
- if "role::cryptoportfolio::{}_version".format(self.project) in config:
- config["role::cryptoportfolio::{}_version".format(self.project)] = self.build_version
- config["role::cryptoportfolio::{}_sha256".format(self.project)] = self.build_hash
- w[0].immaePuppetJson -= value
- w[0].immaePuppetJson += json.dumps(config, indent=" ")
- w.commit()
- return defer.succeed(SUCCESS)
- return defer.succeed(FAILURE)
+ factory.addStep(steps.SetProperties(properties=compute_build_infos(project, "{}/{}".format(E.RELEASE_PATH, project))))
+ factory.addStep(LdapEdit(environment=util.Property("environment"),
+ build_version=util.Property("build_version"),
+ build_hash=util.Property("build_hash"),
+ config_key="role::cryptoportfolio::{}".format(project),
+ ldap_host=E.LDAP_HOST, ldap_roles_base=E.LDAP_ROLES_BASE, ldap_dn=E.LDAP_DN,
+ ldap_cn_template="cryptoportfolio.{}", ldap_password=util.Secret("ldap")))
+ factory.addStep(steps.MasterShellCommand(command=deploy_ssh_command(E.SSH_KEY_PATH, E.DEPLOY_HOSTS)))
+ return factory
TITLE_URL = "https://oms.syanni.eu"
TITLE = "Syanni website"
-class CustomBase(webhooks.base):
- def getChanges(self, request):
- try:
- content = request.content.read()
- args = json.loads(bytes2unicode(content))
- except Exception as e:
- raise ValueError("Error loading JSON: " + str(e))
-
- args.setdefault("comments", "")
- args.setdefault("repository", "")
- args.setdefault("author", args.get("who", "unknown"))
-
- return ([args], None)
-
def configure(c):
c["buildbotURL"] = E.BUILDBOT_URL
c["www"]["port"] = E.SOCKET
- c["www"]["change_hook_dialects"]["base"] = {
- "custom_class": CustomBase
- }
- c['workers'].append(worker.LocalWorker("generic-worker-denise"))
+ worker_name = "generic-worker-denise"
+ c['workers'].append(worker.LocalWorker(worker_name))
+
+ configure_bingo(c, worker_name)
+ configure_oms(c, worker_name)
+ configure_aventuriers(c, worker_name)
+
+ c['schedulers'].append(force_scheduler("force_denise", all_builder_names(c)))
+
+def configure_bingo(c, worker_name):
+ builder_name = "DeniseBingo_build"
+
+ c['schedulers'].append(git_hook_scheduler("DeniseBingo", [builder_name]))
+ c['builders'].append(util.BuilderConfig(name=builder_name, workernames=[worker_name], factory=bingo_build_factory()))
+
+def configure_oms(c, worker_name):
+ builder_name = "DeniseOMS_build"
+
+ c['schedulers'].append(git_hook_scheduler("DeniseOMS", [builder_name]))
+ c['builders'].append(util.BuilderConfig(name=builder_name, workernames=[worker_name], factory=oms_build_factory()))
- c['schedulers'].append(hook_scheduler("DeniseBingo", timer=1))
- c['schedulers'].append(hook_scheduler("DeniseOMS", timer=1))
- c['schedulers'].append(hook_scheduler("DeniseAventuriers", timer=1))
- c['schedulers'].append(force_scheduler("force_denise", [
- "DeniseBingo_build", "DeniseOMS_build", "DeniseAventuriers_build"
- ]))
+def configure_aventuriers(c, worker_name):
+ builder_name = "DeniseAventuriers_build"
- c['builders'].append(denise_oms_factory())
- c['builders'].append(denise_bingo_factory())
- c['builders'].append(denise_aventuriers_factory())
+ c['schedulers'].append(git_hook_scheduler("DeniseAventuriers", [builder_name]))
+ c['builders'].append(util.BuilderConfig(name=builder_name, workernames=[worker_name], factory=aventuriers_build_factory()))
-def denise_bingo_factory():
+def bingo_build_factory():
@util.renderer
def bingo_run_path(props):
if props.hasProperty("branch") and len(props["branch"]) > 0 and props["branch"] == "master":
factory = util.BuildFactory()
factory.addStep(steps.Git(logEnviron=False, repourl=E.BINGO_GIT_URL,
submodules=True, sshPrivateKey=open(E.SSH_KEY_PATH).read().rstrip(),
- sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy"))
+ sshHostKey=E.SSH_HOST_KEY, mode="full", method="fresh"))
factory.addStep(steps.MasterShellCommand(command=util.Interpolate("rm -rf %(kw:bingo_path)s", bingo_path=bingo_path)))
- factory.addStep(steps.DirectoryUpload(workersrc="../source",
- masterdest=bingo_path,
- url=bingo_url))
+ factory.addStep(steps.DirectoryUpload(workersrc=".", masterdest=bingo_path, url=bingo_url))
factory.addStep(steps.MasterShellCommand(command=util.Interpolate("chmod -R a+rX %(kw:bingo_path)s", bingo_path=bingo_path)))
factory.addStep(steps.MasterShellCommand(command=util.Interpolate("/run/wrappers/bin/sudo systemctl restart %(kw:bingo_service)s.service", bingo_service=bingo_systemd_service)))
- return util.BuilderConfig(name="DeniseBingo_build", workernames=["generic-worker-denise"], factory=factory)
+ return factory
-def denise_oms_factory():
+def oms_build_factory():
@util.renderer
def oms_run_path(props):
if props.hasProperty("branch") and len(props["branch"]) > 0 and props["branch"] == "master":
factory = util.BuildFactory()
factory.addStep(steps.Git(logEnviron=False, repourl=E.OMS_GIT_URL,
submodules=True, sshPrivateKey=open(E.SSH_KEY_PATH).read().rstrip(),
- sshHostKey=E.SSH_HOST_KEY, mode="full", method="copy"))
+ sshHostKey=E.SSH_HOST_KEY, mode="full", method="fresh"))
factory.addStep(steps.MasterShellCommand(command=util.Interpolate("rm -rf %(kw:oms_path)s", oms_path=oms_path)))
- factory.addStep(steps.DirectoryUpload(workersrc="../source",
- masterdest=oms_path,
- url=oms_url))
+ factory.addStep(steps.DirectoryUpload(workersrc=".", masterdest=oms_path, url=oms_url))
factory.addStep(steps.MasterShellCommand(command=util.Interpolate("chmod -R a+rX %(kw:oms_path)s", oms_path=oms_path)))
factory.addStep(steps.MasterShellCommand(command=util.Interpolate("/run/wrappers/bin/sudo systemctl restart %(kw:oms_service)s.service", oms_service=oms_systemd_service)))
- return util.BuilderConfig(name="DeniseOMS_build", workernames=["generic-worker-denise"], factory=factory)
+ return factory
-def denise_aventuriers_factory():
+def aventuriers_build_factory():
path_env = {
"PATH": os.environ["BUILDBOT_PATH_Aventuriers"] + ":${PATH}",
"TZ": "Europe/Paris",
factory.addStep(steps.Git(logEnviron=False, repourl=E.AVENTURIERS_GIT_URL,
submodules=True, mode="full", method="fresh"))
factory.addStep(steps.ShellCommand(name="build files",
- logEnviron=False, haltOnFailure=True, workdir="build",
+ logEnviron=False, haltOnFailure=True,
env=path_env, command=["make", "tout", "encyclo"]))
factory.addStep(steps.MasterShellCommand(command="rm -rf {}".format(E.AVENTURIERS_RELEASE_PATH)))
- factory.addStep(steps.DirectoryUpload(workersrc="../build/html",
+ factory.addStep(steps.DirectoryUpload(workersrc="html",
masterdest=E.AVENTURIERS_RELEASE_PATH,
url="https://aventuriers.syanni.eu"))
factory.addStep(steps.FileUpload(name="upload epub file", workersrc="aventuriers.epub",
- workdir="build", masterdest=E.AVENTURIERS_RELEASE_PATH + "/aventuriers.epub",
+ masterdest=E.AVENTURIERS_RELEASE_PATH + "/aventuriers.epub",
url="https://aventuriers.syanni.eu/aventuriers.epub", mode=0o644))
factory.addStep(steps.FileUpload(name="upload mobi file", workersrc="aventuriers.mobi",
- workdir="build", masterdest=E.AVENTURIERS_RELEASE_PATH + "/aventuriers.mobi",
+ masterdest=E.AVENTURIERS_RELEASE_PATH + "/aventuriers.mobi",
url="https://aventuriers.syanni.eu/aventuriers.mobi", mode=0o644))
factory.addStep(steps.FileUpload(name="upload pdf file", workersrc="aventuriers.pdf",
- workdir="build", masterdest=E.AVENTURIERS_RELEASE_PATH + "/aventuriers.pdf",
+ masterdest=E.AVENTURIERS_RELEASE_PATH + "/aventuriers.pdf",
url="https://aventuriers.syanni.eu/aventuriers.pdf", mode=0o644))
factory.addStep(steps.FileUpload(name="upload encyclo pdf file", workersrc="encyclo.pdf",
- workdir="build", masterdest=E.AVENTURIERS_RELEASE_PATH + "/encyclo.pdf",
+ masterdest=E.AVENTURIERS_RELEASE_PATH + "/encyclo.pdf",
url="https://aventuriers.syanni.eu/encyclo.pdf", mode=0o644))
factory.addStep(steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.AVENTURIERS_RELEASE_PATH)))
- return util.BuilderConfig(name="DeniseAventuriers_build", workernames=["generic-worker-denise"], factory=factory)
+ return factory
import os
from buildbot.util import bytes2unicode
import json
+from functools import partial
__all__ = [ "configure", "E" ]
TITLE_URL = "https://www.immae.eu"
TITLE = "Immae website"
-class CustomBase(webhooks.base):
- def getChanges(self, request):
- try:
- content = request.content.read()
- args = json.loads(bytes2unicode(content))
- except Exception as e:
- raise ValueError("Error loading JSON: " + str(e))
-
- args.setdefault("comments", "")
- args.setdefault("repository", "")
- args.setdefault("author", args.get("who", "unknown"))
-
- return ([args], None)
-
def configure(c):
    """Wire up every immae.eu site builder on one local worker, plus the
    force schedulers and the Slack/XMPP status pushers."""
    c["buildbotURL"] = E.BUILDBOT_URL
    c["www"]["port"] = E.SOCKET
    wname = "generic-worker-immae-eu"
    c['workers'].append(worker.LocalWorker(wname))

    # Builders split by whether their force scheduler asks for a git branch.
    withoutbranch = [configure_gsm_cells(c, wname)]

    static_sites = [
        ("immae_eu", immae_eu_build_factory),
        ("normalesup", normalesup_build_factory),
        ("cours", cours_build_factory),
        ("recettes", recettes_build_factory),
        ("docs", docs_build_factory),
        ("history", history_build_factory),
        ("bip39", bip39_build_factory),
    ]
    withbranch = [_configure(site, build, c, wname) for site, build in static_sites]

    withbranch.append(_configure_symfony("Chloe", c, wname,
        "gitolite@git.immae.eu:perso/Immae/Sites/Chloe/New"))
    withbranch.append(_configure_symfony("Florian", c, wname,
        "gitolite@git.immae.eu:perso/florian_telles/stabilo"))
    # IsabelleAten needs extra frontend steps, hence its dedicated helper.
    withbranch.append(configure_symfony_isabelle_aten(c, wname))
    withbranch.append(_configure_symfony("Ludivine", c, wname,
        "gitolite@git.immae.eu:perso/Immae/Sites/Ludivine"))
    withbranch.append(_configure_symfony("Connexionswing", c, wname,
        "gitolite@git.immae.eu:perso/Immae/Projets/Connexionswing"))
    withbranch.append(_configure_symfony("Piedsjaloux", c, wname,
        "gitolite@git.immae.eu:Pieds_jaloux/NewSite"))

    c['schedulers'].append(force_scheduler("force_immae_eu", withbranch))
    c['schedulers'].append(force_scheduler("force_no_branch_immae_eu", withoutbranch, nobranch=True))

    configure_slack_push(c, E.SECRETS_FILE, all_builder_names(c))
    configure_xmpp_push(c, E.SECRETS_FILE, all_builder_names(c), E.XMPP_RECIPIENTS)
# Map: project name -> git branch -> SYMFONY_ENV value used during the build.
# Projects or branches absent from this table fall back to "dev"
# (see get_parameters_file / get_composer_install_command).
# All three listed projects share the same master=prod / test=dev layout.
BRANCH_TO_SYMFONY_ENV = {
    project: {"master": "prod", "test": "dev"}
    for project in ("Ludivine", "Connexionswing", "Piedsjaloux")
}
+
# Map: project name -> git branch -> phpfpm systemd unit tied to that branch.
# Used to fetch the unit's InvocationID before the upload and to follow the
# unit afterwards (see need_follow_systemd, get_systemd_service_invocation_command
# and follow_systemd_command).  Branches not listed here skip those steps.
BRANCH_TO_SERVICE_NAME = {
    "Chloe": {
        "test": "phpfpm-chloe_new_integration",
        # NOTE(review): the master entry is commented out, so Chloe production
        # deploys are not followed — confirm this is intentional.
        #"master": "phpfpm-chloe_production",
        },
    "Florian": {
        "stabilo_dev": "phpfpm-florian_app",
        },
    "IsabelleAten": {
        "test": "phpfpm-isabelle_aten_integration",
        "master": "phpfpm-isabelle_aten_production",
        },
    "Ludivine": {
        "test": "phpfpm-ludivine_integration",
        "master": "phpfpm-ludivine_production",
        },
    "Connexionswing": {
        "test": "phpfpm-connexionswing_integration",
        "master": "phpfpm-connexionswing_production",
        },
    "Piedsjaloux": {
        "test": "phpfpm-piedsjaloux_integration",
        "master": "phpfpm-piedsjaloux_production",
        },
    }
+
# Map: project name -> git branch -> extra shell command run after the
# composer install step (see need_post_step / get_post_step_command and
# the "build website post" step in symfony_project_factory).
BRANCH_TO_POST_STEP = {
    "Connexionswing": {
        "master": "SYMFONY_ENV=prod php ./bin/console assetic:dump --env=prod --no-debug"
        },
    }
+
def need_follow_systemd(name, step):
    """doStepIf predicate: True when the built branch maps to a systemd unit."""
    mapped_branches = BRANCH_TO_SERVICE_NAME.get(name, {})
    return step.getProperty("branch") in mapped_branches
+
def need_post_step(name, step):
    """doStepIf predicate: True when the built branch declares a post step."""
    mapped_branches = BRANCH_TO_POST_STEP.get(name, {})
    return step.getProperty("branch") in mapped_branches
+
@util.renderer
def get_post_step_command(props, name):
    """Renderer: extra shell command to run after composer install.

    Returns None (no-op) when the branch property is unset/empty or the
    branch has no entry in BRANCH_TO_POST_STEP.
    """
    if not (props.hasProperty("branch") and props["branch"]):
        return None
    return BRANCH_TO_POST_STEP.get(name, {}).get(props["branch"])
+
@util.renderer
def get_parameters_file(props, name):
    """Renderer: master-side path of the parameters.yml for the branch's env.

    The Symfony environment defaults to "dev" when no branch is set or the
    branch is not listed in BRANCH_TO_SYMFONY_ENV.
    """
    env = "dev"
    if props.hasProperty("branch") and props["branch"]:
        env = BRANCH_TO_SYMFONY_ENV.get(name, {}).get(props["branch"], "dev")
    return E.SECRETS_FILE + "/symfony_{}_{}_parameters.yml".format(name, env)
+
@util.renderer
def get_composer_install_command(props, name):
    """Renderer: the composer install line; prod builds skip dev packages."""
    env = "dev"
    if props.hasProperty("branch") and props["branch"]:
        env = BRANCH_TO_SYMFONY_ENV.get(name, {}).get(props["branch"], "dev")
    suffix = " --no-dev" if env == "prod" else ""
    return "SYMFONY_ENV={} composer install".format(env) + suffix
+
@util.renderer
def get_systemd_service_invocation_command(props, name):
    """Renderer: ssh command fetching the unit's current InvocationID.

    Returns None (skipping the step) when the branch is unset or has no
    mapped service in BRANCH_TO_SERVICE_NAME.
    """
    if not (props.hasProperty("branch") and props["branch"]):
        return None
    service = BRANCH_TO_SERVICE_NAME.get(name, {}).get(props["branch"])
    if service is None:
        return None
    return "ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o CheckHostIP=no -i {} buildbot@eldiron systemctl show -p InvocationID --value {}.service".format(E.SSH_KEY_PATH, service)
+
@util.renderer
def follow_systemd_command(props, name, invocation_id):
    """Renderer: worker command following the service after a deploy.

    ``invocation_id`` is rendered by ``.withArgs`` before this function runs,
    so it is the actual id string, not a Property object.

    Returns None (skipping the step) when the branch is unset or has no
    mapped service in BRANCH_TO_SERVICE_NAME.
    """
    if not (props.hasProperty("branch") and props["branch"]):
        return None
    service = BRANCH_TO_SERVICE_NAME.get(name, {}).get(props["branch"])
    if service is None:
        return None
    # Fix: use the rendered invocation_id argument instead of ignoring it and
    # embedding a fresh util.Property in the returned command — a renderer's
    # return value is not rendered again, so the Property would leak through
    # as an object rather than the property's value.
    return ["follow-systemd-unit", service, invocation_id]
+
def configure_gsm_cells(c, worker_name):
    """Register the weekly GSM-cells builder; returns its builder name."""
    name = "GSMCells_build"
    # Rebuild the cells database every Saturday night instead of on git pushes.
    weekly = schedulers.Nightly(name="GSMCells-weekly",
        builderNames=[name], dayOfWeek=6, hour=3)
    c['schedulers'].append(weekly)
    builder = util.BuilderConfig(name=name,
        workernames=[worker_name], factory=gsm_cells_build_factory())
    c['builders'].append(builder)
    return name
+
def _configure(name, factory, c, worker_name):
    """Register a static-site builder and its git-hook scheduler.

    Returns the builder name so callers can collect it for a force scheduler.
    """
    if name == "bip39":
        # Acronym: capitalizing word-by-word would give "Bip39".
        capitalized = "BIP39"
    else:
        capitalized = "".join(part.capitalize() for part in name.split("_"))
    builder_name = "{}_build".format(capitalized)

    c['schedulers'].append(git_hook_scheduler(capitalized, [builder_name]))
    c['builders'].append(util.BuilderConfig(name=builder_name,
        workernames=[worker_name], factory=factory()))
    return builder_name
+
def configure_symfony_isabelle_aten(c, worker_name):
    """IsabelleAten variant of the Symfony setup: no parameters.yml download
    and an extra yarn-based frontend build after composer install."""
    def frontend_steps(path_env):
        # Appended to the standard Symfony pipeline by symfony_project_factory.
        return [
            NixShellCommand(name="Install yarn",
                logEnviron=False, haltOnFailure=True,
                env=path_env, command="yarn install"),
            NixShellCommand(name="Build frontend",
                logEnviron=False, haltOnFailure=True,
                env=path_env, command="yarn run encore production"),
        ]

    return _configure_symfony("IsabelleAten", c, worker_name,
        "gitolite@git.immae.eu:perso/Immae/Sites/Aten",
        parameters_path=None, other_steps=frontend_steps)
def _configure_symfony(name, c, worker_name, *args, **kwargs):
    """Register a Symfony project builder and its git-hook scheduler.

    Extra positional/keyword arguments are forwarded to
    symfony_project_factory.  Returns the builder name.
    """
    builder_name = "Symfony_{}_build".format(name)

    hook = git_hook_scheduler("Symfony_{}".format(name), [builder_name])
    c['schedulers'].append(hook)

    build = symfony_project_factory(name, *args, **kwargs)
    c['builders'].append(util.BuilderConfig(name=builder_name,
        workernames=[worker_name], factory=build))
    return builder_name
+
def history_build_factory():
    """Build the Jekyll "history" site and publish it under /history."""
    path_env = {
        "PATH": os.environ["BUILDBOT_PATH_History"] + ":${PATH}"
    }
    # Fix: read the deploy key via a context manager so the file handle is
    # closed instead of leaking until garbage collection.
    with open(E.SSH_KEY_PATH) as key_file:
        ssh_key = key_file.read().rstrip()
    factory = util.BuildFactory()
    factory.addStep(steps.Git(logEnviron=False, repourl=E.HISTORY_GIT_URL,
        submodules=True, sshPrivateKey=ssh_key,
        sshHostKey=E.SSH_HOST_KEY, mode="full", method="fresh"))
    factory.addStep(steps.ShellCommand(name="build website",
        logEnviron=False, haltOnFailure=True,
        env=path_env, command=["jekyll", "build"]))
    # Replace the published tree atomically-ish: wipe, re-upload, re-open perms.
    factory.addStep(steps.MasterShellCommand(command="rm -rf {}".format(E.HISTORY_RELEASE_PATH)))
    factory.addStep(steps.DirectoryUpload(workersrc="_site",
        masterdest=E.HISTORY_RELEASE_PATH,
        url="https://www.immae.eu/history"))
    factory.addStep(steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.HISTORY_RELEASE_PATH)))
    return factory
def docs_build_factory():
    """Build the documentation site (``make html``) and publish it under /docs."""
    path_env = {
        "PATH": os.environ["BUILDBOT_PATH_Docs"] + ":${PATH}"
    }
    # Fix: read the deploy key via a context manager so the file handle is
    # closed instead of leaking until garbage collection.
    with open(E.SSH_KEY_PATH) as key_file:
        ssh_key = key_file.read().rstrip()
    factory = util.BuildFactory()
    factory.addStep(steps.Git(logEnviron=False, repourl=E.DOCS_GIT_URL,
        submodules=True, sshPrivateKey=ssh_key,
        sshHostKey=E.SSH_HOST_KEY, mode="full", method="fresh"))
    factory.addStep(steps.ShellCommand(name="build website",
        logEnviron=False, haltOnFailure=True,
        env=path_env, command=["make", "html"]))
    factory.addStep(steps.MasterShellCommand(command="rm -rf {}".format(E.DOCS_RELEASE_PATH)))
    factory.addStep(steps.DirectoryUpload(workersrc="_build/html",
        masterdest=E.DOCS_RELEASE_PATH,
        url="https://www.immae.eu/docs"))
    factory.addStep(steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.DOCS_RELEASE_PATH)))
    return factory
def recettes_build_factory():
    """Build the Jekyll recipes site and publish it under /recettes."""
    path_env = {
        "PATH": os.environ["BUILDBOT_PATH_Recettes"] + ":${PATH}"
    }
    # Fix: read the deploy key via a context manager so the file handle is
    # closed instead of leaking until garbage collection.
    with open(E.SSH_KEY_PATH) as key_file:
        ssh_key = key_file.read().rstrip()
    factory = util.BuildFactory()
    factory.addStep(steps.Git(logEnviron=False, repourl=E.RECETTES_GIT_URL,
        submodules=True, sshPrivateKey=ssh_key,
        sshHostKey=E.SSH_HOST_KEY, mode="full", method="fresh"))
    # Jekyll runs inside nix-shell so the environment matches the repo's shell.nix.
    factory.addStep(NixShellCommand(name="build website",
        logEnviron=False, haltOnFailure=True,
        env=path_env, command="jekyll build --trace --baseurl /recettes"))
    factory.addStep(steps.MasterShellCommand(command="rm -rf {}".format(E.RECETTES_RELEASE_PATH)))
    factory.addStep(steps.DirectoryUpload(workersrc="_site",
        masterdest=E.RECETTES_RELEASE_PATH,
        url="https://www.immae.eu/recettes"))
    factory.addStep(steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.RECETTES_RELEASE_PATH)))
    return factory
def bip39_build_factory():
    """Build the standalone BIP39 HTML page and publish it."""
    env = {"PATH": os.environ["BUILDBOT_PATH_BIP39"] + ":${PATH}"}
    factory = util.BuildFactory()
    pipeline = [
        # Public repository: no deploy key needed here.
        steps.Git(logEnviron=False, repourl=E.BIP39_GIT_URL,
            submodules=True, mode="full", method="fresh"),
        steps.ShellCommand(name="build file",
            logEnviron=False, haltOnFailure=True,
            env=env, command=["python", "compile.py"]),
        steps.FileUpload(name="upload file", workersrc="bip39-standalone.html",
            masterdest=E.BIP39_RELEASE_PATH + "/index.html",
            url="https://tools.immae.eu/BIP39", mode=0o644),
        steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.BIP39_RELEASE_PATH)),
    ]
    for step in pipeline:
        factory.addStep(step)
    return factory
def immae_eu_build_factory():
    """Build the main www.immae.eu site (``make html``) and publish it."""
    path_env = {
        "PATH": os.environ["BUILDBOT_PATH_ImmaeEu"] + ":${PATH}"
    }
    # Fix: read the deploy key via a context manager so the file handle is
    # closed instead of leaking until garbage collection.
    with open(E.SSH_KEY_PATH) as key_file:
        ssh_key = key_file.read().rstrip()
    factory = util.BuildFactory()
    factory.addStep(steps.Git(logEnviron=False, repourl=E.IMMAE_EU_GIT_URL,
        submodules=True, sshPrivateKey=ssh_key,
        sshHostKey=E.SSH_HOST_KEY, mode="full", method="fresh"))
    factory.addStep(steps.ShellCommand(name="build website",
        logEnviron=False, haltOnFailure=True,
        env=path_env, command=["make", "html"]))
    factory.addStep(steps.MasterShellCommand(command="rm -rf {}".format(E.IMMAE_EU_RELEASE_PATH)))
    factory.addStep(steps.DirectoryUpload(workersrc="output",
        masterdest=E.IMMAE_EU_RELEASE_PATH,
        url="https://www.immae.eu"))
    factory.addStep(steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.IMMAE_EU_RELEASE_PATH)))
    return factory
def cours_build_factory():
    """Build the /cours website and its PDF tarball, and publish both."""
    path_env = {
        "PATH": os.environ["BUILDBOT_PATH_Cours"] + ":${PATH}",
        "CI": "yes",
    }  # Fix: the dict was missing its closing brace (syntax error as written).
    # Fix: read the deploy key via a context manager so the file handle is
    # closed instead of leaking until garbage collection.
    with open(E.SSH_KEY_PATH) as key_file:
        ssh_key = key_file.read().rstrip()
    factory = util.BuildFactory()
    factory.addStep(steps.Git(logEnviron=False, repourl=E.COURS_GIT_URL,
        submodules=True, sshPrivateKey=ssh_key,
        sshHostKey=E.SSH_HOST_KEY, mode="incremental"))
    factory.addStep(steps.ShellCommand(name="build website",
        logEnviron=False, haltOnFailure=True,
        command=["make", "build"], env=path_env))
    factory.addStep(steps.MasterShellCommand(command="rm -rf {}".format(E.COURS_RELEASE_PATH)))
    factory.addStep(steps.DirectoryUpload(workersrc="build",
        masterdest=E.COURS_RELEASE_PATH,
        url="https://www.immae.eu/cours"))
    factory.addStep(steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.COURS_RELEASE_PATH)))
    factory.addStep(steps.ShellCommand(name="build pdfs",
        logEnviron=False, haltOnFailure=True,
        command=["make", "pdfs"], env=path_env))
    # Tarball name/destination vary with the sanitized branch name.
    package = util.Interpolate("cours_%(kw:clean_branch)s.tar.gz", clean_branch=clean_branch)
    package_dest = util.Interpolate(release_file.format(E.COURS_TARBALL_PATH), clean_branch=clean_branch)
    package_url = util.Interpolate(release_file.format(E.COURS_TARBALL_URL), clean_branch=clean_branch)
    factory.addStep(steps.ShellCommand(name="build pdf tarball",
        logEnviron=False, haltOnFailure=True,
        command=["tar", "-cvf", package, "-C", "pdfs", "mp", "mpsi"], env=path_env))
    factory.addStep(steps.FileUpload(name="upload package", workersrc=package,
        masterdest=package_dest, url=package_url, mode=0o644))
    return factory
def normalesup_build_factory():
    """Build the research site, rsync it to the "phare" host and publish it."""
    path_env = {
        "PATH": os.environ["BUILDBOT_PATH_Normalesup"] + ":${PATH}"
    }
    # Fix: read the deploy key via a context manager so the file handle is
    # closed instead of leaking until garbage collection.
    with open(E.SSH_KEY_PATH) as key_file:
        ssh_key = key_file.read().rstrip()
    factory = util.BuildFactory()
    factory.addStep(steps.Git(logEnviron=False, repourl=E.NORMALESUP_GIT_URL,
        submodules=True, sshPrivateKey=ssh_key,
        sshHostKey=E.SSH_HOST_KEY, mode="incremental"))
    factory.addStep(steps.ShellCommand(name="build website",
        logEnviron=False, haltOnFailure=True,
        command=["make", "build"], env=path_env))
    factory.addStep(steps.ShellCommand(name="give read access to all files",
        logEnviron=False, haltOnFailure=True,
        command="chmod -R a+rX build", env=path_env))
    # Mirror the build to the remote host; the rsync destination comes from
    # the BUILDBOT_NORMALESUP_HOST environment variable.
    factory.addStep(steps.ShellCommand(name="synchronize with phare",
        logEnviron=False, haltOnFailure=True,
        env=path_env, command=[
            "rsync", "-av", "--delete",
            "-e", "ssh -i {} -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o CheckHostIP=no".format(E.SSH_KEY_PATH),
            os.environ["BUILDBOT_NORMALESUP_HOST"]
        ]))
    factory.addStep(steps.MasterShellCommand(command="rm -rf {}".format(E.NORMALESUP_RELEASE_PATH)))
    factory.addStep(steps.DirectoryUpload(workersrc="build", masterdest=E.NORMALESUP_RELEASE_PATH,
        url="https://www.immae.eu/recherche"))
    factory.addStep(steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.NORMALESUP_RELEASE_PATH)))
    return factory
def gsm_cells_build_factory():
    # Build factory for the weekly GSM-cells database refresh.
    path_env = {
        "PATH": os.environ["BUILDBOT_PATH_GSMCells"] + ":${PATH}",
        "IN_BUILDBOT": "yes",
    # NOTE(review): lines appear to be missing here — the dict above is never
    # closed and `factory` is used below without being created.  The elided
    # steps presumably build lacells.db; restore them before relying on this.
    factory.addStep(steps.MasterShellCommand(command="ln -sf lacells.db {}/lacells.db.new".format(E.GSMCELLS_RELEASE_PATH)))
    factory.addStep(steps.MasterShellCommand(command="chmod -R a+rX {}".format(E.GSMCELLS_RELEASE_PATH)))
    return factory
def symfony_project_factory(name, repourl, parameters_path="app/config/parameters.yml", other_steps=lambda a : []):
    # Shared build pipeline for the Symfony projects: checkout, parameters
    # download (unless parameters_path is None), composer install, optional
    # per-branch post step, tarball + upload, then systemd follow-up for
    # branches listed in BRANCH_TO_SERVICE_NAME.
    if "BUILDBOT_PATH_SYMFONY_{}".format(name) in os.environ:
        path_env = {
            "PATH": os.environ["BUILDBOT_PATH_SYMFONY_{}".format(name)] + ":${PATH}"
    # NOTE(review): the dict above is unterminated and there is no else branch
    # defining path_env — lines were likely lost; confirm against the original.
    factory = util.BuildFactory()
    factory.addStep(steps.Git(logEnviron=False, repourl=repourl,
        submodules=True, sshPrivateKey=open(E.SSH_KEY_PATH).read().rstrip(),
        sshHostKey=E.SSH_HOST_KEY, mode="full", method="fresh"))
    if parameters_path is not None:
        # The parameters file is rendered per-branch (prod vs dev secrets).
        factory.addStep(steps.FileDownload(mastersrc=get_parameters_file.withArgs(name),
            workerdest=parameters_path))
    factory.addStep(NixShellCommand(name="build website",
        logEnviron=False, haltOnFailure=True,
        env=path_env, command=get_composer_install_command.withArgs(name)))
    if name in BRANCH_TO_POST_STEP:
        factory.addStep(NixShellCommand(name="build website post",
            logEnviron=False, haltOnFailure=True, doStepIf=partial(need_post_step, name),
            env=path_env, command=get_post_step_command.withArgs(name)))
    if parameters_path is not None:
        factory.addStep(steps.ShellCommand(name="Remove parameters.yml",
            logEnviron=False, haltOnFailure=True,
    # NOTE(review): the step above is cut off mid-call (no command, unbalanced
    # parentheses) and `package` / `release_file` are used below without a
    # visible definition — more elided lines; restore before shipping.
    package_dest = util.Interpolate(release_file.format("/var/lib/ftp/release.immae.eu/buildbot", name), clean_branch=clean_branch)
    # Tar doesn't like creating the tarball in the same directory, so create
    # the (empty) file first and exclude it from its own archive.
    factory.addStep(steps.ShellCommand(name="Make a tarball 1/2",
        logEnviron=False, haltOnFailure=True, env=path_env,
        command=["touch", package]))
    factory.addStep(steps.ShellCommand(name="Make a tarball 2/2",
        logEnviron=False, haltOnFailure=True, env=path_env,
        command=["tar", "--exclude", package, "-czf", package, "."]))
    # Capture the unit's InvocationID before upload so the follow step can
    # detect the restart that the deployment triggers.
    factory.addStep(steps.SetPropertyFromCommand(command=get_systemd_service_invocation_command.withArgs(name),
        property="service_invocation_id", doStepIf=partial(need_follow_systemd, name)))
    factory.addStep(steps.FileUpload(name="upload package",
        workersrc=package, masterdest=package_dest, mode=0o644))
    factory.addStep(steps.MasterShellCommand(command=follow_systemd_command.withArgs(name, util.Property("service_invocation_id")), env=path_env, logEnviron=False, doStepIf=partial(need_follow_systemd, name)))
    return factory
GIT_URL = "https://git.immae.eu/perso/Immae/TestProject.git"
SSH_KEY_PATH = "/var/lib/buildbot/buildbot_key"
LIBVIRT_URL = os.environ["BUILDBOT_VIRT_URL"] + "?keyfile=" + SSH_KEY_PATH
- PUPPET_HOST = "root@backup-1.v.immae.eu"
LDAP_HOST = "ldap.immae.eu"
LDAP_DN = "cn=buildbot,ou=services,dc=immae,dc=eu"
LDAP_ROLES_BASE = "ou=roles,ou=hosts,dc=immae,dc=eu"
return ([args], None)
-def deploy_hook_scheduler(project, timer=1):
- return schedulers.AnyBranchScheduler(
- change_filter=util.ChangeFilter(category="deploy_webhook", project=project),
- name="{}_deploy".format(project), treeStableTimer=timer, builderNames=["{}_deploy".format(project)])
-
def configure(c):
    """Entry point for the TestProject master configuration."""
    c["buildbotURL"] = E.BUILDBOT_URL
    c["www"]["port"] = E.SOCKET
    # Generic JSON webhook endpoint, normalized by CustomBase.
    c["www"]["change_hook_dialects"]["base"] = {"custom_class": CustomBase}

    for section in (configure_build, configure_deploy):
        section(c)

    # Status notifications cover every builder registered above.
    configure_slack_push(c, E.SECRETS_FILE, all_builder_names(c))
    configure_xmpp_push(c, E.SECRETS_FILE, all_builder_names(c), E.XMPP_RECIPIENTS)
def configure_build(c):
    """Register the TestProject build worker, builder and its schedulers."""
    builder, wname = "TestProject_build", "test-build"
    c['schedulers'].append(force_scheduler("force_test", [builder]))
    c['schedulers'].append(git_hook_scheduler("TestProject", [builder]))
    c['workers'].append(libvirt_worker(wname))
    c['builders'].append(util.BuilderConfig(name=builder,
        workernames=[wname], factory=build_factory()))
def configure_deploy(c):
    """Register the TestProject deploy worker, builder and its schedulers."""
    builder, wname = "TestProject_deploy", "test-deploy"
    c['workers'].append(libvirt_worker(wname))
    c['schedulers'].append(deploy_hook_scheduler("TestProject", [builder]))
    c['schedulers'].append(deploy_scheduler("deploy_test", [builder]))
    c['builders'].append(util.BuilderConfig(name=builder,
        workernames=[wname], factory=deploy_factory()))
def libvirt_worker(name):
    """Create a libvirt-backed worker sharing the common password/connection.

    Fix: the password file is now read via a context manager so the handle is
    closed instead of leaking until garbage collection.
    """
    with open(E.SECRETS_FILE + "/worker_password", "r") as password_file:
        password = password_file.read().rstrip()
    return ilibvirt.LibVirtWorker(name, password,
        ilibvirt.Connection(E.LIBVIRT_URL),
        E.WORKER_HOST)
def build_factory():
    """Check out TestProject, print debug info and produce a release tarball."""
    def branch_path(template):
        # All three artifact names share the same clean_branch interpolation.
        return util.Interpolate(template, clean_branch=clean_branch)

    package = branch_path("test_%(kw:clean_branch)s.tar.gz")
    package_dest = branch_path("{}/test_%(kw:clean_branch)s.tar.gz".format(E.RELEASE_PATH))
    package_url = branch_path("{}/test_%(kw:clean_branch)s.tar.gz".format(E.RELEASE_URL))

    factory = util.BuildFactory()
    factory.addStep(steps.Git(logEnviron=False,
        repourl=E.GIT_URL, mode="full", method="fresh"))
    # Two debugging steps kept from the original setup.
    factory.addStep(steps.ShellCommand(name="env",
        logEnviron=False, command=["env"]))
    factory.addStep(steps.ShellCommand(name="pwd",
        logEnviron=False, command=["echo", package]))
    factory.addSteps(package_and_upload(package, package_dest, package_url))
    return factory
def deploy_factory():
    """Deploy a previously-built TestProject tarball.

    Verifies the artifact exists on the master, derives version/hash
    properties from it, records them in LDAP, then runs the deploy command
    over ssh.
    """
    package_dest = util.Interpolate("{}/%(prop:build)s".format(E.RELEASE_PATH))

    factory = util.BuildFactory()
    # Fail fast if the requested build artifact is not on the master.
    factory.addStep(steps.MasterShellCommand(command=["test", "-f", package_dest]))
    factory.addStep(steps.SetProperties(
        properties=compute_build_infos("test", E.RELEASE_PATH)))
    factory.addStep(LdapEdit(
        environment=util.Property("environment"),
        build_version=util.Property("build_version"),
        build_hash=util.Property("build_hash"),
        config_key="test",
        ldap_host=E.LDAP_HOST,
        ldap_roles_base=E.LDAP_ROLES_BASE,
        ldap_dn=E.LDAP_DN,
        ldap_cn_template="test.{}",
        ldap_password=util.Secret("ldap")))
    factory.addStep(steps.MasterShellCommand(
        command=deploy_ssh_command(E.SSH_KEY_PATH, {})))
    return factory