Added buildbot module

This commit is contained in:
Julien Malka 2023-01-19 19:17:40 +01:00
parent 64055dc28d
commit fb38df2ea3
Signed by: Luj
GPG key ID: 6FC74C847011FD83
4 changed files with 1020 additions and 0 deletions

View file

@ -0,0 +1,626 @@
#!/usr/bin/env python3
import json
import multiprocessing
import os
import re
import uuid
from collections import defaultdict
from pathlib import Path
from typing import Any, Generator, List
from buildbot.plugins import steps, util
from buildbot.process import buildstep, logobserver
from buildbot.process.properties import Properties
from buildbot.process.results import ALL_RESULTS, statusToString
from buildbot.steps.trigger import Trigger
from twisted.internet import defer
class BuildTrigger(Trigger):
    """Dynamic trigger that schedules one `nix-build` build per evaluated attribute.

    `jobs` is the parsed output of `nix-eval-jobs`: one dict per attribute,
    each carrying at least `attr`, and usually `drvPath`, `outputs` and
    (for failed evaluations) `error`.
    """

    def __init__(self, scheduler: str, jobs: list[dict[str, str]], **kwargs):
        if "name" not in kwargs:
            kwargs["name"] = "trigger"
        self.jobs = jobs
        self.config = None
        Trigger.__init__(
            self,
            waitForFinish=True,
            schedulerNames=[scheduler],
            haltOnFailure=True,
            flunkOnFailure=True,
            sourceStamps=[],
            alwaysUseLatest=False,
            updateSourceStamp=False,
            **kwargs,
        )

    def createTriggerProperties(self, props):
        # Properties are already fully assembled in getSchedulersAndProperties.
        return props

    def getSchedulersAndProperties(self):
        build_props = self.build.getProperties()
        # Prefer the PR base repository; fall back to the push repository.
        repo_name = build_props.getProperty(
            "github.base.repo.full_name",
            build_props.getProperty("github.repository.full_name"),
        )
        sch = self.schedulerNames[0]
        triggered_schedulers = []
        for job in self.jobs:
            attr = job.get("attr", "eval-error")
            name = attr
            if repo_name is not None:
                name = f"{repo_name}: {name}"
            drv_path = job.get("drvPath")
            error = job.get("error")
            out_path = job.get("outputs", {}).get("out")
            # Also record the paths on the parent build so later steps
            # (e.g. a deploy trigger) can look them up per attribute.
            build_props.setProperty(f"{attr}-out_path", out_path, "nix-eval")
            build_props.setProperty(f"{attr}-drv_path", drv_path, "nix-eval")
            props = Properties()
            props.setProperty("virtual_builder_name", name, "nix-eval")
            props.setProperty("virtual_builder_tags", "", "nix-eval")
            props.setProperty("attr", attr, "nix-eval")
            props.setProperty("drv_path", drv_path, "nix-eval")
            props.setProperty("out_path", out_path, "nix-eval")
            # we use this to identify builds when running a retry
            props.setProperty("build_uuid", str(uuid.uuid4()), "nix-eval")
            props.setProperty("error", error, "nix-eval")
            triggered_schedulers.append((sch, props))
        return triggered_schedulers

    def getCurrentSummary(self):
        """Summarize the triggered builds' results.

        The default Trigger summary would just show the generic builder name
        `nix-build`, which is not helpful here.
        """
        if not self.triggeredNames:
            return {"step": "running"}
        summary = []
        if self._result_list:
            for status in ALL_RESULTS:
                count = self._result_list.count(status)
                if count:
                    # BUG FIX: reuse `count` instead of recounting the list
                    # inside the f-string (was an O(n) recount per status).
                    summary.append(f"{count} {statusToString(status, count)}")
        return {"step": f"({', '.join(summary)})"}
class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
    """
    Parses the output of `nix-eval-jobs` and triggers a `nix-build` build for
    every attribute.
    """

    def __init__(self, **kwargs):
        kwargs = self.setupShellMixin(kwargs)
        super().__init__(**kwargs)
        self.observer = logobserver.BufferLogObserver()
        self.addLogObserver("stdio", self.observer)

    @defer.inlineCallbacks
    def run(self) -> Generator[Any, object, Any]:
        # Run the configured nix-eval-jobs command on the worker.
        cmd = yield self.makeRemoteShellCommand()
        yield self.runCommand(cmd)
        result = cmd.results()
        if result == util.SUCCESS:
            # nix-eval-jobs emits one JSON object per stdout line; collect
            # them and fan out one build per evaluated attribute.
            jobs = [
                json.loads(line)
                for line in self.observer.getStdout().split("\n")
                if line != ""
            ]
            self.build.addStepsAfterCurrentStep(
                [BuildTrigger(scheduler="nix-build", name="nix-build", jobs=jobs)]
            )
        return result
# FIXME this leaks memory... but probably not enough that we care
class RetryCounter:
    """Tracks the remaining retry budget of each build, keyed by its uuid."""

    def __init__(self, retries: int) -> None:
        # Builds we have never seen start with the full retry budget.
        self.builds: dict[uuid.UUID, int] = defaultdict(lambda: retries)

    def retry_build(self, id: uuid.UUID) -> int:
        """Consume one retry for `id`.

        Returns the budget that was still available before this call, or 0
        once the budget is exhausted (callers treat 0 as "do not retry").
        """
        remaining = self.builds[id]
        if remaining <= 1:
            return 0
        self.builds[id] = remaining - 1
        return remaining


# For now we limit this to two. Often this allows us to make the error log
# shorter because we won't see the logs for all previous succeeded builds
RETRY_COUNTER = RetryCounter(retries=2)
class NixBuildCommand(buildstep.ShellMixin, steps.BuildStep):
    """
    Builds a nix derivation if evaluation was successful,
    otherwise this shows the evaluation error.
    """

    def __init__(self, **kwargs):
        kwargs = self.setupShellMixin(kwargs)
        super().__init__(**kwargs)
        self.observer = logobserver.BufferLogObserver()
        self.addLogObserver("stdio", self.observer)

    @defer.inlineCallbacks
    def run(self) -> Generator[Any, object, Any]:
        error = self.getProperty("error")
        if error is not None:
            # Evaluation already failed: surface the eval error and stop.
            attr = self.getProperty("attr")
            self.build.results = util.FAILURE
            log = yield self.addLog("nix_error")
            log.addStderr(f"{attr} failed to evaluate:\n{error}")
            return util.FAILURE
        # run `nix build`
        cmd = yield self.makeRemoteShellCommand()
        yield self.runCommand(cmd)
        res = cmd.results()
        # On failure, spend one retry from the shared budget for this build.
        if res == util.FAILURE:
            if RETRY_COUNTER.retry_build(self.getProperty("build_uuid")) > 0:
                return util.RETRY
        return res
class UpdateBuildOutput(steps.BuildStep):
    """
    Updates store paths in a public www directory.
    This is useful to prefetch updates without having to evaluate
    on the target machine.
    """

    def __init__(self, branches: list[str], **kwargs):
        # Branches whose outputs get published (e.g. ["master"]).
        self.branches = branches
        super().__init__(**kwargs)

    def run(self) -> Generator[Any, object, Any]:
        props = self.build.getProperties()
        # Only publish outputs built from the configured branches.
        if props.getProperty("branch") not in self.branches:
            return util.SKIPPED
        attr_prop = props.getProperty("attr")
        out_path = props.getProperty("out_path")
        # BUG FIX: previously a missing "attr" property crashed in
        # os.path.basename(None) and a missing "out_path" crashed in
        # f.write(None); skip instead (e.g. for eval-error jobs).
        if attr_prop is None or out_path is None:
            return util.SKIPPED
        # basename() strips any path components so the attr cannot escape
        # the output directory.
        attr = os.path.basename(attr_prop)
        # XXX don't hardcode this
        p = Path("/var/www/buildbot/nix-outputs/")
        p.mkdir(parents=True, exist_ok=True)
        (p / attr).write_text(out_path)
        return util.SUCCESS
class MergePr(steps.ShellCommand):
    """
    Merge a pull request for specified branches and pull request owners
    """

    def __init__(
        self,
        base_branches: list[str],
        owners: list[str],
        **kwargs: Any,
    ) -> None:
        super().__init__(**kwargs)
        self.base_branches = base_branches
        self.owners = owners
        self.observer = logobserver.BufferLogObserver()
        self.addLogObserver("stdio", self.observer)

    @defer.inlineCallbacks
    def reconfigService(
        self,
        base_branches: list[str],
        owners: list[str],
        **kwargs: Any,
    ) -> Generator[Any, object, Any]:
        self.base_branches = base_branches
        self.owners = owners
        # BUG FIX: the parent reconfigService returns a Deferred that was
        # silently dropped; additionally, an @defer.inlineCallbacks function
        # must contain a `yield` to be a generator at all — without one this
        # method raised TypeError when invoked.
        yield super().reconfigService(**kwargs)

    @defer.inlineCallbacks
    def run(self) -> Generator[Any, object, Any]:
        props = self.build.getProperties()
        # NOTE(review): the property consulted here is "basename" — confirm
        # this is the intended property name (vs. "branch").
        if props.getProperty("basename") not in self.base_branches:
            return util.SKIPPED
        if props.getProperty("event") not in ["pull_request"]:
            return util.SKIPPED
        # Only auto-merge PRs opened by trusted owners (e.g. the update bot).
        if not any(owner in self.owners for owner in props.getProperty("owners")):
            return util.SKIPPED
        cmd = yield self.makeRemoteShellCommand()
        yield self.runCommand(cmd)
        return cmd.results()
class CreatePr(steps.ShellCommand):
    """
    Creates a pull request if none exists
    """

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.addLogObserver(
            "stdio", logobserver.LineConsumerLogObserver(self.check_pr_exists)
        )

    def check_pr_exists(self):
        # Coroutine: the observer feeds us one (stream, line) pair per send.
        # If `gh` reports there is nothing to do, remember to skip the step.
        patterns = (
            re.compile(
                """a pull request for branch ".*" into branch ".*" already exists:"""
            ),
            re.compile("No commits between .* and .*"),
        )
        while True:
            _, line = yield
            if any(pattern.search(line) is not None for pattern in patterns):
                self.skipped = True

    @defer.inlineCallbacks
    def run(self):
        self.skipped = False
        cmd = yield self.makeRemoteShellCommand()
        yield self.runCommand(cmd)
        # `skipped` may have been flipped by check_pr_exists while the
        # command's output streamed in.
        return util.SKIPPED if self.skipped else cmd.results()
def nix_update_flake_config(
    worker_names: list[str],
    projectname: str,
    github_token_secret: str,
    github_bot_user: str,
) -> util.BuilderConfig:
    """
    Updates the flake and opens a PR for it.
    """
    factory = util.BuildFactory()
    # Token is embedded in the clone URL so the push below is authenticated.
    url_with_secret = util.Interpolate(
        f"https://git:%(secret:{github_token_secret})s@github.com/{projectname}"
    )
    factory.addStep(
        steps.Git(
            repourl=url_with_secret,
            alwaysUseLatest=True,
            method="clean",
            submodules=True,
            haltOnFailure=True,
        )
    )
    factory.addStep(
        steps.ShellCommand(
            name="Update flakes",
            env=dict(
                GIT_AUTHOR_NAME=github_bot_user,
                GIT_AUTHOR_EMAIL=f"{github_bot_user}@users.noreply.github.com",
                GIT_COMMITTER_NAME=github_bot_user,
                GIT_COMMITTER_EMAIL=f"{github_bot_user}@users.noreply.github.com",
            ),
            command=[
                "nix",
                "flake",
                "update",
                "--commit-lock-file",
                "--commit-lockfile-summary",
                "flake.lock: Update",
            ],
            haltOnFailure=True,
        )
    )
    factory.addStep(
        steps.ShellCommand(
            name="Force-Push to update_flake_lock branch",
            command=[
                "git",
                "push",
                "--force",
                "origin",
                "HEAD:refs/heads/update_flake_lock",
            ],
            haltOnFailure=True,
        )
    )
    # CreatePr marks itself SKIPPED when a PR already exists or there is
    # nothing to commit (see CreatePr.check_pr_exists).
    factory.addStep(
        CreatePr(
            name="Create pull-request",
            env=dict(GITHUB_TOKEN=util.Secret(github_token_secret)),
            command=[
                "gh",
                "pr",
                "create",
                "--repo",
                projectname,
                "--title",
                "flake.lock: Update",
                "--body",
                "Automatic buildbot update",
                "--head",
                "refs/heads/update_flake_lock",
                "--base",
                "main",
            ],
            haltOnFailure=True,
        )
    )
    return util.BuilderConfig(
        name="nix-update-flake",
        workernames=worker_names,
        factory=factory,
        properties=dict(virtual_builder_name="nix-update-flake"),
    )
class Machine:
    """A deployable NixOS machine.

    hostname:  address used to reach the machine.
    attr_name: flake attribute name identifying its NixOS configuration
               (build outputs are looked up as `nixos-<attr_name>-out_path`).
    """

    def __init__(self, hostname: str, attr_name: str) -> None:
        self.hostname = hostname
        self.attr_name = attr_name

    def __repr__(self) -> str:
        # Added for debuggability; previously fell back to the default
        # uninformative object repr.
        return f"{type(self).__name__}(hostname={self.hostname!r}, attr_name={self.attr_name!r})"
class DeployTrigger(Trigger):
    """
    Dynamic trigger that creates a deploy step for every machine.
    """

    def __init__(self, scheduler: str, machines: list[Machine], **kwargs):
        if "name" not in kwargs:
            kwargs["name"] = "trigger"
        self.machines = machines
        self.config = None
        Trigger.__init__(
            self,
            waitForFinish=True,
            schedulerNames=[scheduler],
            haltOnFailure=True,
            flunkOnFailure=True,
            sourceStamps=[],
            alwaysUseLatest=False,
            updateSourceStamp=False,
            **kwargs,
        )

    def createTriggerProperties(self, props):
        # Properties are already fully assembled in getSchedulersAndProperties.
        return props

    def getSchedulersAndProperties(self):
        build_props = self.build.getProperties()
        # Prefer the PR base repository; fall back to the push repository.
        repo_name = build_props.getProperty(
            "github.base.repo.full_name",
            build_props.getProperty("github.repository.full_name"),
        )
        sch = self.schedulerNames[0]
        triggered_schedulers = []
        for m in self.machines:
            # The out path was recorded on the parent build by BuildTrigger.
            out_path = build_props.getProperty(f"nixos-{m.attr_name}-out_path")
            props = Properties()
            name = m.attr_name
            if repo_name is not None:
                name = f"{repo_name}: Deploy {name}"
            props.setProperty("virtual_builder_name", name, "deploy")
            props.setProperty("attr", m.attr_name, "deploy")
            props.setProperty("out_path", out_path, "deploy")
            triggered_schedulers.append((sch, props))
        return triggered_schedulers

    @defer.inlineCallbacks
    def run(self):
        props = self.build.getProperties()
        # FIXME(review): `self.branches` is never assigned anywhere on this
        # class, so the original check raised AttributeError; a `branches`
        # constructor argument appears to be missing. Until that is added,
        # default to an empty list (always skip) instead of crashing.
        if props.getProperty("branch") not in getattr(self, "branches", []):
            return util.SKIPPED
        # BUG FIX: previously called `super().__init__()` here, which re-ran
        # the constructor (with missing arguments) instead of triggering the
        # scheduled builds.
        res = yield super().run()
        return res

    def getCurrentSummary(self):
        """Summarize the triggered builds' results.

        The default Trigger summary would just show the generic builder name,
        which is not helpful.
        """
        if not self.triggeredNames:
            return {"step": "running"}
        summary = []
        if self._result_list:
            for status in ALL_RESULTS:
                count = self._result_list.count(status)
                if count:
                    # BUG FIX: reuse `count` instead of recounting the list.
                    summary.append(f"{count} {statusToString(status, count)}")
        return {"step": f"({', '.join(summary)})"}
def nix_eval_config(
    worker_names: list[str],
    github_token_secret: str,
    automerge_users: List[str] | None = None,
    machines: list[Machine] | None = None,
) -> util.BuilderConfig:
    """
    Uses nix-eval-jobs to evaluate hydraJobs from flake.nix in parallel.
    For each evaluated attribute a new build pipeline is started.
    If all builds succeed and the build was for a PR opened by the flake update bot,
    this PR is merged.

    BUG FIX: `automerge_users` and `machines` previously used mutable default
    arguments (`[]`), which are shared across calls.
    """
    automerge_users = [] if automerge_users is None else automerge_users
    machines = [] if machines is None else machines
    factory = util.BuildFactory()
    # check out the source
    url_with_secret = util.Interpolate(
        f"https://git:%(secret:{github_token_secret})s@github.com/%(prop:project)s"
    )
    factory.addStep(
        steps.Git(
            repourl=url_with_secret,
            method="clean",
            submodules=True,
            haltOnFailure=True,
        )
    )
    factory.addStep(
        NixEvalCommand(
            env={},
            name="Eval flake",
            command=[
                "nix-eval-jobs",
                "--workers",
                8,
                "--option",
                "accept-flake-config",
                "true",
                "--gc-roots-dir",
                # FIXME: don't hardcode this
                "/var/lib/buildbot-worker/gcroot",
                "--flake",
                ".#hydraJobs",
            ],
            haltOnFailure=True,
        )
    )
    # Merge flake-update pull requests if CI succeeds
    if len(automerge_users) > 0:
        factory.addStep(
            MergePr(
                name="Merge pull-request",
                env=dict(GITHUB_TOKEN=util.Secret(github_token_secret)),
                base_branches=["master"],
                owners=automerge_users,
                command=[
                    "gh",
                    "pr",
                    "merge",
                    "--repo",
                    util.Property("project"),
                    "--rebase",
                    util.Property("pullrequesturl"),
                ],
            )
        )
    # NOTE(review): `machines` is currently unused — the deploy steps below
    # are still disabled.
    # factory.addStep(
    #     DeployTrigger(scheduler="nixos-deploy", name="nixos-deploy", machines=machines)
    # )
    return util.BuilderConfig(
        name="nix-eval",
        workernames=worker_names,
        factory=factory,
        properties=dict(virtual_builder_name="nix-eval"),
    )
def nix_build_config(
    worker_names: list[str],
    has_cachix_auth_token: bool = False,
    has_cachix_signing_key: bool = False,
) -> util.BuilderConfig:
    """
    Builds one nix flake attribute.
    """
    factory = util.BuildFactory()
    factory.addStep(
        NixBuildCommand(
            env={},
            name="Build flake attr",
            command=[
                "nix-build",
                "--option",
                "keep-going",
                "true",
                "--accept-flake-config",
                "--out-link",
                util.Interpolate("result-%(prop:attr)s"),
                util.Property("drv_path"),
            ],
            haltOnFailure=True,
        )
    )
    # Optionally push the result to cachix; a signing key takes precedence
    # over a plain auth token.
    if has_cachix_auth_token or has_cachix_signing_key:
        cachix_env = (
            dict(CACHIX_SIGNING_KEY=util.Secret("cachix-signing-key"))
            if has_cachix_signing_key
            else dict(CACHIX_AUTH_TOKEN=util.Secret("cachix-auth-token"))
        )
        factory.addStep(
            steps.ShellCommand(
                name="Upload cachix",
                env=cachix_env,
                command=[
                    "cachix",
                    "push",
                    util.Secret("cachix-name"),
                    util.Interpolate("result-%(prop:attr)s"),
                ],
            )
        )
    factory.addStep(UpdateBuildOutput(name="Update build output", branches=["master"]))
    return util.BuilderConfig(
        name="nix-build",
        workernames=worker_names,
        properties=[],
        collapseRequests=False,
        env={},
        factory=factory,
    )
# def nixos_deployment_config(worker_names: list[str]) -> util.BuilderConfig:
# factory = util.BuildFactory()
# factory.addStep(
# NixBuildCommand(
# env={},
# name="Deploy NixOS",
# command=[
# "nix",
# "build",
# "--option",
# "keep-going",
# "true",
# "-L",
# "--out-link",
# util.Interpolate("result-%(prop:attr)s"),
# util.Property("drv_path"),
# ],
# haltOnFailure=True,
# )
# )
# return util.BuilderConfig(
# name="nix-build",
# workernames=worker_names,
# properties=[],
# collapseRequests=False,
# env={},
# factory=factory,
# )

View file

@ -0,0 +1,149 @@
{ lib, pkgs, config, ... }:
with lib;
let
  cfg = config.luj.buildbot;
  # Port for the buildbot web UI; exported to master.py via $PORT below.
  port = "1810";
  package = pkgs.python3Packages.buildbot-worker;
  python = package.pythonModule;
  home = "/var/lib/buildbot-worker";
  buildbotDir = "${home}/worker";
in
{
  options.luj.buildbot = {
    enable = mkEnableOption "activate buildbot service";
    nginx.enable = mkEnableOption "activate nginx";
    # NOTE(review): this option is declared but never referenced below — the
    # nginx vhost name is hard-coded; confirm whether it should be used.
    nginx.subdomain = mkOption {
      type = types.str;
    };
  };

  config = mkIf cfg.enable {
    # Buildbot master
    services.buildbot-master = {
      enable = true;
      masterCfg = "${./.}/master.py";
      pythonPackages = ps: [
        ps.requests
        ps.treq
        ps.psycopg2
        ps.buildbot-worker
      ];
    };

    systemd.services.buildbot-master = {
      reloadIfChanged = true;
      # Configuration consumed by master.py at startup.
      environment = {
        PORT = port;
        # Github app used for the login button
        GITHUB_OAUTH_ID = "355493f668a8e1aa10cf";
        GITHUB_ORG = "JulienMalka";
        GITHUB_REPO = "nix-config";
        BUILDBOT_URL = "https://buildbot.julienmalka.me/";
        BUILDBOT_GITHUB_USER = "JulienMalka";
        # comma separated list of users that are allowed to login to buildbot and do stuff
        GITHUB_ADMINS = "JulienMalka";
      };
      serviceConfig = {
        # Restart buildbot with a delay. This way we can use buildbot to deploy itself.
        ExecReload = "+${pkgs.systemd}/bin/systemd-run --on-active=60 ${pkgs.systemd}/bin/systemctl restart buildbot-master";
        # in master.py we read secrets from $CREDENTIALS_DIRECTORY
        LoadCredential = [
          "github-token:${config.sops.secrets.github-token.path}"
          "github-webhook-secret:${config.sops.secrets.github-webhook-secret.path}"
          "github-oauth-secret:${config.sops.secrets.github-oauth-secret.path}"
          "buildbot-nix-workers:${config.sops.secrets.buildbot-nix-workers.path}"
        ];
      };
    };

    # sops-encrypted credentials exposed to the master via LoadCredential.
    sops.secrets = {
      github-token = {
        format = "binary";
        sopsFile = ../../secrets/github-token-secret;
      };
      github-webhook-secret = {
        format = "binary";
        sopsFile = ../../secrets/github-webhook-secret;
      };
      github-oauth-secret = {
        format = "binary";
        sopsFile = ../../secrets/github-oauth-secret;
      };
      buildbot-nix-workers = {
        format = "binary";
        sopsFile = ../../secrets/buildbot-nix-workers;
      };
    };

    # Reverse proxy for the web UI, the server-sent-events endpoint and the
    # websocket.
    services.nginx.virtualHosts."buildbot.julienmalka.me" =
      {
        locations."/".proxyPass = "http://127.0.0.1:1810/";
        locations."/sse" = {
          proxyPass = "http://127.0.0.1:1810/sse/";
          # proxy buffering will prevent sse to work
          extraConfig = "proxy_buffering off;";
        };
        locations."/ws" = {
          proxyPass = "http://127.0.0.1:1810/ws";
          proxyWebsockets = true;
          # raise the proxy timeout for the websocket
          extraConfig = "proxy_read_timeout 6000s;";
        };
      };

    # buildbot worker
    nix.settings.allowed-users = [ "buildbot-worker" ];
    users.users.buildbot-worker = {
      description = "Buildbot Worker User.";
      isSystemUser = true;
      createHome = true;
      home = "/var/lib/buildbot-worker";
      group = "buildbot-worker";
      useDefaultShell = true;
    };
    users.groups.buildbot-worker = { };

    systemd.services.buildbot-worker = {
      reloadIfChanged = true;
      description = "Buildbot Worker.";
      after = [ "network.target" "buildbot-master.service" ];
      wantedBy = [ "multi-user.target" ];
      # Tools that build steps invoke on the worker.
      path = [
        pkgs.unstable.nix-eval-jobs
        pkgs.git
        pkgs.gh
        pkgs.nix
        pkgs.nix-output-monitor
      ];
      # Configuration consumed by worker.py at startup.
      environment.PYTHONPATH = "${python.withPackages (_: [package])}/${python.sitePackages}";
      environment.MASTER_URL = ''tcp:host=localhost:port=9989'';
      environment.BUILDBOT_DIR = buildbotDir;
      environment.WORKER_PASSWORD_FILE = config.sops.secrets.buildbot-nix-worker-password.path;
      serviceConfig = {
        Type = "simple";
        User = "buildbot-worker";
        Group = "buildbot-worker";
        WorkingDirectory = home;
        # Restart buildbot with a delay. This way we can use buildbot to deploy itself.
        ExecReload = "+${pkgs.systemd}/bin/systemd-run --on-active=60 ${pkgs.systemd}/bin/systemctl restart buildbot-worker";
        ExecStart = "${python.pkgs.twisted}/bin/twistd --nodaemon --pidfile= --logfile - --python ${./worker.py}";
      };
    };

    sops.secrets.buildbot-nix-worker-password = {
      format = "binary";
      owner = "buildbot-worker";
      sopsFile = ../../secrets/buildbot-nix-worker-password;
    };
  };
}

187
modules/buildbot/master.py Normal file
View file

@ -0,0 +1,187 @@
#!/usr/bin/env python3
import json
import os
import sys
from datetime import timedelta
from pathlib import Path
from typing import Any
from buildbot.plugins import reporters, schedulers, secrets, util, worker
from buildbot.process.properties import Interpolate
# allow to import modules
sys.path.append(str(Path(__file__).parent))
from buildbot_nix import (
nix_build_config,
nix_eval_config,
nix_update_flake_config,
)
def read_secret_file(secret_name: str) -> str:
    """Read a systemd credential (see LoadCredential= in the NixOS module).

    Exits the process when $CREDENTIALS_DIRECTORY is absent, since the
    master cannot run without its secrets.
    """
    directory = os.environ.get("CREDENTIALS_DIRECTORY")
    if directory is None:
        # BUG FIX: the message previously said only "directory not set",
        # without naming which variable was missing.
        print("CREDENTIALS_DIRECTORY is not set", file=sys.stderr)
        sys.exit(1)
    return Path(directory).joinpath(secret_name).read_text()
# Required configuration from the environment (set by the NixOS module's
# systemd.services.buildbot-master.environment); a missing variable raises
# KeyError at startup, which is intentional.
ORG = os.environ["GITHUB_ORG"]
REPO = os.environ["GITHUB_REPO"]
BUILDBOT_URL = os.environ["BUILDBOT_URL"]
BUILDBOT_GITHUB_USER = os.environ["BUILDBOT_GITHUB_USER"]
def build_config() -> dict[str, Any]:
    """Assemble the buildbot master configuration (BuildmasterConfig).

    Reads secrets from $CREDENTIALS_DIRECTORY and repo/user settings from
    the environment (ORG, REPO, BUILDBOT_URL, BUILDBOT_GITHUB_USER).
    """
    c = {}
    c["buildbotNetUsageData"] = None
    # configure a janitor which will delete all logs older than one month, and will run on sundays at noon
    c["configurators"] = [
        util.JanitorConfigurator(logHorizon=timedelta(weeks=4), hour=12, dayOfWeek=6)
    ]
    c["schedulers"] = [
        # build all pushes to default branch
        schedulers.SingleBranchScheduler(
            name="main",
            change_filter=util.ChangeFilter(
                repository=f"https://github.com/{ORG}/{REPO}",
                filter_fn=lambda c: c.branch
                == c.properties.getProperty("github.repository.default_branch"),
            ),
            builderNames=["nix-eval"],
        ),
        # build all pull requests
        schedulers.SingleBranchScheduler(
            name="prs",
            change_filter=util.ChangeFilter(
                repository=f"https://github.com/{ORG}/{REPO}", category="pull"
            ),
            builderNames=["nix-eval"],
        ),
        schedulers.SingleBranchScheduler(
            name="flake-sources",
            change_filter=util.ChangeFilter(
                repository=f"https://github.com/{ORG}/nixpkgs", branch="main"
            ),
            treeStableTimer=20,
            builderNames=["nix-update-flake"],
        ),
        # this is triggered from `nix-eval`
        schedulers.Triggerable(
            name="nix-build",
            builderNames=["nix-build"],
        ),
        # allow to manually trigger a nix-build
        schedulers.ForceScheduler(name="force", builderNames=["nix-eval"]),
        # allow to manually update flakes
        schedulers.ForceScheduler(
            name="update-flake",
            builderNames=["nix-update-flake"],
            buttonName="Update flakes",
        ),
        # updates flakes once a week
        schedulers.NightlyTriggerable(
            name="update-flake-weekly",
            builderNames=["nix-update-flake"],
            hour=3,
            minute=0,
            dayOfWeek=6,
        ),
    ]
    github_api_token = read_secret_file("github-token")
    github_webhook_secret = read_secret_file("github-webhook-secret")
    c["services"] = [
        reporters.GitHubStatusPush(
            token=github_api_token,
            # Since we dynamically create build steps,
            # we use `virtual_builder_name` in the webinterface
            # so that we distinguish what has been built
            context=Interpolate("buildbot/%(prop:virtual_builder_name)s"),
        ),
    ]
    # Shape of this file:
    # [ { "name": "<worker-name>", "pass": "<worker-password>", "cores": "<cpu-cores>" } ]
    worker_config = json.loads(read_secret_file("buildbot-nix-workers"))
    credentials = os.environ.get("CREDENTIALS_DIRECTORY", ".")
    has_cachix_auth_token = False
    has_cachix_signing_key = False
    systemd_secrets = secrets.SecretInAFile(dirname=credentials)
    c["secretsProviders"] = [systemd_secrets]
    c["workers"] = []
    worker_names = []
    for item in worker_config:
        # SECURITY FIX: do not print the whole item — it contains the
        # worker's password, which would end up in the journal.
        print(f"WORKER: {item['name']}")
        cores = item.get("cores", 0)
        # Register one buildbot worker per configured core.
        for i in range(cores):
            worker_name = f"{item['name']}-{i}"
            c["workers"].append(worker.Worker(worker_name, item["pass"]))
            worker_names.append(worker_name)
    c["builders"] = [
        # Since all workers run on the same machine, we only assign one of them to do the evaluation.
        # This should prevent excessive memory usage.
        nix_eval_config(
            [worker_names[0]],
            github_token_secret="github-token",
            automerge_users=[BUILDBOT_GITHUB_USER],
        ),
        nix_build_config(worker_names, has_cachix_auth_token, has_cachix_signing_key),
        nix_update_flake_config(
            worker_names,
            f"{ORG}/{REPO}",
            github_token_secret="github-token",
            github_bot_user=BUILDBOT_GITHUB_USER,
        ),
    ]
    github_admins = os.environ.get("GITHUB_ADMINS", "").split(",")
    # SECURITY FIX: removed debug prints that leaked the GitHub OAuth
    # secret, the webhook secret and the API token into the journal.
    c["www"] = {
        "avatar_methods": [util.AvatarGitHub()],
        "port": int(os.environ.get("PORT", "1810")),
        # FIXME(review): hard-coded login password; switch to util.GitHubAuth
        # using $GITHUB_OAUTH_ID and the github-oauth-secret credential that
        # the NixOS module already provides.
        "auth": util.UserPasswordAuth({"JulienMalka": "hello"}),
        "authz": util.Authz(
            roleMatchers=[
                util.RolesFromUsername(roles=["admin"], usernames=github_admins)
            ],
            allowRules=[
                util.AnyEndpointMatcher(role="admin", defaultDeny=False),
                # BUG FIX: was role="admins", which matches no configured
                # role (only "admin" is assigned above), locking admins out
                # of all control endpoints.
                util.AnyControlEndpointMatcher(role="admin"),
            ],
        ),
        "plugins": dict(waterfall_view={}, console_view={}, grid_view={}),
        "change_hook_dialects": dict(
            github={
                # BUG FIX: use the loaded webhook secret instead of the
                # hard-coded placeholder "hello".
                "secret": github_webhook_secret,
                "strict": False,
                "token": github_api_token,
                "github_property_whitelist": "*",
            }
        ),
    }
    c["db"] = {"db_url": os.environ.get("DB_URL", "sqlite:///state.sqlite")}
    c["protocols"] = {"pb": {"port": "tcp:9989:interface=\\:\\:"}}
    c["buildbotURL"] = BUILDBOT_URL
    return c
BuildmasterConfig = build_config()

View file

@ -0,0 +1,58 @@
#!/usr/bin/env python3
import multiprocessing
import os
import socket
from io import open
from buildbot_worker.bot import Worker
from twisted.application import service
def require_env(key: str) -> str:
    """Return the value of environment variable `key`, failing loudly if unset."""
    val = os.environ.get(key)
    # BUG FIX: the message previously said "val is not set" without naming
    # the missing variable, making startup failures hard to diagnose.
    assert val is not None, f"{key} is not set"
    return val
def setup_worker(application: service.Application, id: int) -> None:
    """Create one buildbot worker service and attach it to `application`.

    Each worker gets its own base directory (`$BUILDBOT_DIR-<id>`) and a
    name of `<hostname>-<id>`; the password is shared from
    $WORKER_PASSWORD_FILE.
    """
    basedir = f"{require_env('BUILDBOT_DIR')}-{id}"
    os.makedirs(basedir, mode=0o700, exist_ok=True)
    master_url = require_env("MASTER_URL")
    workername = f"{socket.gethostname()}-{id}"
    with open(
        require_env("WORKER_PASSWORD_FILE"), "r", encoding="utf-8"
    ) as passwd_file:
        passwd = passwd_file.read().strip("\r\n")
    worker_service = Worker(
        None,
        None,
        workername,
        passwd,
        basedir,
        600,  # keepalive seconds
        connection_string=master_url,
        umask=None,
        maxdelay=300,
        numcpus=None,
        allow_shutdown=None,
    )
    worker_service.setServiceParent(application)
# note: this line is matched against to check that this is a worker
# directory; do not edit it.
application = service.Application("buildbot-worker")

# Spawn eight workers (one per core), all attached to the same application.
for i in range(8):
    setup_worker(application, i)