author     Elizabeth Alexander Hunt <me@liz.coffee>   2025-05-11 15:36:49 -0700
committer  Elizabeth Alexander Hunt <me@liz.coffee>   2025-05-11 16:17:23 -0700
commit     b241180aa85ad81f4ee0dca9bf3c0429916a6a18 (patch)
tree       91bac564261b210517297daf3e4937e11e4001aa
parent     aff695a1ff160d9e3045fbdf832c95db732c0f89 (diff)
download   ci-b241180aa85ad81f4ee0dca9bf3c0429916a6a18.tar.gz
           ci-b241180aa85ad81f4ee0dca9bf3c0429916a6a18.zip
Significantly improve traceability and make minor fixes.
-rw-r--r--  Dockerfile                       |  6
-rwxr-xr-x  hooks/mod.ts                     | 30
-rw-r--r--  utils/logger.ts                  |  6
-rw-r--r--  utils/mod.ts                     |  1
-rw-r--r--  utils/run.ts                     |  2
-rw-r--r--  utils/secret.ts                  | 27
-rw-r--r--  worker/Dockerfile                | 32
-rwxr-xr-x  worker/jobs/checkout_ci.run      | 54
-rwxr-xr-x  worker/scripts/ansible_playbook  | 95
-rwxr-xr-x  worker/scripts/build_image       | 86
-rwxr-xr-x  worker/scripts/fetch_code        | 15
-rwxr-xr-x  worker/scripts/log               |  6
-rwxr-xr-x  worker/scripts/run_pipeline      | 74
13 files changed, 283 insertions, 151 deletions
diff --git a/Dockerfile b/Dockerfile
index 854898e..7bb81cf 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM docker.io/library/debian:stable AS laminar
+FROM debian:stable-slim AS laminar
ENV DEBIAN_FRONTEND=noninteractive
RUN useradd --system --home-dir /var/lib/laminar --no-user-group --groups users --uid 100 laminar
RUN rm -rf /etc/cron.d/e2scrub_all
@@ -26,7 +26,7 @@ RUN cmake -B /opt/laminar/build -S /opt/laminar/src -G Ninja \
FROM denoland/deno:debian AS liz-ci
RUN apt-get update -yqq && apt-get install -yqq libcapnp-0.9.2 \
- libsqlite3-0 zlib1g curl
+ libsqlite3-0 zlib1g curl bash
COPY --from=laminar /usr/sbin/laminard /usr/sbin/laminard
COPY --from=laminar /usr/bin/laminarc /usr/bin/laminarc
COPY --from=laminar /usr/share/man/man8/laminard.8.gz /usr/share/man/man8/laminard.8.gz
@@ -38,4 +38,4 @@ COPY --from=laminar /usr/share/zsh/site-functions/_laminarc /usr/share/zsh/site-
WORKDIR /app
COPY . /app
-ENTRYPOINT [ "/bin/sh", "-c" ]
+ENTRYPOINT [ "/bin/bash", "-c" ]
diff --git a/hooks/mod.ts b/hooks/mod.ts
index bef822e..8771bea 100755
--- a/hooks/mod.ts
+++ b/hooks/mod.ts
@@ -1,23 +1,25 @@
#!/usr/bin/env -S deno run --allow-env --allow-net --allow-run
-import { getRequiredEnv, getStdout, validateIdentifier } from "@liz-ci/utils";
+import {
+ getRequiredEnv,
+ getStdout,
+ loggerWithPrefix,
+ validateIdentifier,
+} from "@liz-ci/utils";
const getRequestLogger = (req: Request) => {
const id = crypto.randomUUID();
const url = new URL(req.url);
- const method = req.method;
const getPrefix = () =>
- `[${new Date().toISOString()}] Request [${id}] @ [${url}] -X [${method}] |`;
- return {
- log: (...args: unknown[]) => console.log(getPrefix(), ...args),
- error: (...args: unknown[]) => console.error(getPrefix(), ...args),
- };
+ `[${
+ new Date().toISOString()
+    }] RequestTrace=[${id}] @ [${url.pathname}] -X [${req.method}] |`;
+ return loggerWithPrefix(getPrefix);
};
const addr = { port: 9000, hostname: "0.0.0.0" };
Deno.serve(addr, async (req) => {
const logger = getRequestLogger(req);
- logger.log("Request initiated");
+ logger.log("start");
try {
const { pathname } = new URL(req.url);
@@ -45,7 +47,7 @@ Deno.serve(addr, async (req) => {
if (pathname === "/checkout_ci") {
const { remote, rev, refname } = await req.json();
if (![remote, rev, refname].every(validateIdentifier)) {
- logger.log("invalid reqwest\n");
+ logger.log("invalid reqwest");
return new Response("invalid reqwest >:D\n", {
status: 400,
});
@@ -59,17 +61,17 @@ Deno.serve(addr, async (req) => {
`rev="${rev}"`,
`refname="${refname}"`,
]);
- logger.log(`successful queue :D\n` + laminar);
- return new Response(laminar, {
+ logger.log(`successful queue :D`, laminar);
+ return new Response(laminar + "\n", {
status: 200,
});
}
return new Response("idk what that is bro :((\n", { status: 404 });
} catch (e) {
- logger.error("Uncaught exception", e);
+ logger.error("uncaught exception", e);
return new Response("womp womp D:\n", { status: 500 });
} finally {
- logger.log("Request finished.");
+ logger.log("finish");
}
});
diff --git a/utils/logger.ts b/utils/logger.ts
new file mode 100644
index 0000000..e36d249
--- /dev/null
+++ b/utils/logger.ts
@@ -0,0 +1,6 @@
+export const loggerWithPrefix = (prefixSupplier: () => string) => {
+ return {
+ log: (...args: unknown[]) => console.log(prefixSupplier(), ...args),
+ error: (...args: unknown[]) => console.error(prefixSupplier(), ...args),
+ };
+};
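
A minimal usage sketch of the new helper; the prefix supplier is re-evaluated on every call, so each line gets a fresh timestamp. The "[example]" tag below is hypothetical, while the import path mirrors how the hooks and worker scripts consume it:

import { loggerWithPrefix } from "@liz-ci/utils";

const logger = loggerWithPrefix(() => `[${new Date().toISOString()}] [example]`);
logger.log("hello");  // stdout: "[<ISO timestamp>] [example] hello"
logger.error("oops"); // same prefix convention, routed to stderr via console.error
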
diff --git a/utils/mod.ts b/utils/mod.ts
index 8c5dc45..4e907df 100644
--- a/utils/mod.ts
+++ b/utils/mod.ts
@@ -1,3 +1,4 @@
+export * from "./logger.ts";
export * from "./env.ts";
export * from "./run.ts";
export * from "./secret.ts";
diff --git a/utils/run.ts b/utils/run.ts
index f3ce3d3..f06ef97 100644
--- a/utils/run.ts
+++ b/utils/run.ts
@@ -15,7 +15,7 @@ export const getStdout = async (
const stdoutText = new TextDecoder().decode(stdout);
const stderrText = new TextDecoder().decode(stderr);
- if (code !== 0) throw new Error(`Command failed: ${cmd}\n${stderrText}`);
+ if (code !== 0) throw new Error(`Command failed\n${stderrText}`);
return stdoutText;
};
diff --git a/utils/secret.ts b/utils/secret.ts
index 8860998..eb2054b 100644
--- a/utils/secret.ts
+++ b/utils/secret.ts
@@ -1,5 +1,8 @@
-import { getRequiredEnv, getStdout } from "./mod.ts";
+import { getRequiredEnv, getStdout, loggerWithPrefix } from "./mod.ts";
+const logger = loggerWithPrefix(() =>
+ `[${new Date().toISOString()}] [BitwardenSession]`
+);
export class BitwardenSession {
private readonly sessionInitializer: Promise<string>;
@@ -8,14 +11,25 @@ export class BitwardenSession {
this.sessionInitializer = getStdout(
`bw config server ${server} --quiet`,
- ).then(() => getStdout(`bw login --apikey --quiet`))
- .then(() => getStdout(`bw unlock --passwordenv BW_PASSWORD --raw`))
- .then((session) => session.trim());
+ )
+ .then(() => {
+ logger.log("Logging in via API");
+ return getStdout(`bw login --apikey --quiet`);
+ })
+ .then(() => {
+ logger.log("Unlocking vault in session");
+ return getStdout(`bw unlock --passwordenv BW_PASSWORD --raw`);
+ })
+ .then((session) => {
+ logger.log(`Session ${session}`);
+ return session.trim();
+ });
}
public async getItem<T extends LoginItem | SecureNote>(
secretName: string,
): Promise<T> {
+ logger.log(`Finding secret ${secretName}`);
return await this.sessionInitializer.then((session) =>
getStdout(`bw list items`, {
env: {
@@ -26,6 +40,7 @@ export class BitwardenSession {
items.find(({ name }: { name: string }) => name === secretName)
).then((item) => {
if (!item) throw new Error("Could not find bitwarden item " + secretName);
+ logger.log(`Found secret: ${secretName}`);
return item;
});
}
@@ -33,7 +48,9 @@ export class BitwardenSession {
async close(): Promise<void> {
return await this.sessionInitializer.then((session) =>
getStdout(`bw lock`, { env: { BW_SESSION: session } })
- ).then(() => {});
+ ).then(() => {
+ logger.log("Locked session");
+ });
}
}
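
For reference, a hedged sketch of the session lifecycle these new log lines trace, written the way ansible_playbook and build_image drive the class; the secret name "ssh_key" comes from ansible_playbook and the rest is illustrative:

import { BitwardenSession, type SecureNote } from "@liz-ci/utils";

const session = new BitwardenSession(); // config server -> API login -> unlock, each step now logged
try {
  // "Finding secret ssh_key" ... "Found secret: ssh_key"
  const { notes } = await session.getItem<SecureNote>("ssh_key");
  await Deno.writeTextFile(await Deno.makeTempFile(), notes);
} finally {
  await session.close(); // "Locked session"
}
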
diff --git a/worker/Dockerfile b/worker/Dockerfile
index 342feeb..94b1a3a 100644
--- a/worker/Dockerfile
+++ b/worker/Dockerfile
@@ -1,26 +1,30 @@
-FROM debian:stable-slim AS bw-builder
+FROM debian:stable-slim AS cli-dependencies
+
+# Define versions as build arguments to improve caching
+ARG BITWARDEN_VERSION=2025.4.0
RUN apt-get update && apt-get install -yqq unzip curl
-RUN curl -L -o /tmp/bw-linux.zip https://github.com/bitwarden/clients/releases/download/cli-v2025.4.0/bw-linux-2025.4.0.zip \
- && unzip /tmp/bw-linux.zip -d /tmp \
- && chmod +x /tmp/bw
+
+RUN curl -L -o /bw-linux.zip "https://github.com/bitwarden/clients/releases/download/cli-v${BITWARDEN_VERSION}/bw-linux-${BITWARDEN_VERSION}.zip"
+RUN unzip /bw-linux.zip -d / \
+ && chmod +x /bw
+
+RUN curl -L "https://get.docker.com/builds/$(uname -s)/$(uname -m)/docker-latest.tgz" > /docker.tgz
+RUN tar -xvzf /docker.tgz
FROM oci.liz.coffee/img/liz-ci:release AS worker
-RUN apt-get update && apt-get install -yqq ca-certificates curl \
- && curl "https://get.docker.com/builds/`uname -s`/`uname -m`/docker-latest.tgz" > docker.tgz \
- && tar -xvzf docker.tgz \
- && mv docker/* /usr/local/bin \
- && rm -rf docke* \
- && groupadd docker \
- && useradd --system --home-dir /var/lib/laminar \
+RUN apt-get update && apt-get install -yqq ca-certificates
+RUN groupadd docker
+RUN useradd --system --home-dir /var/lib/laminar \
--no-user-group --groups users,docker --uid 100 laminar
-COPY --from=bw-builder /tmp/bw /usr/local/bin/
+COPY --from=cli-dependencies /bw /usr/local/bin/
+COPY --from=cli-dependencies /docker/* /usr/local/bin/
RUN mkdir -p /var/lib/laminar/cfg
RUN chown -R laminar /var/lib/laminar
-RUN ln -sf /app/worker/jobs /var/lib/laminar/cfg/jobs && \
- ln -sf /app/worker/scripts /var/lib/laminar/cfg/scripts
+RUN ln -sf /app/worker/jobs /var/lib/laminar/cfg/jobs
+RUN ln -sf /app/worker/scripts /var/lib/laminar/cfg/scripts
USER laminar
WORKDIR /var/lib/laminar
diff --git a/worker/jobs/checkout_ci.run b/worker/jobs/checkout_ci.run
index 278a5b3..0945444 100755
--- a/worker/jobs/checkout_ci.run
+++ b/worker/jobs/checkout_ci.run
@@ -1,40 +1,42 @@
#!/bin/bash
-# usage: laminarc run ci remote="ssh://src.liz.coffee:2222/cgit" rev="<sha>" \
-# refname="refs/..."
-
-set -e
+# usage: laminarc run checkout_ci remote="ssh://src.liz.coffee:2222/cgit" rev="<sha>" \
+# refname="refs/..."
RUN=`date +%s`
-CWD=`$PWD`
-WORKING_DIR=`$PWD/$RUN`
+RETURN="$PWD"
+WORKING_DIR="$PWD/$RUN"
+
+export LOG_PREFIX="[checkout_ci.$RUN]"
+log "starting checkout_ci job $remote @ $refname - $rev in $WORKING_DIR"
mkdir -p "$WORKING_DIR" && cd "$WORKING_DIR"
-checkout="$rev" path="tmpsrc" fetch_code.sh
+CODE="$WORKING_DIR/src"
+checkout="$rev" path="$CODE" fetch_code
-if [[ ! -e "$WORKING_DIR/tmpsrc/.ci/ci.json" ]]; then
- echo "No Continuous Integration configured for $remote."
+CI_WORKFLOW="$CODE/.ci/ci.json"
+if [[ ! -e "$CI_WORKFLOW" ]]; then
+ log "no CI configuration found"
exit 0
fi
-PIPELINE_GENERATOR_PATH=$(jq -r '.pipeline' "$WORKING_DIR/tmpsrc/.ci/ci.json")
+PIPELINE_GENERATOR_PATH=$(jq -r '.pipeline' "$CI_WORKFLOW")
if [[ "$PIPELINE_GENERATOR_PATH" == *".."* ]]; then
- echo "Error: Path contains '..'"
+ log "no '..'"
exit 1
fi
-docker run --rm \
- --network none \
- --cap-drop ALL \
- --security-opt no-new-privileges \
- -v "$WORKING_DIR/tmpsrc/$PIPELINE_GENERATOR:/pipeline" \
- -e refname="$refname" \
- -e rev="$rev" \
- -e remote="$remote" \
- oci.liz.coffee/img/liz-ci:release \
- /pipeline \
- > "$WORKING_DIR/pipeline.json"
-
-pipeline="$WORKING_DIR/pipeline.json" run_pipeline
-
-cd "$CWD" && rm -rf "$WORKING_DIR"
+log "building the pipeline..."
+PIPELINE="$WORKING_DIR/pipeline.json"
+docker run --rm --network none --cap-drop ALL --security-opt no-new-privileges \
+ -e refname="$refname" -e rev="$rev" -e remote="$remote" \
+ -v "$CODE/$PIPELINE_GENERATOR_PATH:/pipeline_generator" \
+ oci.liz.coffee/img/liz-ci:release /pipeline_generator \
+ > "$PIPELINE"
+
+pipeline="$PIPELINE" run_pipeline
+
+log "cleaning up working directory"
+cd "$RETURN" && rm -rf "$WORKING_DIR"
+
+log "checkout_ci run done"
diff --git a/worker/scripts/ansible_playbook b/worker/scripts/ansible_playbook
index a85995b..062680d 100755
--- a/worker/scripts/ansible_playbook
+++ b/worker/scripts/ansible_playbook
@@ -4,6 +4,7 @@ import {
BitwardenSession,
getRequiredEnv,
getStdout,
+ loggerWithPrefix,
prependWith,
type SecureNote,
} from "@liz-ci/utils";
@@ -13,41 +14,65 @@ const args: AnsiblePlaybookJobProps = {
path: getRequiredEnv("path"),
playbooks: getRequiredEnv("playbooks"),
};
-
-const bitwardenSession = new BitwardenSession();
-
-const secretFiles = await Promise.all(
- ["ansible_secrets", "ssh_key"]
- .map((secretName) =>
- bitwardenSession
- .getItem<SecureNote>(secretName)
- .then(async ({ notes: recoveredSecret }) => {
- const tempFile = await Deno.makeTempFile();
- await Deno.writeTextFile(tempFile, recoveredSecret);
- return tempFile;
- })
- ),
+const logger = loggerWithPrefix(() =>
+ `[${new Date().toISOString()}] [ansible_playbook.'${args.playbooks}']`
);
-const [ansibleSecrets, sshKey] = secretFiles;
-try {
- const volumes = [
- `${args.path}:/ansible`,
- `${sshKey}:/root/id_rsa`,
- `${ansibleSecrets}:/ansible/secrets.yml`,
- ];
- const playbookCmd = `ansible-playbook -e @secrets.yml ${args.playbooks}`;
-
- await getStdout([
- "docker",
- "run",
- ...prependWith(volumes, "-v"),
- "willhallonline/ansible:latest",
- ...playbookCmd.split(" "),
- ]);
-} finally {
- await Promise.allSettled(
- [bitwardenSession.close()].concat(
- secretFiles.map((p) => Deno.remove(p)),
- ),
+
+const run = async () => {
+ logger.log("Starting Ansible playbook job");
+
+ const bitwardenSession = new BitwardenSession();
+ const secretFiles = await Promise.all(
+ ["ansible_secrets", "ssh_key"]
+ .map((secretName) =>
+ bitwardenSession
+ .getItem<SecureNote>(secretName)
+ .then(async ({ notes: recoveredSecret }) => {
+ const tempFile = await Deno.makeTempFile();
+ await Deno.writeTextFile(tempFile, recoveredSecret);
+ logger.log(secretName, "stored at", tempFile);
+ return tempFile;
+ })
+ ),
);
+ const [ansibleSecrets, sshKey] = secretFiles;
+
+ try {
+ const volumes = [
+ `${args.path}:/ansible`,
+ `${sshKey}:/root/id_rsa`,
+ `${ansibleSecrets}:/ansible/secrets.yml`,
+ ];
+
+ const playbookCmd = `ansible-playbook -e @secrets.yml ${args.playbooks}`;
+ const deployCmd = [
+ "docker",
+ "run",
+ ...prependWith(volumes, "-v"),
+ "willhallonline/ansible:latest",
+ ...playbookCmd.split(" "),
+ ];
+ logger.log("deploying...", deployCmd);
+ await getStdout(deployCmd);
+ } finally {
+ await Promise.allSettled(
+ [bitwardenSession.close()].concat(
+ secretFiles.map((p) => {
+ logger.log(`cleanup`, p);
+ return Deno.remove(p);
+ }),
+ ),
+ );
+ }
+
+ logger.log("ansible playbook job completed");
+};
+
+if (import.meta.main) {
+ try {
+ await run();
+ } catch (e) {
+ logger.error("womp womp D:", e);
+ throw e;
+ }
}
diff --git a/worker/scripts/build_image b/worker/scripts/build_image
index 7107224..07c07c9 100755
--- a/worker/scripts/build_image
+++ b/worker/scripts/build_image
@@ -5,6 +5,7 @@ import {
BitwardenSession,
getRequiredEnv,
getStdout,
+ loggerWithPrefix,
type LoginItem,
} from "@liz-ci/utils";
@@ -19,29 +20,38 @@ const args: BuildDockerImageJobProps = {
buildTarget: getRequiredEnv("buildTarget"),
};
-const bitwardenSession = new BitwardenSession();
-const { username: registryUsername, password: registryPassword } =
- (await bitwardenSession.getItem<LoginItem>(args.registry))?.login ?? {};
-if (!(registryUsername && registryPassword)) {
- throw new Error("where's the login info bruh");
-}
-
-await getStdout(
- [
- "docker",
- "login",
- "--username",
- registryUsername,
- "--password",
- registryPassword,
- args.registry,
- ],
+const logger = loggerWithPrefix(() =>
+ `[${
+ new Date().toISOString()
+ }] [build_image.${args.repository}.${args.imageTag}]`
);
-const tag =
- `${args.registry}/${args.namespace}/${args.repository}:${args.imageTag}`;
-await getStdout(
- [
+const run = async () => {
+ logger.log("Starting Docker image build job");
+
+ const bitwardenSession = new BitwardenSession();
+ const { username: registryUsername, password: registryPassword } =
+ (await bitwardenSession.getItem<LoginItem>(args.registry))?.login ?? {};
+ if (!(registryUsername && registryPassword)) {
+ throw new Error("where's the login info bruh");
+ }
+
+ logger.log(`Logging in to Docker registry: ${args.registry}`);
+ await getStdout(
+ [
+ "docker",
+ "login",
+ "--username",
+ registryUsername,
+ "--password",
+ registryPassword,
+ args.registry,
+ ],
+ );
+
+ const tag =
+ `${args.registry}/${args.namespace}/${args.repository}:${args.imageTag}`;
+ const buildCmd = [
"docker",
"build",
"--target",
@@ -51,17 +61,31 @@ await getStdout(
"-f",
`${args.dockerfile}`,
`${args.context}`,
- ],
- {
- clearEnv: true,
- env: {},
- },
-);
+ ];
+
+ logger.log(`building`, tag, buildCmd);
+ await getStdout(
+ buildCmd,
+ {
+ clearEnv: true,
+ env: {},
+ },
+ );
-await getStdout(
- [
+ const pushCmd = [
"docker",
"push",
tag,
- ],
-);
+ ];
+ logger.log(`pushing`, pushCmd);
+ await getStdout(pushCmd);
+};
+
+if (import.meta.main) {
+ try {
+ await run();
+ } catch (e) {
+ logger.error("womp womp D:", e);
+ throw e;
+ }
+}
diff --git a/worker/scripts/fetch_code b/worker/scripts/fetch_code
index d45f6db..d3af763 100755
--- a/worker/scripts/fetch_code
+++ b/worker/scripts/fetch_code
@@ -1,6 +1,21 @@
#!/bin/bash
+export LOG_PREFIX="[fetch_code $remote @ $checkout -> $path]"
+
+log "fetch!"
git clone "$remote" "$path"
+if [ ! $? -eq 0 ]; then
+ log "D: failed to clone"
+ exit 1
+fi
+
cd "$path"
+log "checkout $checkout"
git reset --hard "$checkout"
+if [ ! $? -eq 0 ]; then
+ log "D: can't reset to $checkout"
+ cd -
+ exit 1
+fi
+
cd -
diff --git a/worker/scripts/log b/worker/scripts/log
new file mode 100755
index 0000000..cbad088
--- /dev/null
+++ b/worker/scripts/log
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+_PREFIX=""
+if [[ ! "$LOG_PREFIX" = "" ]]; then _PREFIX="$LOG_PREFIX "; fi
+
+echo "$_PREFIX[$(date '+%Y-%m-%d %H:%M:%S')] $@"
diff --git a/worker/scripts/run_pipeline b/worker/scripts/run_pipeline
index ad58573..9991001 100755
--- a/worker/scripts/run_pipeline
+++ b/worker/scripts/run_pipeline
@@ -1,28 +1,58 @@
-#!/usr/bin/env -S deno --allow-env --allow-net --allow-read
+#!/usr/bin/env -S deno run --allow-env --allow-net --allow-run --allow-read --allow-write
import { type Job, PipelineImpl } from "@liz-ci/model";
-import { getRequiredEnv, getStdout, validateIdentifier } from "@liz-ci/utils";
-
-const stages = await (Deno.readTextFile(getRequiredEnv("pipeline")))
- .then(PipelineImpl.from)
- .then((pipeline) => pipeline.getStages());
-
-const validateJob = (job: Job) => {
- Object.entries(job.arguments).forEach((e) => {
- if (!e.every(validateIdentifier)) {
- throw new Error(`job of type ${job.type} has invalid entry ${e}`);
- }
- });
+import {
+ getRequiredEnv,
+ getStdout,
+ loggerWithPrefix,
+ validateIdentifier,
+} from "@liz-ci/utils";
+
+const pipelinePath = getRequiredEnv("pipeline");
+const logger = loggerWithPrefix(() =>
+ `[${new Date().toISOString()}] [run_pipeline.${pipelinePath}]`
+);
+
+const jobValidForExecution = (job: Job) => {
+  return Object
+    .entries(job.arguments)
+    .filter((e) => {
+      // keep only entries with invalid values; the job may run when none remain
+      if (e.every(validateIdentifier)) return false;
+      logger.error(`job of type ${job.type} has invalid args ${e}`);
+      return true;
+    })
+    .length === 0;
};
-for (const stage of stages) {
- await Promise.all(
- stage.parallelJobs.map((job) => {
- validateJob(job);
+const run = async () => {
+ logger.log("starting pipeline execution");
+
+ const stages = await (Deno.readTextFile(pipelinePath))
+ .then(PipelineImpl.from)
+ .then((pipeline) => pipeline.getStages());
+
+ for (const stage of stages) {
+ logger.log("executing stage", stage);
+
+ await Promise.all(
+ stage.parallelJobs.map(async (job, jobIdx) => {
+ logger.log(`executing job ${jobIdx}`, job);
+ if (!jobValidForExecution(job)) throw new Error("invalid job");
+
+ const result = await getStdout(job.type, { env: job.arguments });
+ logger.log(jobIdx, "outputs", { result });
+ }),
+ );
+ }
+
+ logger.log("ok! yay!");
+};
- return getStdout(job.type, {
- env: job.arguments,
- });
- }),
- );
+if (import.meta.main) {
+ try {
+ await run();
+ } catch (e) {
+ logger.error("womp womp D:", e);
+ throw e;
+ }
}