summaryrefslogtreecommitdiff
path: root/worker/scripts
diff options
context:
space:
mode:
author    Elizabeth Alexander Hunt <me@liz.coffee>    2025-05-11 15:36:49 -0700
committer Elizabeth Alexander Hunt <me@liz.coffee>    2025-05-11 16:17:23 -0700
commitb241180aa85ad81f4ee0dca9bf3c0429916a6a18 (patch)
tree91bac564261b210517297daf3e4937e11e4001aa /worker/scripts
parentaff695a1ff160d9e3045fbdf832c95db732c0f89 (diff)
downloadci-b241180aa85ad81f4ee0dca9bf3c0429916a6a18.tar.gz
ci-b241180aa85ad81f4ee0dca9bf3c0429916a6a18.zip
Significantly improve traceability and minor fixes.
Diffstat (limited to 'worker/scripts')
-rwxr-xr-x  worker/scripts/ansible_playbook  95
-rwxr-xr-x  worker/scripts/build_image       86
-rwxr-xr-x  worker/scripts/fetch_code        15
-rwxr-xr-x  worker/scripts/log                6
-rwxr-xr-x  worker/scripts/run_pipeline      74
5 files changed, 188 insertions, 88 deletions
diff --git a/worker/scripts/ansible_playbook b/worker/scripts/ansible_playbook
index a85995b..062680d 100755
--- a/worker/scripts/ansible_playbook
+++ b/worker/scripts/ansible_playbook
@@ -4,6 +4,7 @@ import {
BitwardenSession,
getRequiredEnv,
getStdout,
+ loggerWithPrefix,
prependWith,
type SecureNote,
} from "@liz-ci/utils";
@@ -13,41 +14,65 @@ const args: AnsiblePlaybookJobProps = {
path: getRequiredEnv("path"),
playbooks: getRequiredEnv("playbooks"),
};
-
-const bitwardenSession = new BitwardenSession();
-
-const secretFiles = await Promise.all(
- ["ansible_secrets", "ssh_key"]
- .map((secretName) =>
- bitwardenSession
- .getItem<SecureNote>(secretName)
- .then(async ({ notes: recoveredSecret }) => {
- const tempFile = await Deno.makeTempFile();
- await Deno.writeTextFile(tempFile, recoveredSecret);
- return tempFile;
- })
- ),
+const logger = loggerWithPrefix(() =>
+ `[${new Date().toISOString()}] [ansible_playbook.'${args.playbooks}']`
);
-const [ansibleSecrets, sshKey] = secretFiles;
-try {
- const volumes = [
- `${args.path}:/ansible`,
- `${sshKey}:/root/id_rsa`,
- `${ansibleSecrets}:/ansible/secrets.yml`,
- ];
- const playbookCmd = `ansible-playbook -e @secrets.yml ${args.playbooks}`;
-
- await getStdout([
- "docker",
- "run",
- ...prependWith(volumes, "-v"),
- "willhallonline/ansible:latest",
- ...playbookCmd.split(" "),
- ]);
-} finally {
- await Promise.allSettled(
- [bitwardenSession.close()].concat(
- secretFiles.map((p) => Deno.remove(p)),
- ),
+
+const run = async () => {
+ logger.log("Starting Ansible playbook job");
+
+ const bitwardenSession = new BitwardenSession();
+ const secretFiles = await Promise.all(
+ ["ansible_secrets", "ssh_key"]
+ .map((secretName) =>
+ bitwardenSession
+ .getItem<SecureNote>(secretName)
+ .then(async ({ notes: recoveredSecret }) => {
+ const tempFile = await Deno.makeTempFile();
+ await Deno.writeTextFile(tempFile, recoveredSecret);
+ logger.log(secretName, "stored at", tempFile);
+ return tempFile;
+ })
+ ),
);
+ const [ansibleSecrets, sshKey] = secretFiles;
+
+ try {
+ const volumes = [
+ `${args.path}:/ansible`,
+ `${sshKey}:/root/id_rsa`,
+ `${ansibleSecrets}:/ansible/secrets.yml`,
+ ];
+
+ const playbookCmd = `ansible-playbook -e @secrets.yml ${args.playbooks}`;
+ const deployCmd = [
+ "docker",
+ "run",
+ ...prependWith(volumes, "-v"),
+ "willhallonline/ansible:latest",
+ ...playbookCmd.split(" "),
+ ];
+ logger.log("deploying...", deployCmd);
+ await getStdout(deployCmd);
+ } finally {
+ await Promise.allSettled(
+ [bitwardenSession.close()].concat(
+ secretFiles.map((p) => {
+ logger.log(`cleanup`, p);
+ return Deno.remove(p);
+ }),
+ ),
+ );
+ }
+
+ logger.log("ansible playbook job completed");
+};
+
+if (import.meta.main) {
+ try {
+ await run();
+ } catch (e) {
+ logger.error("womp womp D:", e);
+ throw e;
+ }
}
diff --git a/worker/scripts/build_image b/worker/scripts/build_image
index 7107224..07c07c9 100755
--- a/worker/scripts/build_image
+++ b/worker/scripts/build_image
@@ -5,6 +5,7 @@ import {
BitwardenSession,
getRequiredEnv,
getStdout,
+ loggerWithPrefix,
type LoginItem,
} from "@liz-ci/utils";
@@ -19,29 +20,38 @@ const args: BuildDockerImageJobProps = {
buildTarget: getRequiredEnv("buildTarget"),
};
-const bitwardenSession = new BitwardenSession();
-const { username: registryUsername, password: registryPassword } =
- (await bitwardenSession.getItem<LoginItem>(args.registry))?.login ?? {};
-if (!(registryUsername && registryPassword)) {
- throw new Error("where's the login info bruh");
-}
-
-await getStdout(
- [
- "docker",
- "login",
- "--username",
- registryUsername,
- "--password",
- registryPassword,
- args.registry,
- ],
+const logger = loggerWithPrefix(() =>
+ `[${
+ new Date().toISOString()
+ }] [build_image.${args.repository}.${args.imageTag}]`
);
-const tag =
- `${args.registry}/${args.namespace}/${args.repository}:${args.imageTag}`;
-await getStdout(
- [
+const run = async () => {
+ logger.log("Starting Docker image build job");
+
+ const bitwardenSession = new BitwardenSession();
+ const { username: registryUsername, password: registryPassword } =
+ (await bitwardenSession.getItem<LoginItem>(args.registry))?.login ?? {};
+ if (!(registryUsername && registryPassword)) {
+ throw new Error("where's the login info bruh");
+ }
+
+ logger.log(`Logging in to Docker registry: ${args.registry}`);
+ await getStdout(
+ [
+ "docker",
+ "login",
+ "--username",
+ registryUsername,
+ "--password",
+ registryPassword,
+ args.registry,
+ ],
+ );
+
+ const tag =
+ `${args.registry}/${args.namespace}/${args.repository}:${args.imageTag}`;
+ const buildCmd = [
"docker",
"build",
"--target",
@@ -51,17 +61,31 @@ await getStdout(
"-f",
`${args.dockerfile}`,
`${args.context}`,
- ],
- {
- clearEnv: true,
- env: {},
- },
-);
+ ];
+
+ logger.log(`building`, tag, buildCmd);
+ await getStdout(
+ buildCmd,
+ {
+ clearEnv: true,
+ env: {},
+ },
+ );
-await getStdout(
- [
+ const pushCmd = [
"docker",
"push",
tag,
- ],
-);
+ ];
+ logger.log(`pushing`, pushCmd);
+ await getStdout(pushCmd);
+};
+
+if (import.meta.main) {
+ try {
+ await run();
+ } catch (e) {
+ logger.error("womp womp D:", e);
+ throw e;
+ }
+}
diff --git a/worker/scripts/fetch_code b/worker/scripts/fetch_code
index d45f6db..d3af763 100755
--- a/worker/scripts/fetch_code
+++ b/worker/scripts/fetch_code
@@ -1,6 +1,21 @@
#!/bin/bash
+export LOG_PREFIX="[fetch_code $remote @ $checkout -> $path]"
+
+log "fetch!"
git clone "$remote" "$path"
+if [ ! $? -eq 0 ]; then
+ log "D: failed to clone"
+ exit 1
+fi
+
cd "$path"
+log "checkout $checkout"
git reset --hard "$checkout"
+if [ ! $? -eq 0 ]; then
+ log "D: can't reset to $checkout"
+ cd -
+ exit 1
+fi
+
cd -
diff --git a/worker/scripts/log b/worker/scripts/log
new file mode 100755
index 0000000..cbad088
--- /dev/null
+++ b/worker/scripts/log
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+_PREFIX=""
+if [[ ! "$LOG_PREFIX" = "" ]]; then _PREFIX="$LOG_PREFIX "; fi
+
+echo "$_PREFIX[$(date '+%Y-%m-%d %H:%M:%S')] $@"
diff --git a/worker/scripts/run_pipeline b/worker/scripts/run_pipeline
index ad58573..9991001 100755
--- a/worker/scripts/run_pipeline
+++ b/worker/scripts/run_pipeline
@@ -1,28 +1,58 @@
-#!/usr/bin/env -S deno --allow-env --allow-net --allow-read
+#!/usr/bin/env -S deno run --allow-env --allow-net --allow-run --allow-read --allow-write
import { type Job, PipelineImpl } from "@liz-ci/model";
-import { getRequiredEnv, getStdout, validateIdentifier } from "@liz-ci/utils";
-
-const stages = await (Deno.readTextFile(getRequiredEnv("pipeline")))
- .then(PipelineImpl.from)
- .then((pipeline) => pipeline.getStages());
-
-const validateJob = (job: Job) => {
- Object.entries(job.arguments).forEach((e) => {
- if (!e.every(validateIdentifier)) {
- throw new Error(`job of type ${job.type} has invalid entry ${e}`);
- }
- });
+import {
+ getRequiredEnv,
+ getStdout,
+ loggerWithPrefix,
+ validateIdentifier,
+} from "@liz-ci/utils";
+
+const pipelinePath = getRequiredEnv("pipeline");
+const logger = loggerWithPrefix(() =>
+ `[${new Date().toISOString()}] [run_pipeline.${pipelinePath}]`
+);
+
+const jobValidForExecution = (job: Job) => {
+ return Object
+ .entries(job.arguments)
+    .filter((e) => {
+      if (e.every(validateIdentifier)) return false;
+      logger.error(`job of type ${job.type} has invalid args ${e}`);
+      return true;
+    })
+ .length === 0;
};
-for (const stage of stages) {
- await Promise.all(
- stage.parallelJobs.map((job) => {
- validateJob(job);
+const run = async () => {
+ logger.log("starting pipeline execution");
+
+ const stages = await (Deno.readTextFile(pipelinePath))
+ .then(PipelineImpl.from)
+ .then((pipeline) => pipeline.getStages());
+
+ for (const stage of stages) {
+ logger.log("executing stage", stage);
+
+ await Promise.all(
+ stage.parallelJobs.map(async (job, jobIdx) => {
+ logger.log(`executing job ${jobIdx}`, job);
+ if (!jobValidForExecution(job)) throw new Error("invalid job");
+
+ const result = await getStdout(job.type, { env: job.arguments });
+ logger.log(jobIdx, "outputs", { result });
+ }),
+ );
+ }
+
+ logger.log("ok! yay!");
+};
- return getStdout(job.type, {
- env: job.arguments,
- });
- }),
- );
+if (import.meta.main) {
+ try {
+ await run();
+ } catch (e) {
+ logger.error("womp womp D:", e);
+ throw e;
+ }
}