summary refs log tree commit diff
path: root/worker/scripts
diff options
context:
space:
mode:
author    Elizabeth Alexander Hunt <me@liz.coffee>  2025-05-12 09:40:12 -0700
committer Elizabeth <me@liz.coffee>                 2025-05-26 14:15:42 -0700
commit    d51c9d74857aca3c2f172609297266968bc7f809 (patch)
tree      64327f9cc4219729aa11af32d7d4c70cddfc2292 /worker/scripts
parent    30729a0cf707d9022bae0a7baaba77379dc31fd5 (diff)
download  ci-d51c9d74857aca3c2f172609297266968bc7f809.tar.gz
          ci-d51c9d74857aca3c2f172609297266968bc7f809.zip
The big refactor TM
Diffstat (limited to 'worker/scripts')
-rwxr-xr-x  worker/scripts/ansible_playbook     167
-rwxr-xr-x  worker/scripts/build_docker_image   161
-rwxr-xr-x  worker/scripts/build_image           91
-rwxr-xr-x  worker/scripts/fetch_code             6
-rwxr-xr-x  worker/scripts/run_pipeline          58
5 files changed, 265 insertions, 218 deletions
diff --git a/worker/scripts/ansible_playbook b/worker/scripts/ansible_playbook
index 062680d..e9e967c 100755
--- a/worker/scripts/ansible_playbook
+++ b/worker/scripts/ansible_playbook
@@ -1,78 +1,113 @@
#!/usr/bin/env -S deno run --allow-env --allow-net --allow-run --allow-read --allow-write
import {
- BitwardenSession,
- getRequiredEnv,
+ Either,
+ getRequiredEnvVars,
getStdout,
- loggerWithPrefix,
+ type IEither,
+ LogMetricTraceable,
+ Metric,
prependWith,
- type SecureNote,
-} from "@liz-ci/utils";
-import type { AnsiblePlaybookJobProps } from "@liz-ci/model";
+ TraceUtil,
+} from "@emprespresso/pengueno";
+import type { AnsiblePlaybookJob } from "@emprespresso/ci-model";
+import { Bitwarden, type SecureNote } from "@emprespresso/ci-worker";
-const args: AnsiblePlaybookJobProps = {
- path: getRequiredEnv("path"),
- playbooks: getRequiredEnv("playbooks"),
-};
-const logger = loggerWithPrefix(() =>
- `[${new Date().toISOString()}] [ansible_playbook.'${args.playbooks}']`
-);
+const eitherJob = getRequiredEnvVars([
+ "path",
+ "playbooks",
+])
+ .mapRight((baseArgs) => (
+ <AnsiblePlaybookJob> {
+ type: "ansible_playbook",
+ arguments: baseArgs,
+ }
+ ));
-const run = async () => {
- logger.log("Starting Ansible playbook job");
+const eitherVault = Bitwarden.getConfigFromEnvironment()
+ .mapRight((config) => new Bitwarden(config));
- const bitwardenSession = new BitwardenSession();
- const secretFiles = await Promise.all(
- ["ansible_secrets", "ssh_key"]
- .map((secretName) =>
- bitwardenSession
- .getItem<SecureNote>(secretName)
- .then(async ({ notes: recoveredSecret }) => {
- const tempFile = await Deno.makeTempFile();
- await Deno.writeTextFile(tempFile, recoveredSecret);
- logger.log(secretName, "stored at", tempFile);
- return tempFile;
- })
- ),
- );
- const [ansibleSecrets, sshKey] = secretFiles;
+const playbookMetric = Metric.fromName("ansiblePlaybook.playbook");
+await LogMetricTraceable.from(eitherJob)
+ .bimap(TraceUtil.withTrace("ansible_playbook"))
+ .bimap(TraceUtil.withMetricTrace(playbookMetric))
+ .peek((tEitherJob) =>
+ tEitherJob.trace.trace("starting ansible playbook job! (⑅˘꒳˘)")
+ )
+ .map((tEitherJob) =>
+ tEitherJob.get().flatMapAsync((job) =>
+ eitherVault.flatMapAsync(async (vault) => {
+ const eitherKey = await vault.unlock(tEitherJob);
+ return eitherKey.mapRight((key) => ({ job, key, vault }));
+ })
+ )
+ )
+ .map(async (tEitherJobVault) => {
+ tEitherJobVault.trace.trace(
+ "getting ansible secwets uwu~",
+ );
+ const eitherJobVault = await tEitherJobVault.get();
- try {
- const volumes = [
- `${args.path}:/ansible`,
- `${sshKey}:/root/id_rsa`,
- `${ansibleSecrets}:/ansible/secrets.yml`,
- ];
+ const eitherSshKey = await eitherJobVault
+ .flatMapAsync(({ key, vault }) =>
+ vault.fetchSecret<SecureNote>(tEitherJobVault, key, "ssh_key")
+ );
+ const eitherSshKeyFile = await eitherSshKey.mapRight(({ notes }) => notes)
+ .flatMapAsync(saveToTempFile);
+ const eitherAnsibleSecrets = await eitherJobVault
+ .flatMapAsync(({ key, vault }) =>
+ vault.fetchSecret<SecureNote>(tEitherJobVault, key, "ansible_secrets")
+ );
+ const eitherAnsibleSecretsFile = await eitherAnsibleSecrets.mapRight((
+ { notes },
+ ) => notes).flatMapAsync(saveToTempFile);
- const playbookCmd = `ansible-playbook -e @secrets.yml ${args.playbooks}`;
- const deployCmd = [
- "docker",
- "run",
- ...prependWith(volumes, "-v"),
- "willhallonline/ansible:latest",
- ...playbookCmd.split(" "),
- ];
- logger.log("deploying...", deployCmd);
- await getStdout(deployCmd);
- } finally {
- await Promise.allSettled(
- [bitwardenSession.close()].concat(
- secretFiles.map((p) => {
- logger.log(`cleanup`, p);
- return Deno.remove(p);
- }),
- ),
+ return eitherJobVault.flatMapAsync(async ({ job, vault, key }) => {
+ const eitherLocked = await vault.lock(tEitherJobVault, key);
+ return eitherLocked.flatMap((_locked) =>
+ eitherSshKeyFile.flatMap((sshKeyFile) =>
+ eitherAnsibleSecretsFile.mapRight((secretsFile) => ({
+ job,
+ sshKeyFile,
+ secretsFile,
+ }))
+ )
+ );
+ });
+ })
+ .map(async (tEitherJobAndSecrets) => {
+ const eitherJobAndSecrets = await tEitherJobAndSecrets.get();
+ return eitherJobAndSecrets.flatMapAsync(
+ ({ job, sshKeyFile, secretsFile }) => {
+ const volumes = [
+ `${job.arguments.path}:/ansible`,
+ `${sshKeyFile}:/root/id_rsa`,
+ `${secretsFile}:/ansible/secrets.yml`,
+ ];
+ const playbookCmd =
+ `ansible-playbook -e @secrets.yml ${job.arguments.playbooks}`;
+ const deployCmd = [
+ "docker",
+ "run",
+ ...prependWith(volumes, "-v"),
+ "willhallonline/ansible:latest",
+ ...playbookCmd.split(" "),
+ ];
+ tEitherJobAndSecrets.trace.trace(
+ `running ansible magic~ (◕ᴗ◕✿) ${deployCmd}`,
+ );
+ return tEitherJobAndSecrets.move(deployCmd).map(getStdout).get();
+ },
);
- }
+ })
+ .get();
- logger.log("ansible playbook job completed");
-};
-
-if (import.meta.main) {
- try {
- await run();
- } catch (e) {
- logger.error("womp womp D:", e);
- throw e;
- }
-}
+const saveToTempFile = (text: string): Promise<IEither<Error, string>> =>
+ Either.fromFailableAsync(
+ Deno.makeTempDir({ dir: Deno.cwd() })
+ .then((dir) => Deno.makeTempFile({ dir }))
+ .then(async (f) => {
+ await Deno.writeTextFile(f, text);
+ return f;
+ }),
+ );
diff --git a/worker/scripts/build_docker_image b/worker/scripts/build_docker_image
new file mode 100755
index 0000000..1dd0c3d
--- /dev/null
+++ b/worker/scripts/build_docker_image
@@ -0,0 +1,161 @@
+#!/usr/bin/env -S deno run --allow-env --allow-net --allow-run
+
+import {
+ getRequiredEnvVars,
+ getStdout,
+ LogLevel,
+ LogMetricTraceable,
+ Metric,
+ TraceUtil,
+} from "@emprespresso/pengueno";
+import type {
+ BuildDockerImageJob,
+ BuildDockerImageJobProps,
+} from "@emprespresso/ci-model";
+import { Bitwarden, type LoginItem } from "@emprespresso/ci-worker";
+
+const eitherJob = getRequiredEnvVars([
+ "registry",
+ "namespace",
+ "repository",
+ "imageTag",
+ "context",
+ "dockerfile",
+ "buildTarget",
+])
+ .mapRight((baseArgs) => (
+ <BuildDockerImageJob> {
+ type: "build_docker_image",
+ arguments: baseArgs,
+ }
+ ));
+const eitherVault = Bitwarden.getConfigFromEnvironment()
+ .mapRight((config) => new Bitwarden(config));
+
+const buildImageMetric = Metric.fromName("dockerImage.build");
+const loginMetric = Metric.fromName("dockerRegistry.login");
+await LogMetricTraceable.from(eitherJob)
+ .bimap(
+ (tEitherJob) => {
+ const trace = "build_docker_image." +
+ tEitherJob.get().fold((_, v) => v?.buildTarget ?? "");
+ return [tEitherJob.get(), trace];
+ },
+ )
+ .bimap(TraceUtil.withMetricTrace(buildImageMetric))
+ .bimap(TraceUtil.withMetricTrace(loginMetric))
+ .peek((tEitherJob) =>
+ tEitherJob.trace.trace("starting docker image build job! (⑅˘꒳˘)")
+ )
+ .map((tEitherJob) =>
+ tEitherJob.get()
+ .flatMapAsync((job) =>
+ eitherVault.flatMapAsync(async (vault) => {
+ const eitherKey = await vault.unlock(tEitherJob);
+ return eitherKey.mapRight((key) => ({ job, key, vault }));
+ })
+ )
+ )
+ .map(async (tEitherJobVault) => {
+ tEitherJobVault.trace.trace("logging into the wegistwy uwu~");
+ const eitherJobVault = await tEitherJobVault.get();
+ const eitherDockerRegistryLoginItem = await eitherJobVault.flatMapAsync((
+ { job, key, vault },
+ ) =>
+ vault.fetchSecret<LoginItem>(tEitherJobVault, key, job.arguments.registry)
+ .finally(() => vault.lock(tEitherJobVault, key))
+ );
+ return eitherDockerRegistryLoginItem.flatMapAsync(({ login }) =>
+ eitherJobVault.flatMapAsync(async ({ job }) => {
+ const loginCommand = getDockerLoginCommand(
+ login.username,
+ job.arguments.registry,
+ );
+ const eitherLoggedIn = await tEitherJobVault.move(loginCommand).map((
+ tLoginCmd,
+ ) =>
+ getStdout(tLoginCmd, { env: { REGISTRY_PASSWORD: login.password } })
+ ).get();
+ return eitherLoggedIn.moveRight(job);
+ })
+ );
+ })
+ .peek(async (tEitherWithAuthdRegistry) => {
+ const eitherWithAuthdRegistry = await tEitherWithAuthdRegistry.get();
+ return tEitherWithAuthdRegistry.trace.trace(
+ eitherWithAuthdRegistry.fold((err, _val) =>
+ loginMetric[err ? "failure" : "success"]
+ ),
+ );
+ })
+ .map(async (tEitherWithAuthdRegistryBuildJob) => {
+ const eitherWithAuthdRegistryBuildJob =
+ await tEitherWithAuthdRegistryBuildJob.get();
+ tEitherWithAuthdRegistryBuildJob.trace.trace(
+ "finally building the image~ (◕ᴗ◕✿)",
+ );
+ const eitherBuiltImage = await eitherWithAuthdRegistryBuildJob.flatMapAsync(
+ (job) =>
+ tEitherWithAuthdRegistryBuildJob
+ .move(getBuildCommand(job.arguments))
+ .map((tBuildCmd) =>
+ getStdout(tBuildCmd, {
+ env: {},
+ clearEnv: true,
+ })
+ )
+ .get(),
+ );
+ return eitherBuiltImage.flatMap((buildOutput) =>
+ eitherWithAuthdRegistryBuildJob.mapRight((job) => ({ buildOutput, job }))
+ );
+ })
+ .peek(async (tEitherWithBuiltImage) => {
+ const eitherWithBuiltImage = await tEitherWithBuiltImage.get();
+ eitherWithBuiltImage.fold((err, val) => {
+ tEitherWithBuiltImage.trace.trace(
+ buildImageMetric[err ? "failure" : "success"],
+ );
+ if (err) {
+ tEitherWithBuiltImage.trace.addTrace(LogLevel.ERROR).trace(
+ `oh nyoo we couldn't buiwd the img :(( ${err}`,
+ );
+ return;
+ }
+ tEitherWithBuiltImage.trace.addTrace("buildOutput").trace(val);
+ });
+ })
+ .map(async (tEitherWithBuiltImage) => {
+ const eitherWithBuiltImage = await tEitherWithBuiltImage.get();
+ return eitherWithBuiltImage
+ .mapRight(({ job }) =>
+ tEitherWithBuiltImage.move(getPushCommand(job.arguments.imageTag))
+ )
+ .flatMapAsync((tPushCommand) => getStdout(tPushCommand));
+ })
+ .get();
+
+const getDockerLoginCommand = (username: string, registry: string) =>
+ `docker login --username ${username} --password $REGISTRY_PASSWORD ${registry}`
+ .split(" ");
+
+const getBuildCommand = (
+ {
+ buildTarget,
+ imageTag,
+ dockerfile,
+ context,
+ }: BuildDockerImageJobProps,
+) => [
+ "docker",
+ "build",
+ "--target",
+ buildTarget,
+ "-t",
+ imageTag,
+ "-f",
+ dockerfile,
+ context,
+];
+
+const getPushCommand = (tag: string) => ["docker", "push", tag];
diff --git a/worker/scripts/build_image b/worker/scripts/build_image
deleted file mode 100755
index 07c07c9..0000000
--- a/worker/scripts/build_image
+++ /dev/null
@@ -1,91 +0,0 @@
-#!/usr/bin/env -S deno run --allow-env --allow-net --allow-run
-
-import type { BuildDockerImageJobProps } from "@liz-ci/model";
-import {
- BitwardenSession,
- getRequiredEnv,
- getStdout,
- loggerWithPrefix,
- type LoginItem,
-} from "@liz-ci/utils";
-
-const args: BuildDockerImageJobProps = {
- registry: getRequiredEnv("registry"),
- namespace: getRequiredEnv("namespace"),
- repository: getRequiredEnv("repository"),
- imageTag: getRequiredEnv("imageTag"),
-
- context: getRequiredEnv("context"),
- dockerfile: getRequiredEnv("dockerfile"),
- buildTarget: getRequiredEnv("buildTarget"),
-};
-
-const logger = loggerWithPrefix(() =>
- `[${
- new Date().toISOString()
- }] [build_image.${args.repository}.${args.imageTag}]`
-);
-
-const run = async () => {
- logger.log("Starting Docker image build job");
-
- const bitwardenSession = new BitwardenSession();
- const { username: registryUsername, password: registryPassword } =
- (await bitwardenSession.getItem<LoginItem>(args.registry))?.login ?? {};
- if (!(registryUsername && registryPassword)) {
- throw new Error("where's the login info bruh");
- }
-
- logger.log(`Logging in to Docker registry: ${args.registry}`);
- await getStdout(
- [
- "docker",
- "login",
- "--username",
- registryUsername,
- "--password",
- registryPassword,
- args.registry,
- ],
- );
-
- const tag =
- `${args.registry}/${args.namespace}/${args.repository}:${args.imageTag}`;
- const buildCmd = [
- "docker",
- "build",
- "--target",
- args.buildTarget,
- "-t",
- tag,
- "-f",
- `${args.dockerfile}`,
- `${args.context}`,
- ];
-
- logger.log(`building`, tag, buildCmd);
- await getStdout(
- buildCmd,
- {
- clearEnv: true,
- env: {},
- },
- );
-
- const pushCmd = [
- "docker",
- "push",
- tag,
- ];
- logger.log(`pushing`, pushCmd);
- await getStdout(pushCmd);
-};
-
-if (import.meta.main) {
- try {
- await run();
- } catch (e) {
- logger.error("womp womp D:", e);
- throw e;
- }
-}
diff --git a/worker/scripts/fetch_code b/worker/scripts/fetch_code
index d3af763..cc2d561 100755
--- a/worker/scripts/fetch_code
+++ b/worker/scripts/fetch_code
@@ -2,15 +2,15 @@
export LOG_PREFIX="[fetch_code $remote @ $checkout -> $path]"
-log "fetch!"
+log "getting the codez~ time to fetch!"
git clone "$remote" "$path"
if [ ! $? -eq 0 ]; then
- log "D: failed to clone"
+ log "D: oh nyo! couldn't clone the repo"
exit 1
fi
cd "$path"
-log "checkout $checkout"
+log "switching to $checkout~"
git reset --hard "$checkout"
if [ ! $? -eq 0 ]; then
log "D: can't reset to $checkout"
diff --git a/worker/scripts/run_pipeline b/worker/scripts/run_pipeline
deleted file mode 100755
index 9991001..0000000
--- a/worker/scripts/run_pipeline
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env -S deno run --allow-env --allow-net --allow-run --allow-read --allow-write
-
-import { type Job, PipelineImpl } from "@liz-ci/model";
-import {
- getRequiredEnv,
- getStdout,
- loggerWithPrefix,
- validateIdentifier,
-} from "@liz-ci/utils";
-
-const pipelinePath = getRequiredEnv("pipeline");
-const logger = loggerWithPrefix(() =>
- `[${new Date().toISOString()}] [run_pipeline.${pipelinePath}]`
-);
-
-const jobValidForExecution = (job: Job) => {
- return Object
- .entries(job.arguments)
- .filter((e) => {
- if (e.every(validateIdentifier)) return true;
- logger.error(`job of type ${job.type} has invalid args ${e}`);
- return false;
- })
- .length === 0;
-};
-
-const run = async () => {
- logger.log("starting pipeline execution");
-
- const stages = await (Deno.readTextFile(pipelinePath))
- .then(PipelineImpl.from)
- .then((pipeline) => pipeline.getStages());
-
- for (const stage of stages) {
- logger.log("executing stage", stage);
-
- await Promise.all(
- stage.parallelJobs.map(async (job, jobIdx) => {
- logger.log(`executing job ${jobIdx}`, job);
- if (!jobValidForExecution(job)) throw new Error("invalid job");
-
- const result = await getStdout(job.type, { env: job.arguments });
- logger.log(jobIdx, "outputs", { result });
- }),
- );
- }
-
- logger.log("ok! yay!");
-};
-
-if (import.meta.main) {
- try {
- await run();
- } catch (e) {
- logger.error("womp womp D:", e);
- throw e;
- }
-}