path: root/worker
author    Elizabeth Alexander Hunt <me@liz.coffee>    2025-05-12 09:40:12 -0700
committer Elizabeth <me@liz.coffee>                   2025-05-26 14:15:42 -0700
commit    d51c9d74857aca3c2f172609297266968bc7f809 (patch)
tree      64327f9cc4219729aa11af32d7d4c70cddfc2292 /worker
parent    30729a0cf707d9022bae0a7baaba77379dc31fd5 (diff)
download  ci-d51c9d74857aca3c2f172609297266968bc7f809.tar.gz
          ci-d51c9d74857aca3c2f172609297266968bc7f809.zip
The big refactor TM
Diffstat (limited to 'worker')
-rw-r--r--   worker/Dockerfile                    8
-rw-r--r--   worker/deno.json                     4
-rw-r--r--   worker/executor/job.ts              42
-rw-r--r--   worker/executor/mod.ts               2
-rw-r--r--   worker/executor/pipeline.ts         53
-rwxr-xr-x   worker/jobs/checkout_ci.run         42
-rw-r--r--   worker/jobs/ci_pipeline.run        166
-rw-r--r--   worker/mod.ts                        2
-rwxr-xr-x   worker/scripts/ansible_playbook    167
-rwxr-xr-x   worker/scripts/build_docker_image  161
-rwxr-xr-x   worker/scripts/build_image          91
-rwxr-xr-x   worker/scripts/fetch_code            6
-rwxr-xr-x   worker/scripts/run_pipeline         58
-rw-r--r--   worker/secret/bitwarden.ts         153
-rw-r--r--   worker/secret/ivault.ts             24
-rw-r--r--   worker/secret/mod.ts                 2
16 files changed, 715 insertions(+), 266 deletions(-)
diff --git a/worker/Dockerfile b/worker/Dockerfile
index ea393ed..a3e2e12 100644
--- a/worker/Dockerfile
+++ b/worker/Dockerfile
@@ -1,4 +1,4 @@
-FROM debian:stable-slim AS cli-dependencies
+FROM debian:stable-slim AS worker-dependencies
# Define versions as build arguments to improve caching
ARG BITWARDEN_VERSION=2025.4.0
@@ -11,15 +11,15 @@ RUN unzip /bw-linux.zip -d / \
RUN curl -L "https://get.docker.com/builds/$(uname -s)/$(uname -m)/docker-latest.tgz" > /docker.tgz
RUN tar -xvzf /docker.tgz
-FROM oci.liz.coffee/img/liz-ci:release AS worker
+FROM oci.liz.coffee/@emprespresso/ci-base:release AS worker
RUN apt-get update && apt-get install -yqq git jq
RUN groupadd docker
RUN useradd --system --home-dir /var/lib/laminar \
--no-user-group --groups users,docker --uid 100 laminar
-COPY --from=cli-dependencies /bw /usr/local/bin/
-COPY --from=cli-dependencies /docker/* /usr/local/bin/
+COPY --from=worker-dependencies /bw /usr/local/bin/
+COPY --from=worker-dependencies /docker/* /usr/local/bin/
RUN mkdir -p /var/lib/laminar/cfg
RUN cp -r /app/worker/* /var/lib/laminar/cfg
diff --git a/worker/deno.json b/worker/deno.json
index 5636d0a..77c65de 100644
--- a/worker/deno.json
+++ b/worker/deno.json
@@ -1,4 +1,4 @@
{
- "name": "@liz-ci/worker",
- "exports": "./mod.ts"
+ "name": "@emprespresso/ci-worker",
+ "exports": "./mod.ts"
}
diff --git a/worker/executor/job.ts b/worker/executor/job.ts
new file mode 100644
index 0000000..76f0e0c
--- /dev/null
+++ b/worker/executor/job.ts
@@ -0,0 +1,42 @@
+import {
+ getStdout,
+ type ITraceable,
+ LogLevel,
+ type LogMetricTraceSupplier,
+ memoize,
+ Metric,
+ TraceUtil,
+ validateExecutionEntries,
+} from "@emprespresso/pengueno";
+import type { Job } from "@emprespresso/ci-model";
+
+const jobTypeMetric = memoize((type: string) => Metric.fromName(`run.${type}`));
+export const executeJob = (tJob: ITraceable<Job, LogMetricTraceSupplier>) =>
+ tJob.bimap(TraceUtil.withMetricTrace(jobTypeMetric(tJob.get().type)))
+ .peek((tJob) =>
+ tJob.trace.trace(
+ `let's do this little job ok!! ${tJob.get()}`,
+ )
+ )
+ .map((tJob) =>
+ validateExecutionEntries(tJob.get().arguments)
+ .mapLeft((badEntries) => {
+ tJob.trace.addTrace(LogLevel.ERROR).trace(
+ badEntries.toString(),
+ );
+ return new Error("invalid job arguments");
+ })
+ .flatMapAsync((args) =>
+ getStdout(tJob.move(tJob.get().type), { env: args })
+ )
+ )
+ .peek(
+ TraceUtil.promiseify((q) =>
+ q.trace.trace(
+ q.get().fold((err, _val) =>
+ jobTypeMetric(tJob.get().type)[err ? "failure" : "success"]
+ ),
+ )
+ ),
+ )
+ .get();
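
For orientation, the flow of executeJob above, minus the ITraceable/metric plumbing, boils down to: validate the job's argument entries, then run the job's `type` as a command with those arguments exported as environment variables. A rough standalone sketch (the validation rule and names here are assumptions, not the pengueno implementations):

    // Sketch only: mirrors executeJob's shape without @emprespresso/pengueno.
    interface Job {
      type: string;                       // e.g. "fetch_code" -- the script to run
      arguments: Record<string, string>;  // passed to the script as env vars
    }

    // Placeholder stand-in for validateExecutionEntries.
    const SAFE = /^[\w@./:~ -]*$/;

    async function runJob(job: Job): Promise<string> {
      const bad = Object.entries(job.arguments)
        .filter(([k, v]) => !SAFE.test(k) || !SAFE.test(v));
      if (bad.length > 0) throw new Error(`invalid job arguments: ${bad}`);

      const out = await new Deno.Command(job.type, { env: job.arguments }).output();
      if (!out.success) throw new Error(`${job.type} exited with ${out.code}`);
      return new TextDecoder().decode(out.stdout);
    }
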
diff --git a/worker/executor/mod.ts b/worker/executor/mod.ts
new file mode 100644
index 0000000..944ab7d
--- /dev/null
+++ b/worker/executor/mod.ts
@@ -0,0 +1,2 @@
+export * from "./job.ts";
+export * from "./pipeline.ts";
diff --git a/worker/executor/pipeline.ts b/worker/executor/pipeline.ts
new file mode 100644
index 0000000..a1aa7c3
--- /dev/null
+++ b/worker/executor/pipeline.ts
@@ -0,0 +1,53 @@
+import {
+ Either,
+ type IEither,
+ type ITraceable,
+ type LogMetricTraceSupplier,
+ Metric,
+ TraceUtil,
+} from "@emprespresso/pengueno";
+import type { Job, JobArgT, Pipeline } from "@emprespresso/ci-model";
+import { executeJob } from "./mod.ts";
+
+const pipelinesMetric = Metric.fromName("pipelines");
+export const executePipeline = (
+ tPipeline: ITraceable<Pipeline, LogMetricTraceSupplier>,
+ baseEnv?: JobArgT,
+): Promise<IEither<Error, void>> =>
+ tPipeline.bimap(TraceUtil.withFunctionTrace(executePipeline))
+ .bimap(TraceUtil.withMetricTrace(pipelinesMetric))
+ .map(async (tJobs): Promise<IEither<Error, void>> => {
+ for (const [i, serialStage] of tJobs.get().serialJobs.entries()) {
+ tJobs.trace.trace(
+ `executing stage ${i}. do your best little stage :>\n${serialStage}`,
+ );
+ const jobResults = await Promise.all(
+ serialStage.parallelJobs.map((job) =>
+ tJobs.bimap((_) => [job, `stage ${i}`])
+ .map((tJob) =>
+ <Job> ({
+ ...tJob.get(),
+ arguments: { ...baseEnv, ...tJob.get().arguments },
+ })
+ )
+ .map(executeJob)
+ .peek(
+ TraceUtil.promiseify((tEitherJobOutput) =>
+ tEitherJobOutput.get().mapRight((stdout) =>
+ tEitherJobOutput.trace.addTrace("STDOUT").trace(stdout)
+ )
+ ),
+ )
+ .get()
+ ),
+ );
+ const failures = jobResults.filter((e) => e.fold((err) => !!err));
+ if (failures.length > 0) {
+ tJobs.trace.trace(pipelinesMetric.failure);
+ return Either.left(new Error(failures.toString()));
+ }
+ }
+ tJobs.trace.trace(pipelinesMetric.success);
+ return Either.right(undefined);
+ })
+ .get();
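
The pipeline runner keeps the same semantics as the old run_pipeline script removed further down: stages in serialJobs execute in order, the parallelJobs inside a stage run concurrently with baseEnv merged under each job's own arguments, and any failure in a stage aborts the rest. Stripped of the Either/trace machinery, the control flow is roughly (reusing the illustrative Job/runJob sketch above):

    interface Stage { parallelJobs: Job[] }
    interface Pipeline { serialJobs: Stage[] }

    async function runPipeline(p: Pipeline, baseEnv: Record<string, string> = {}) {
      for (const [i, stage] of p.serialJobs.entries()) {
        // every job in the stage starts at once; we wait for all of them
        const results = await Promise.allSettled(
          stage.parallelJobs.map((job) =>
            runJob({ ...job, arguments: { ...baseEnv, ...job.arguments } })
          ),
        );
        const failures = results.filter((r) => r.status === "rejected");
        if (failures.length > 0) {
          throw new Error(`stage ${i} failed: ${failures.length} job(s)`);
        }
      }
    }
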
diff --git a/worker/jobs/checkout_ci.run b/worker/jobs/checkout_ci.run
deleted file mode 100755
index 0945444..0000000
--- a/worker/jobs/checkout_ci.run
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/bin/bash
-# usage: laminarc run checkout_ci remote="ssh://src.liz.coffee:2222/cgit" rev="<sha>" \
-# refname="refs/..."
-
-RUN=`date +%s`
-RETURN="$PWD"
-WORKING_DIR="$PWD/$RUN"
-
-export LOG_PREFIX="[checkout_ci.$RUN]"
-
-log "starting checkout_ci job $remote @ $refname - $rev in $WORKING_DIR"
-mkdir -p "$WORKING_DIR" && cd "$WORKING_DIR"
-
-CODE="$WORKING_DIR/src"
-checkout="$rev" path="$CODE" fetch_code
-
-CI_WORKFLOW="$CODE/.ci/ci.json"
-if [[ ! -e "$CI_WORKFLOW" ]]; then
- log "no CI configuration found"
- exit 0
-fi
-
-PIPELINE_GENERATOR_PATH=$(jq -r '.pipeline' "$CI_WORKFLOW")
-if [[ "$PIPELINE_GENERATOR_PATH" == *".."* ]]; then
- log "no '..'"
- exit 1
-fi
-
-log "building the pipeline..."
-PIPELINE="$WORKING_DIR/pipeline.json"
-docker run --rm --network none --cap-drop ALL --security-opt no-new-privileges \
- -e refname="$refname" -e rev="$rev" -e remote="$remote" \
- -v "$CODE/$PIPELINE_GENERATOR_PATH:/pipeline_generator" \
- oci.liz.coffee/img/liz-ci:release /pipeline_generator \
- > "$PIPELINE"
-
-pipeline="$PIPELINE" run_pipeline
-
-log "cleaning up working directory"
-cd "$RETURN" && rm -rf "$WORKING_DIR"
-
-log "checkout_ci run done"
diff --git a/worker/jobs/ci_pipeline.run b/worker/jobs/ci_pipeline.run
new file mode 100644
index 0000000..337bd53
--- /dev/null
+++ b/worker/jobs/ci_pipeline.run
@@ -0,0 +1,166 @@
+import {
+ type Command,
+ Either,
+ getRequiredEnvVars,
+ getStdout,
+ isObject,
+ LogMetricTraceable,
+ Metric,
+ prependWith,
+ TraceUtil,
+} from "@emprespresso/pengueno";
+import {
+ type CheckoutCiJob,
+ type FetchCodeJob,
+ PipelineImpl,
+} from "@emprespresso/ci-model";
+import { executeJob, executePipeline } from "@emprespresso/ci-worker";
+
+const run = Date.now().toString();
+const eitherJob = getRequiredEnvVars(["remote", "refname", "rev"])
+ .mapRight((baseArgs) => (
+ <CheckoutCiJob> {
+ type: "checkout_ci",
+ arguments: {
+ ...baseArgs,
+ run,
+ returnPath: Deno.cwd(),
+ },
+ }
+ ));
+
+const ciRunMetric = Metric.fromName("checkout_ci.run");
+const trace = `checkout_ci.${run}`;
+await LogMetricTraceable.from(eitherJob).bimap(TraceUtil.withTrace(trace))
+ .bimap(TraceUtil.withMetricTrace(ciRunMetric))
+ .map((tEitherJob) =>
+ tEitherJob.get().flatMapAsync((ciJob) => {
+ const wd = getWorkingDirectoryForCiJob(ciJob);
+ const fetchPackageJob = <FetchCodeJob> {
+ type: "fetch_code",
+ arguments: {
+ remoteUrl: ciJob.arguments.remote,
+ checkout: ciJob.arguments.rev,
+ path: getSrcDirectoryForCiJob(ciJob),
+ },
+ };
+ return Either.fromFailableAsync<Error, CheckoutCiJob>(
+ Deno.mkdir(wd).then(() => Deno.chdir(wd))
+ .then(() => tEitherJob.move(fetchPackageJob).map(executeJob).get())
+ .then(() => ciJob),
+ );
+ })
+ )
+ .map((tEitherCiJob) =>
+ tEitherCiJob.get().then((eitherCiJob) =>
+ eitherCiJob.flatMapAsync<{ cmd: Command; job: CheckoutCiJob }>((ciJob) =>
+ Either.fromFailableAsync<Error, string>(
+ Deno.readTextFile(
+ `${getSrcDirectoryForCiJob(ciJob)}/${CI_WORKFLOW_FILE}`,
+ ),
+ ).then((eitherWorkflowJson) =>
+ eitherWorkflowJson.flatMap(
+ (json) => Either.fromFailable<Error, unknown>(JSON.parse(json)),
+ ).flatMap((eitherWorkflowParse) => {
+ if (isCiWorkflow(eitherWorkflowParse)) {
+ return Either.right({
+ cmd: getPipelineGenerationCommand(
+ ciJob,
+ eitherWorkflowParse.workflow,
+ ),
+ job: ciJob,
+ });
+ }
+ return Either.left(
+ new Error(
+ "couldn't find any valid ci configuration (。•́︿•̀。), that's okay~",
+ ),
+ );
+ })
+ )
+ )
+ )
+ )
+ .map(async (tEitherPipelineGenerationCommand) => {
+ const eitherJobCommand = await tEitherPipelineGenerationCommand.get();
+ const eitherPipeline = await eitherJobCommand
+ .flatMapAsync((jobCommand) =>
+ tEitherPipelineGenerationCommand.move(jobCommand.cmd)
+ .map(getStdout)
+ .get()
+ );
+ return eitherPipeline
+ .flatMap(PipelineImpl.from)
+ .flatMap((pipeline) =>
+ eitherJobCommand.mapRight(({ job }) => ({ job, pipeline }))
+ );
+ })
+ .peek(
+ TraceUtil.promiseify((tEitherPipeline) =>
+ tEitherPipeline.get()
+ .mapRight((val) => val.pipeline.serialize())
+ .mapRight((pipeline) =>
+ `built the pipeline~ (◕ᴗ◕✿) let's make something amazing! ${pipeline}`
+ )
+ .mapRight((msg) => tEitherPipeline.trace.trace(msg))
+ ),
+ )
+ .map(
+ async (tEitherPipeline) => {
+ const eitherPipeline = await tEitherPipeline.get();
+ return eitherPipeline.flatMapAsync(({ pipeline, job }) =>
+ tEitherPipeline.move(pipeline)
+ .map((p) =>
+ executePipeline(p, {
+ HOME: getWorkingDirectoryForCiJob(job),
+ })
+ )
+ .get()
+ );
+ },
+ )
+ .get()
+ .then((e) =>
+ e.flatMap(() => eitherJob).fold((err, val) => {
+ if (err) throw err;
+ return Deno.remove(getWorkingDirectoryForCiJob(val), { recursive: true });
+ })
+ );
+
+const getWorkingDirectoryForCiJob = (job: CheckoutCiJob) =>
+ `${job.arguments.returnPath}/${job.arguments.run}`;
+
+const getSrcDirectoryForCiJob = (job: CheckoutCiJob) =>
+ `${job.arguments.returnPath}/${job.arguments.run}/src`;
+
+const _runFlags = ("--rm --network none --cap-drop ALL" +
+ "--security-opt no-new-privileges").split(" ");
+const _image = "oci.liz.coffee/img/ci-worker:release";
+const getPipelineGenerationCommand = (
+ job: CheckoutCiJob,
+ pipelineGeneratorPath: string,
+ image = _image,
+ runFlags = _runFlags,
+) => [
+ "docker",
+ "run",
+ ...runFlags,
+ ...prependWith(
+ Object.entries(job.arguments).map(([key, val]) => `"${key}"="${val}"`),
+ "-e",
+ ),
+ "-v",
+ `${
+ getSrcDirectoryForCiJob(job)
+ }/${pipelineGeneratorPath}:/pipeline_generator`,
+ image,
+ "/pipeline_generator",
+];
+
+export interface CiWorkflow {
+ workflow: string;
+}
+export const isCiWorkflow = (t: unknown): t is CiWorkflow =>
+ isObject(t) && "workflow" in t && typeof t.workflow === "string" &&
+ !t.workflow.includes("..");
+const CI_WORKFLOW_FILE = ".ci/ci.json";
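
ci_pipeline.run is the TypeScript replacement for the checkout_ci.run shell job deleted just above: it makes a working directory under returnPath/run, fetches the revision via a fetch_code job, reads .ci/ci.json from the checkout, runs the repository's pipeline generator in a locked-down container, feeds the generator's stdout to executePipeline, and finally removes the working directory. The workflow file it accepts is tiny; something like the following (the generator path is a made-up example) passes isCiWorkflow, while anything containing ".." is rejected:

    // Hypothetical contents of <repo>/.ci/ci.json:
    const exampleCiJson: CiWorkflow = { workflow: ".ci/pipeline_generator" };
    // getPipelineGenerationCommand(job, exampleCiJson.workflow) then yields, roughly:
    //   docker run --rm --network none --cap-drop ALL --security-opt no-new-privileges \
    //     -e "remote"="..." -e "refname"="..." -e "rev"="..." -e "run"="..." -e "returnPath"="..." \
    //     -v <src>/.ci/pipeline_generator:/pipeline_generator \
    //     oci.liz.coffee/img/ci-worker:release /pipeline_generator
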
diff --git a/worker/mod.ts b/worker/mod.ts
index e69de29..affcb2c 100644
--- a/worker/mod.ts
+++ b/worker/mod.ts
@@ -0,0 +1,2 @@
+export * from "./secret/mod.ts";
+export * from "./executor/mod.ts";
diff --git a/worker/scripts/ansible_playbook b/worker/scripts/ansible_playbook
index 062680d..e9e967c 100755
--- a/worker/scripts/ansible_playbook
+++ b/worker/scripts/ansible_playbook
@@ -1,78 +1,113 @@
#!/usr/bin/env -S deno run --allow-env --allow-net --allow-run --allow-read --allow-write
import {
- BitwardenSession,
- getRequiredEnv,
+ Either,
+ getRequiredEnvVars,
getStdout,
- loggerWithPrefix,
+ type IEither,
+ LogMetricTraceable,
+ Metric,
prependWith,
- type SecureNote,
-} from "@liz-ci/utils";
-import type { AnsiblePlaybookJobProps } from "@liz-ci/model";
+ TraceUtil,
+} from "@emprespresso/pengueno";
+import type { AnsiblePlaybookJob } from "@emprespresso/ci-model";
+import { Bitwarden, type SecureNote } from "@emprespresso/ci-worker";
-const args: AnsiblePlaybookJobProps = {
- path: getRequiredEnv("path"),
- playbooks: getRequiredEnv("playbooks"),
-};
-const logger = loggerWithPrefix(() =>
- `[${new Date().toISOString()}] [ansible_playbook.'${args.playbooks}']`
-);
+const eitherJob = getRequiredEnvVars([
+ "path",
+ "playbooks",
+])
+ .mapRight((baseArgs) => (
+ <AnsiblePlaybookJob> {
+ type: "ansible_playbook",
+ arguments: baseArgs,
+ }
+ ));
-const run = async () => {
- logger.log("Starting Ansible playbook job");
+const eitherVault = Bitwarden.getConfigFromEnvironment()
+ .mapRight((config) => new Bitwarden(config));
- const bitwardenSession = new BitwardenSession();
- const secretFiles = await Promise.all(
- ["ansible_secrets", "ssh_key"]
- .map((secretName) =>
- bitwardenSession
- .getItem<SecureNote>(secretName)
- .then(async ({ notes: recoveredSecret }) => {
- const tempFile = await Deno.makeTempFile();
- await Deno.writeTextFile(tempFile, recoveredSecret);
- logger.log(secretName, "stored at", tempFile);
- return tempFile;
- })
- ),
- );
- const [ansibleSecrets, sshKey] = secretFiles;
+const playbookMetric = Metric.fromName("ansiblePlaybook.playbook");
+await LogMetricTraceable.from(eitherJob)
+ .bimap(TraceUtil.withTrace("ansible_playbook"))
+ .bimap(TraceUtil.withMetricTrace(playbookMetric))
+ .peek((tEitherJob) =>
+ tEitherJob.trace.trace("starting ansible playbook job! (⑅˘꒳˘)")
+ )
+ .map((tEitherJob) =>
+ tEitherJob.get().flatMapAsync((job) =>
+ eitherVault.flatMapAsync(async (vault) => {
+ const eitherKey = await vault.unlock(tEitherJob);
+ return eitherKey.mapRight((key) => ({ job, key, vault }));
+ })
+ )
+ )
+ .map(async (tEitherJobVault) => {
+ tEitherJobVault.trace.trace(
+ "getting ansible secwets uwu~",
+ );
+ const eitherJobVault = await tEitherJobVault.get();
- try {
- const volumes = [
- `${args.path}:/ansible`,
- `${sshKey}:/root/id_rsa`,
- `${ansibleSecrets}:/ansible/secrets.yml`,
- ];
+ const eitherSshKey = await eitherJobVault
+ .flatMapAsync(({ key, vault }) =>
+ vault.fetchSecret<SecureNote>(tEitherJobVault, key, "ssh_key")
+ );
+ const eitherSshKeyFile = await eitherSshKey.mapRight(({ notes }) => notes)
+ .flatMapAsync(saveToTempFile);
+ const eitherAnsibleSecrets = await eitherJobVault
+ .flatMapAsync(({ key, vault }) =>
+ vault.fetchSecret<SecureNote>(tEitherJobVault, key, "ansible_playbooks")
+ );
+ const eitherAnsibleSecretsFile = await eitherAnsibleSecrets.mapRight((
+ { notes },
+ ) => notes).flatMapAsync(saveToTempFile);
- const playbookCmd = `ansible-playbook -e @secrets.yml ${args.playbooks}`;
- const deployCmd = [
- "docker",
- "run",
- ...prependWith(volumes, "-v"),
- "willhallonline/ansible:latest",
- ...playbookCmd.split(" "),
- ];
- logger.log("deploying...", deployCmd);
- await getStdout(deployCmd);
- } finally {
- await Promise.allSettled(
- [bitwardenSession.close()].concat(
- secretFiles.map((p) => {
- logger.log(`cleanup`, p);
- return Deno.remove(p);
- }),
- ),
+ return eitherJobVault.flatMapAsync(async ({ job, vault, key }) => {
+ const eitherLocked = await vault.lock(tEitherJobVault, key);
+ return eitherLocked.flatMap((_locked) =>
+ eitherSshKeyFile.flatMap((sshKeyFile) =>
+ eitherAnsibleSecretsFile.mapRight((secretsFile) => ({
+ job,
+ sshKeyFile,
+ secretsFile,
+ }))
+ )
+ );
+ });
+ })
+ .map(async (tEitherJobAndSecrets) => {
+ const eitherJobAndSecrets = await tEitherJobAndSecrets.get();
+ return eitherJobAndSecrets.flatMapAsync(
+ ({ job, sshKeyFile, secretsFile }) => {
+ const volumes = [
+ `${job.arguments.path}:/ansible`,
+ `${sshKeyFile}:/root/id_rsa`,
+ `${secretsFile}:/ansible/secrets.yml`,
+ ];
+ const playbookCmd =
+ `ansible-playbook -e @secrets.yml ${job.arguments.playbooks}`;
+ const deployCmd = [
+ "docker",
+ "run",
+ ...prependWith(volumes, "-v"),
+ "willhallonline/ansible:latest",
+ ...playbookCmd.split(" "),
+ ];
+ tEitherJobAndSecrets.trace.trace(
+ `running ansible magic~ (◕ᴗ◕✿) ${deployCmd}`,
+ );
+ return tEitherJobAndSecrets.move(deployCmd).map(getStdout).get();
+ },
);
- }
+ })
+ .get();
- logger.log("ansible playbook job completed");
-};
-
-if (import.meta.main) {
- try {
- await run();
- } catch (e) {
- logger.error("womp womp D:", e);
- throw e;
- }
-}
+const saveToTempFile = (text: string): Promise<IEither<Error, string>> =>
+ Either.fromFailableAsync(
+ Deno.makeTempDir({ dir: Deno.cwd() })
+ .then((dir) => Deno.makeTempFile({ dir }))
+ .then(async (f) => {
+ await Deno.writeTextFile(f, text);
+ return f;
+ }),
+ );
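
End to end, the reworked ansible_playbook script still does what the old version did: pull the ssh_key and ansible secrets notes out of Bitwarden into temp files, mount them plus the playbook path into the willhallonline/ansible image, and run ansible-playbook with -e @secrets.yml. Invoking it now needs the job arguments plus the Bitwarden environment read by Bitwarden.getConfigFromEnvironment; a hedged sketch of such an invocation (all values are placeholders):

    const out = await new Deno.Command("worker/scripts/ansible_playbook", {
      env: {
        path: "/tmp/checkout/ansible",   // hypothetical playbook directory
        playbooks: "site.yml",
        BW_SERVER: "https://vault.example.invalid",
        BW_CLIENTID: "user.xxxxxxxx",
        BW_CLIENTSECRET: "xxxxxxxx",
        BW_PASSWORD: "xxxxxxxx",
      },
    }).output();
    console.log(new TextDecoder().decode(out.stdout));
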
diff --git a/worker/scripts/build_docker_image b/worker/scripts/build_docker_image
new file mode 100755
index 0000000..1dd0c3d
--- /dev/null
+++ b/worker/scripts/build_docker_image
@@ -0,0 +1,161 @@
+#!/usr/bin/env -S deno run --allow-env --allow-net --allow-run
+
+import {
+ getRequiredEnvVars,
+ getStdout,
+ LogLevel,
+ LogMetricTraceable,
+ Metric,
+ TraceUtil,
+} from "@emprespresso/pengueno";
+import type {
+ BuildDockerImageJob,
+ BuildDockerImageJobProps,
+} from "@emprespresso/ci-model";
+import { Bitwarden, type LoginItem } from "@emprespresso/ci-worker";
+
+const eitherJob = getRequiredEnvVars([
+ "registry",
+ "namespace",
+ "repository",
+ "imageTag",
+ "context",
+ "dockerfile",
+ "buildTarget",
+])
+ .mapRight((baseArgs) => (
+ <BuildDockerImageJob> {
+ type: "build_docker_image",
+ arguments: baseArgs,
+ }
+ ));
+const eitherVault = Bitwarden.getConfigFromEnvironment()
+ .mapRight((config) => new Bitwarden(config));
+
+const buildImageMetric = Metric.fromName("dockerImage.build");
+const loginMetric = Metric.fromName("dockerRegistry.login");
+await LogMetricTraceable.from(eitherJob)
+ .bimap(
+ (tEitherJob) => {
+ const trace = "build_docker_image." +
+ tEitherJob.get().fold((_, v) => v?.buildTarget ?? "");
+ return [tEitherJob.get(), trace];
+ },
+ )
+ .bimap(TraceUtil.withMetricTrace(buildImageMetric))
+ .bimap(TraceUtil.withMetricTrace(loginMetric))
+ .peek((tEitherJob) =>
+ tEitherJob.trace.trace("starting docker image build job! (⑅˘꒳˘)")
+ )
+ .map((tEitherJob) =>
+ tEitherJob.get()
+ .flatMapAsync((job) =>
+ eitherVault.flatMapAsync(async (vault) => {
+ const eitherKey = await vault.unlock(tEitherJob);
+ return eitherKey.mapRight((key) => ({ job, key, vault }));
+ })
+ )
+ )
+ .map(async (tEitherJobVault) => {
+ tEitherJobVault.trace.trace("logging into the wegistwy uwu~");
+ const eitherJobVault = await tEitherJobVault.get();
+ const eitherDockerRegistryLoginItem = await eitherJobVault.flatMapAsync((
+ { job, key, vault },
+ ) =>
+ vault.fetchSecret<LoginItem>(tEitherJobVault, key, job.arguments.registry)
+ .finally(() => vault.lock(tEitherJobVault, key))
+ );
+ return eitherDockerRegistryLoginItem.flatMapAsync(({ login }) =>
+ eitherJobVault.flatMapAsync(async ({ job }) => {
+ const loginCommand = getDockerLoginCommand(
+ login.username,
+ job.arguments.registry,
+ );
+ const eitherLoggedIn = await tEitherJobVault.move(loginCommand).map((
+ tLoginCmd,
+ ) =>
+ getStdout(tLoginCmd, { env: { REGISTRY_PASSWORD: login.password } })
+ ).get();
+ return eitherLoggedIn.moveRight(job);
+ })
+ );
+ })
+ .peek(async (tEitherWithAuthdRegistry) => {
+ const eitherWithAuthdRegistry = await tEitherWithAuthdRegistry.get();
+ return tEitherWithAuthdRegistry.trace.trace(
+ eitherWithAuthdRegistry.fold((err, _val) =>
+ loginMetric[err ? "failure" : "success"]
+ ),
+ );
+ })
+ .map(async (tEitherWithAuthdRegistryBuildJob) => {
+ const eitherWithAuthdRegistryBuildJob =
+ await tEitherWithAuthdRegistryBuildJob.get();
+ tEitherWithAuthdRegistryBuildJob.trace.trace(
+ "finally building the image~ (◕ᴗ◕✿)",
+ );
+ const eitherBuiltImage = await eitherWithAuthdRegistryBuildJob.flatMapAsync(
+ (job) =>
+ tEitherWithAuthdRegistryBuildJob
+ .move(getBuildCommand(job.arguments))
+ .map((tBuildCmd) =>
+ getStdout(tBuildCmd, {
+ env: {},
+ clearEnv: true,
+ })
+ )
+ .get(),
+ );
+ return eitherBuiltImage.flatMap((buildOutput) =>
+ eitherWithAuthdRegistryBuildJob.mapRight((job) => ({ buildOutput, job }))
+ );
+ })
+ .peek(async (tEitherWithBuiltImage) => {
+ const eitherWithBuiltImage = await tEitherWithBuiltImage.get();
+ eitherWithBuiltImage.fold((err, val) => {
+ tEitherWithBuiltImage.trace.trace(
+ buildImageMetric[err ? "failure" : "success"],
+ );
+ if (err) {
+ tEitherWithBuiltImage.trace.addTrace(LogLevel.ERROR).trace(
+ `oh nyoo we couldn't buiwd the img :(( ${err}`,
+ );
+ return;
+ }
+ tEitherWithBuiltImage.trace.addTrace("buildOutput").trace(val);
+ });
+ })
+ .map(async (tEitherWithBuiltImage) => {
+ const eitherWithBuiltImage = await tEitherWithBuiltImage.get();
+ return eitherWithBuiltImage
+ .mapRight(({ job }) =>
+ tEitherWithBuiltImage.move(getPushCommand(job.arguments.imageTag))
+ )
+ .flatMapAsync((tPushCommand) => getStdout(tPushCommand));
+ })
+ .get();
+
+const getDockerLoginCommand = (username: string, registry: string) =>
+ `docker login --username ${username} --password $REGISTRY_PASSWORD ${registry}`
+ .split(" ");
+
+const getBuildCommand = (
+ {
+ buildTarget,
+ imageTag,
+ dockerfile,
+ context,
+ }: BuildDockerImageJobProps,
+) => [
+ "docker",
+ "build",
+ "--target",
+ buildTarget,
+ "-t",
+ imageTag,
+ "-f",
+ dockerfile,
+ context,
+];
+
+const getPushCommand = (tag: string) => ["docker", "push", tag];
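
One behavioural difference from the old build_image script (deleted just below) is worth noting: the old script composed the tag as registry/namespace/repository:imageTag, whereas here imageTag is passed straight to both docker build -t and docker push, so callers presumably supply the fully qualified reference themselves. An illustrative argument set (placeholder values) and the commands it produces:

    // Placeholder job arguments mirroring the getRequiredEnvVars list above.
    const exampleArgs = {
      registry: "oci.liz.coffee",
      namespace: "img",
      repository: "ci-worker",
      imageTag: "oci.liz.coffee/img/ci-worker:release",  // used as-is for -t and push
      context: ".",
      dockerfile: "worker/Dockerfile",
      buildTarget: "worker",
    };
    // getBuildCommand(exampleArgs) ~>
    //   ["docker", "build", "--target", "worker", "-t",
    //    "oci.liz.coffee/img/ci-worker:release", "-f", "worker/Dockerfile", "."]
    // getPushCommand(exampleArgs.imageTag) ~>
    //   ["docker", "push", "oci.liz.coffee/img/ci-worker:release"]
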
diff --git a/worker/scripts/build_image b/worker/scripts/build_image
deleted file mode 100755
index 07c07c9..0000000
--- a/worker/scripts/build_image
+++ /dev/null
@@ -1,91 +0,0 @@
-#!/usr/bin/env -S deno run --allow-env --allow-net --allow-run
-
-import type { BuildDockerImageJobProps } from "@liz-ci/model";
-import {
- BitwardenSession,
- getRequiredEnv,
- getStdout,
- loggerWithPrefix,
- type LoginItem,
-} from "@liz-ci/utils";
-
-const args: BuildDockerImageJobProps = {
- registry: getRequiredEnv("registry"),
- namespace: getRequiredEnv("namespace"),
- repository: getRequiredEnv("repository"),
- imageTag: getRequiredEnv("imageTag"),
-
- context: getRequiredEnv("context"),
- dockerfile: getRequiredEnv("dockerfile"),
- buildTarget: getRequiredEnv("buildTarget"),
-};
-
-const logger = loggerWithPrefix(() =>
- `[${
- new Date().toISOString()
- }] [build_image.${args.repository}.${args.imageTag}]`
-);
-
-const run = async () => {
- logger.log("Starting Docker image build job");
-
- const bitwardenSession = new BitwardenSession();
- const { username: registryUsername, password: registryPassword } =
- (await bitwardenSession.getItem<LoginItem>(args.registry))?.login ?? {};
- if (!(registryUsername && registryPassword)) {
- throw new Error("where's the login info bruh");
- }
-
- logger.log(`Logging in to Docker registry: ${args.registry}`);
- await getStdout(
- [
- "docker",
- "login",
- "--username",
- registryUsername,
- "--password",
- registryPassword,
- args.registry,
- ],
- );
-
- const tag =
- `${args.registry}/${args.namespace}/${args.repository}:${args.imageTag}`;
- const buildCmd = [
- "docker",
- "build",
- "--target",
- args.buildTarget,
- "-t",
- tag,
- "-f",
- `${args.dockerfile}`,
- `${args.context}`,
- ];
-
- logger.log(`building`, tag, buildCmd);
- await getStdout(
- buildCmd,
- {
- clearEnv: true,
- env: {},
- },
- );
-
- const pushCmd = [
- "docker",
- "push",
- tag,
- ];
- logger.log(`pushing`, pushCmd);
- await getStdout(pushCmd);
-};
-
-if (import.meta.main) {
- try {
- await run();
- } catch (e) {
- logger.error("womp womp D:", e);
- throw e;
- }
-}
diff --git a/worker/scripts/fetch_code b/worker/scripts/fetch_code
index d3af763..cc2d561 100755
--- a/worker/scripts/fetch_code
+++ b/worker/scripts/fetch_code
@@ -2,15 +2,15 @@
export LOG_PREFIX="[fetch_code $remote @ $checkout -> $path]"
-log "fetch!"
+log "getting the codez~ time to fetch!"
git clone "$remote" "$path"
if [ ! $? -eq 0 ]; then
- log "D: failed to clone"
+ log "D: oh nyo! couldn't clone the repo"
exit 1
fi
cd "$path"
-log "checkout $checkout"
+log "switching to $checkout~"
git reset --hard "$checkout"
if [ ! $? -eq 0 ]; then
log "D: can't reset to $checkout"
diff --git a/worker/scripts/run_pipeline b/worker/scripts/run_pipeline
deleted file mode 100755
index 9991001..0000000
--- a/worker/scripts/run_pipeline
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env -S deno run --allow-env --allow-net --allow-run --allow-read --allow-write
-
-import { type Job, PipelineImpl } from "@liz-ci/model";
-import {
- getRequiredEnv,
- getStdout,
- loggerWithPrefix,
- validateIdentifier,
-} from "@liz-ci/utils";
-
-const pipelinePath = getRequiredEnv("pipeline");
-const logger = loggerWithPrefix(() =>
- `[${new Date().toISOString()}] [run_pipeline.${pipelinePath}]`
-);
-
-const jobValidForExecution = (job: Job) => {
- return Object
- .entries(job.arguments)
- .filter((e) => {
- if (e.every(validateIdentifier)) return true;
- logger.error(`job of type ${job.type} has invalid args ${e}`);
- return false;
- })
- .length === 0;
-};
-
-const run = async () => {
- logger.log("starting pipeline execution");
-
- const stages = await (Deno.readTextFile(pipelinePath))
- .then(PipelineImpl.from)
- .then((pipeline) => pipeline.getStages());
-
- for (const stage of stages) {
- logger.log("executing stage", stage);
-
- await Promise.all(
- stage.parallelJobs.map(async (job, jobIdx) => {
- logger.log(`executing job ${jobIdx}`, job);
- if (!jobValidForExecution(job)) throw new Error("invalid job");
-
- const result = await getStdout(job.type, { env: job.arguments });
- logger.log(jobIdx, "outputs", { result });
- }),
- );
- }
-
- logger.log("ok! yay!");
-};
-
-if (import.meta.main) {
- try {
- await run();
- } catch (e) {
- logger.error("womp womp D:", e);
- throw e;
- }
-}
diff --git a/worker/secret/bitwarden.ts b/worker/secret/bitwarden.ts
new file mode 100644
index 0000000..0172a1b
--- /dev/null
+++ b/worker/secret/bitwarden.ts
@@ -0,0 +1,153 @@
+import {
+ Either,
+ getRequiredEnvVars,
+ getStdout,
+ type IEither,
+ type ITraceable,
+ type LogMetricTraceSupplier,
+ Metric,
+ TraceUtil,
+} from "@emprespresso/pengueno";
+import type { IVault, SecretItem } from "./mod.ts";
+
+type TClient = ITraceable<unknown, LogMetricTraceSupplier>;
+type TKey = string;
+type TItemId = string;
+export class Bitwarden implements IVault<TClient, TKey, TItemId> {
+ constructor(private readonly config: BitwardenConfig) {}
+
+ public unlock(client: TClient) {
+ return client.move(this.config)
+ .bimap(TraceUtil.withMetricTrace(Bitwarden.loginMetric))
+ .flatMap((tConfig) =>
+ tConfig.move(`bw config server ${tConfig.get().server}`).map(getStdout)
+ )
+ .map(async (tEitherWithConfig) => {
+ const eitherWithConfig = await tEitherWithConfig.get();
+ tEitherWithConfig.trace.trace("logging in~ ^.^");
+ return eitherWithConfig.flatMapAsync((_) =>
+ tEitherWithConfig.move("bw login --apikey --quiet").map(getStdout)
+ .get()
+ );
+ })
+ .peek(async (tEitherWithAuthd) => {
+ const eitherWithAuthd = await tEitherWithAuthd.get();
+ return tEitherWithAuthd.trace.trace(
+ eitherWithAuthd.fold((err, _val) =>
+ Bitwarden.loginMetric[err ? "failure" : "success"]
+ ),
+ );
+ })
+ .map(async (tEitherWithAuthd) => {
+ const eitherWithAuthd = await tEitherWithAuthd.get();
+ tEitherWithAuthd.trace.trace("unlocking the secret vault~ (◕ᴗ◕✿)");
+ return eitherWithAuthd.flatMapAsync((_) =>
+ tEitherWithAuthd.move("bw unlock --passwordenv BW_PASSWORD --raw")
+ .map(getStdout)
+ .get()
+ );
+ })
+ .peek(async (tEitherWithSession) => {
+ const eitherWithAuthd = await tEitherWithSession.get();
+ return tEitherWithSession.trace.trace(
+ eitherWithAuthd.fold((err, _val) =>
+ Bitwarden.unlockVaultMetric[err ? "failure" : "success"]
+ ),
+ );
+ })
+ .get();
+ }
+
+ public fetchSecret<T extends SecretItem>(
+ client: TClient,
+ key: string,
+ item: string,
+ ): Promise<IEither<Error, T>> {
+ return client.move(key)
+ .bimap(TraceUtil.withMetricTrace(Bitwarden.fetchSecretMetric))
+ .peek((tSession) =>
+ tSession.trace.trace(`looking for your secret ${item} (⑅˘꒳˘)`)
+ )
+ .flatMap((tSession) =>
+ tSession.move("bw list items").map((listCmd) =>
+ getStdout(listCmd, { env: { BW_SESSION: tSession.get() } })
+ )
+ )
+ .map(
+ TraceUtil.promiseify((tEitherItemsJson) =>
+ tEitherItemsJson.get()
+ .flatMap((itemsJson): IEither<Error, Array<T & { name: string }>> =>
+ Either.fromFailable(() => JSON.parse(itemsJson))
+ )
+ .flatMap((itemsList): IEither<Error, T> => {
+ const secret = itemsList.find(({ name }) => name === item);
+ if (!secret) {
+ return Either.left(
+ new Error(`couldn't find the item ${item} (。•́︿•̀。)`),
+ );
+ }
+ return Either.right(secret);
+ })
+ ),
+ )
+ .peek(async (tEitherWithSecret) => {
+ const eitherWithSecret = await tEitherWithSecret.get();
+ return tEitherWithSecret.trace.trace(
+ eitherWithSecret.fold((err, _val) =>
+ Bitwarden.fetchSecretMetric[err ? "failure" : "success"]
+ ),
+ );
+ })
+ .get();
+ }
+
+ public lock(client: TClient, key: TKey) {
+ return client.move(key)
+ .bimap(TraceUtil.withMetricTrace(Bitwarden.lockVaultMetric))
+ .peek((tSession) =>
+ tSession.trace.trace(`taking care of locking the vault :3`)
+ )
+ .flatMap((tSession) =>
+ tSession.move("bw lock").map((lockCmd) =>
+ getStdout(lockCmd, { env: { BW_SESSION: tSession.get() } })
+ )
+ )
+ .peek(async (tEitherWithLocked) => {
+ const eitherWithLocked = await tEitherWithLocked.get();
+ return eitherWithLocked.fold((err, _val) => {
+ tEitherWithLocked.trace.trace(
+ Bitwarden.lockVaultMetric[err ? "failure" : "success"],
+ );
+ if (err) return;
+ tEitherWithLocked.trace.trace(
+ "all locked up and secure now~ (。•̀ᴗ-)✧",
+ );
+ });
+ })
+ .get();
+ }
+
+ public static getConfigFromEnvironment(): IEither<Error, BitwardenConfig> {
+ return getRequiredEnvVars([
+ "BW_SERVER",
+ "BW_CLIENTSECRET",
+ "BW_CLIENTID",
+ "BW_PASSWORD",
+ ]).mapRight(({ BW_SERVER, BW_CLIENTSECRET, BW_CLIENTID }) => ({
+ clientId: BW_CLIENTID,
+ secret: BW_CLIENTSECRET,
+ server: BW_SERVER,
+ }));
+ }
+
+ private static loginMetric = Metric.fromName("Bitwarden.login");
+ private static unlockVaultMetric = Metric.fromName("Bitwarden.unlockVault");
+ private static fetchSecretMetric = Metric.fromName("Bitwarden.fetchSecret");
+ private static lockVaultMetric = Metric.fromName("Bitwarden.lock");
+}
+
+export interface BitwardenConfig {
+ server: string;
+ secret: string;
+ clientId: string;
+}
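
unlock/lock map onto the Bitwarden CLI in the obvious way: bw config server, bw login --apikey (which reads BW_CLIENTID/BW_CLIENTSECRET from the environment), then bw unlock --passwordenv BW_PASSWORD --raw, whose raw output is the session token later calls pass as BW_SESSION; bw lock ends the session. Without the trace/Either wrapping, the unlock half is roughly (a sketch; error handling omitted):

    async function bwUnlock(server: string): Promise<string> {
      const sh = (cmd: string, env: Record<string, string> = {}) =>
        new Deno.Command("bash", { args: ["-c", cmd], env }).output();

      await sh(`bw config server ${server}`);
      await sh("bw login --apikey --quiet");  // BW_CLIENTID / BW_CLIENTSECRET from env
      const unlocked = await sh("bw unlock --passwordenv BW_PASSWORD --raw");
      return new TextDecoder().decode(unlocked.stdout).trim();  // the BW_SESSION key
    }
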
diff --git a/worker/secret/ivault.ts b/worker/secret/ivault.ts
new file mode 100644
index 0000000..e101e56
--- /dev/null
+++ b/worker/secret/ivault.ts
@@ -0,0 +1,24 @@
+import type { IEither } from "@emprespresso/pengueno";
+
+export interface LoginItem {
+ login: {
+ username: string;
+ password: string;
+ };
+}
+
+export interface SecureNote {
+ notes: string;
+}
+
+export type SecretItem = LoginItem | SecureNote;
+export interface IVault<TClient, TKey, TItemId> {
+ unlock: (client: TClient) => Promise<IEither<Error, TKey>>;
+ lock: (client: TClient, key: TKey) => Promise<IEither<Error, TKey>>;
+
+ fetchSecret: <T extends SecretItem>(
+ client: TClient,
+ key: TKey,
+ item: TItemId,
+ ) => Promise<IEither<Error, T>>;
+}
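
Bitwarden is the only IVault implementation in the tree, but the interface is small enough that a test double is easy to sketch; assuming Either.left/Either.right behave the way the worker code above uses them, an in-memory vault could look like:

    // Toy in-memory vault, purely illustrative: the "key" is a fake session id.
    class FakeVault implements IVault<void, string, string> {
      constructor(private readonly items: Record<string, SecretItem>) {}

      unlock(_client: void) {
        return Promise.resolve(Either.right<Error, string>("fake-session"));
      }

      lock(_client: void, key: string) {
        return Promise.resolve(Either.right<Error, string>(key));
      }

      fetchSecret<T extends SecretItem>(_client: void, _key: string, item: string) {
        const found = this.items[item];
        return Promise.resolve(
          found
            ? Either.right<Error, T>(found as T)
            : Either.left<Error, T>(new Error(`no item named ${item}`)),
        );
      }
    }
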
diff --git a/worker/secret/mod.ts b/worker/secret/mod.ts
new file mode 100644
index 0000000..70a1ea9
--- /dev/null
+++ b/worker/secret/mod.ts
@@ -0,0 +1,2 @@
+export * from "./ivault.ts";
+export * from "./bitwarden.ts";