author     Elizabeth Hunt <me@liz.coffee>  2025-06-20 14:53:38 -0700
committer  Elizabeth Hunt <me@liz.coffee>  2025-06-20 14:53:38 -0700
commit     d4791f3d357634daf506fb8f91cc5332a794c421 (patch)
tree       1bb01d2d4d8fa74d83bb6f99f2c8aa4146ca2d11 /worker/scripts/checkout_ci.ts
parent     d7e8d31c94cd713a2f4cf799e20e993acc69e361 (diff)
Move to nodejs
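This change swaps the Deno runtime APIs for their Node.js equivalents from fs/promises, path, and process. A minimal sketch of the substitutions the diff below relies on (not part of the commit; the scratch directory and file lookup here are illustrative, and an ESM context is assumed for top-level await):

#!/usr/bin/env node
// Sketch only: the Deno -> Node.js API swaps used in the patched file, shown on a scratch directory.
import { mkdir, readFile, rm } from 'fs/promises';
import { join } from 'path';

const wd = join(process.cwd(), Date.now().toString());       // Deno.cwd()        -> process.cwd()
await mkdir(wd, { recursive: true });                         // Deno.mkdir        -> mkdir(..., { recursive: true })
process.chdir(wd);                                            // Deno.chdir        -> process.chdir
const text = await readFile(join(wd, '.ci/ci.json'), 'utf-8') // Deno.readTextFile -> readFile(..., 'utf-8')
    .catch(() => null);                                       // tolerate the missing file in this sketch
console.log(text === null ? 'no workflow file in this scratch dir' : text);
process.chdir(join(wd, '..'));                                // step back out before cleanup
await rm(wd, { recursive: true });                            // Deno.remove       -> rm(..., { recursive: true })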
Diffstat (limited to 'worker/scripts/checkout_ci.ts')
-rwxr-xr-x  worker/scripts/checkout_ci.ts  294
1 file changed, 132 insertions(+), 162 deletions(-)
diff --git a/worker/scripts/checkout_ci.ts b/worker/scripts/checkout_ci.ts
index efe74fb..8e4dcca 100755
--- a/worker/scripts/checkout_ci.ts
+++ b/worker/scripts/checkout_ci.ts
@@ -1,182 +1,152 @@
-#!/usr/bin/env -S deno run --allow-all
+#!/usr/bin/env node
import {
- type Command,
- Either,
- LogTraceable,
- getRequiredEnvVars,
- getStdout,
- isObject,
- LogMetricTraceable,
- Metric,
- prependWith,
- TraceUtil,
-} from "@emprespresso/pengueno";
-import {
- type CheckoutCiJob,
- type FetchCodeJob,
- PipelineImpl,
-} from "@emprespresso/ci_model";
-import { executeJob, executePipeline } from "@emprespresso/ci_worker";
+ type Command,
+ Either,
+ LogTraceable,
+ getRequiredEnvVars,
+ getStdout,
+ isObject,
+ LogMetricTraceable,
+ Metric,
+ prependWith,
+ TraceUtil,
+} from '@emprespresso/pengueno';
+import { mkdir, readFile, rm } from 'fs/promises';
+import { join } from 'path';
+import { type CheckoutCiJob, type FetchCodeJob, PipelineImpl } from '@emprespresso/ci_model';
+import { executeJob, executePipeline } from '@emprespresso/ci_worker';
const run = Date.now().toString();
-const eitherJob = getRequiredEnvVars(["remote", "refname", "rev"]).mapRight(
- (baseArgs) =>
- <CheckoutCiJob>{
- type: "checkout_ci.ts",
- arguments: {
- ...baseArgs,
- run,
- returnPath: Deno.cwd(),
- },
- },
+const eitherJob = getRequiredEnvVars(['remote', 'refname', 'rev']).mapRight(
+ (baseArgs) =>
+ <CheckoutCiJob>{
+ type: 'checkout_ci.ts',
+ arguments: {
+ ...baseArgs,
+ run,
+ returnPath: process.cwd(),
+ },
+ },
);
-const ciRunMetric = Metric.fromName("checkout_ci.run");
-const _logJob = LogTraceable.of(eitherJob).bimap(
- TraceUtil.withTrace(`checkout_ci.${run}`),
-);
+const ciRunMetric = Metric.fromName('checkout_ci.run');
+const _logJob = LogTraceable.of(eitherJob).bimap(TraceUtil.withTrace(`checkout_ci.${run}`));
await LogMetricTraceable.ofLogTraceable(_logJob)
- .bimap(TraceUtil.withMetricTrace(ciRunMetric))
- .map((tEitherJob) =>
- tEitherJob.get().flatMapAsync((ciJob) => {
- const wd = getWorkingDirectoryForCiJob(ciJob);
- const fetchPackageJob = <FetchCodeJob>{
- type: "fetch_code.ts",
- arguments: {
- remoteUrl: ciJob.arguments.remote,
- checkout: ciJob.arguments.rev,
- path: getSrcDirectoryForCiJob(ciJob),
- },
- };
- return Either.fromFailableAsync<Error, CheckoutCiJob>(() =>
- Deno.mkdir(wd)
- .then(() => Deno.chdir(wd))
- .then(() => tEitherJob.move(fetchPackageJob).map(executeJob).get())
- .then(() => ciJob),
- );
- }),
- )
- .map((tEitherCiJob) =>
- tEitherCiJob.get().then((eitherCiJob) =>
- eitherCiJob.flatMapAsync<{ cmd: Command; job: CheckoutCiJob }>((ciJob) =>
- Either.fromFailableAsync<Error, string>(() =>
- Deno.readTextFile(
- `${getSrcDirectoryForCiJob(ciJob)}/${CI_WORKFLOW_FILE}`,
- ),
- ).then((eitherWorkflowJson) =>
- eitherWorkflowJson
- .flatMap((json) =>
- Either.fromFailable<Error, unknown>(JSON.parse(json)),
- )
- .flatMap((eitherWorkflowParse) => {
- if (isCiWorkflow(eitherWorkflowParse)) {
- return Either.right({
- cmd: getPipelineGenerationCommand(
- ciJob,
- eitherWorkflowParse.workflow,
- ),
- job: ciJob,
- });
- }
- return Either.left(
- new Error(
- "couldn't find any valid ci configuration (。•́︿•̀。), that's okay~",
+ .bimap(TraceUtil.withMetricTrace(ciRunMetric))
+ .map((tEitherJob) =>
+ tEitherJob.get().flatMapAsync((ciJob) => {
+ const wd = getWorkingDirectoryForCiJob(ciJob);
+ const fetchPackageJob = <FetchCodeJob>{
+ type: 'fetch_code.ts',
+ arguments: {
+ remoteUrl: ciJob.arguments.remote,
+ checkout: ciJob.arguments.rev,
+ path: getSrcDirectoryForCiJob(ciJob),
+ },
+ };
+ return Either.fromFailableAsync<Error, CheckoutCiJob>(() =>
+ mkdir(wd, { recursive: true })
+ .then(() => process.chdir(wd))
+ .then(() => tEitherJob.move(fetchPackageJob).map(executeJob).get())
+ .then(() => ciJob),
+ );
+ }),
+ )
+ .map((tEitherCiJob) =>
+ tEitherCiJob.get().then((eitherCiJob) =>
+ eitherCiJob.flatMapAsync<{ cmd: Command; job: CheckoutCiJob }>((ciJob) =>
+ Either.fromFailableAsync<Error, string>(() =>
+ readFile(join(getSrcDirectoryForCiJob(ciJob), CI_WORKFLOW_FILE), 'utf-8'),
+ ).then((eitherWorkflowJson) =>
+ eitherWorkflowJson
+ .flatMap((json) => Either.fromFailable<Error, unknown>(JSON.parse(json)))
+ .flatMap((eitherWorkflowParse) => {
+ if (isCiWorkflow(eitherWorkflowParse)) {
+ return Either.right({
+ cmd: getPipelineGenerationCommand(ciJob, eitherWorkflowParse.workflow),
+ job: ciJob,
+ });
+ }
+ return Either.left(
+ new Error("couldn't find any valid ci configuration (。•́︿•̀。), that's okay~"),
+ );
+ }),
),
- );
- }),
+ ),
),
- ),
- ),
- )
- .map(async (tEitherPipelineGenerationCommand) => {
- const eitherJobCommand = await tEitherPipelineGenerationCommand.get();
- const eitherPipeline = await eitherJobCommand.flatMapAsync((jobCommand) =>
- tEitherPipelineGenerationCommand
- .move(jobCommand.cmd)
- .map(getStdout)
- .get(),
- );
- return eitherPipeline
- .flatMap(PipelineImpl.from)
- .flatMap((pipeline) =>
- eitherJobCommand.mapRight(({ job }) => ({ job, pipeline })),
- );
- })
- .peek(
- TraceUtil.promiseify((tEitherPipeline) =>
- tEitherPipeline
- .get()
- .mapRight((val) => val.pipeline.serialize())
- .mapRight(
- (pipeline) =>
- `built the pipeline~ (◕ᴗ◕✿) let's make something amazing! ${pipeline}`,
- )
- .mapRight((msg) => tEitherPipeline.trace.trace(msg)),
- ),
- )
- .map(async (tEitherPipeline) => {
- const eitherPipeline = await tEitherPipeline.get();
- return eitherPipeline.flatMapAsync(({ pipeline, job }) =>
- tEitherPipeline
- .move(pipeline)
- .map((p) =>
- executePipeline(p, {
- HOME: getWorkingDirectoryForCiJob(job),
- }),
- )
- .get(),
+ )
+ .map(async (tEitherPipelineGenerationCommand) => {
+ const eitherJobCommand = await tEitherPipelineGenerationCommand.get();
+ const eitherPipeline = await eitherJobCommand.flatMapAsync((jobCommand) =>
+ tEitherPipelineGenerationCommand.move(jobCommand.cmd).map(getStdout).get(),
+ );
+ return eitherPipeline
+ .flatMap(PipelineImpl.from)
+ .flatMap((pipeline) => eitherJobCommand.mapRight(({ job }) => ({ job, pipeline })));
+ })
+ .peek(
+ TraceUtil.promiseify((tEitherPipeline) =>
+ tEitherPipeline
+ .get()
+ .mapRight((val) => val.pipeline.serialize())
+ .mapRight((pipeline) => `built the pipeline~ (◕ᴗ◕✿) let's make something amazing! ${pipeline}`)
+ .mapRight((msg) => tEitherPipeline.trace.trace(msg)),
+ ),
+ )
+ .map(async (tEitherPipeline) => {
+ const eitherPipeline = await tEitherPipeline.get();
+ return eitherPipeline.flatMapAsync(({ pipeline, job }) =>
+ tEitherPipeline
+ .move(pipeline)
+ .map((p) =>
+ executePipeline(p, {
+ HOME: getWorkingDirectoryForCiJob(job),
+ }),
+ )
+ .get(),
+ );
+ })
+ .get()
+ .then((e) =>
+ e
+ .flatMap(() => eitherJob)
+ .fold(({ isLeft, isRight, value }) => {
+ if (isLeft || !isRight) throw value;
+ return rm(getWorkingDirectoryForCiJob(value), {
+ recursive: true,
+ });
+ }),
);
- })
- .get()
- .then((e) =>
- e
- .flatMap(() => eitherJob)
- .fold(({ isLeft, isRight, value }) => {
- if (isLeft || !isRight) throw value;
- return Deno.remove(getWorkingDirectoryForCiJob(value), {
- recursive: true,
- });
- }),
- );
-const getWorkingDirectoryForCiJob = (job: CheckoutCiJob) =>
- `${job.arguments.returnPath}/${job.arguments.run}`;
+const getWorkingDirectoryForCiJob = (job: CheckoutCiJob) => `${job.arguments.returnPath}/${job.arguments.run}`;
-const getSrcDirectoryForCiJob = (job: CheckoutCiJob) =>
- `${job.arguments.returnPath}/${job.arguments.run}/src`;
+const getSrcDirectoryForCiJob = (job: CheckoutCiJob) => `${job.arguments.returnPath}/${job.arguments.run}/src`;
-const _runFlags = (
- "--rm --network none --cap-drop ALL" + "--security-opt no-new-privileges"
-).split(" ");
-const _image = "oci.liz.coffee/img/ci-worker:release";
+const _runFlags = ('--rm --network none --cap-drop ALL' + '--security-opt no-new-privileges').split(' ');
+const _image = 'oci.liz.coffee/img/ci-worker:release';
const getPipelineGenerationCommand = (
- job: CheckoutCiJob,
- pipelineGeneratorPath: string,
- image = _image,
- runFlags = _runFlags,
+ job: CheckoutCiJob,
+ pipelineGeneratorPath: string,
+ image = _image,
+ runFlags = _runFlags,
) => [
- "docker",
- "run",
- ...runFlags,
- ...prependWith(
- Object.entries(job.arguments).map(([key, val]) => `"${key}"="${val}"`),
- "-e",
- ),
- "-v",
- `${getSrcDirectoryForCiJob(
- job,
- )}/${pipelineGeneratorPath}:/pipeline_generator`,
- image,
- "/pipeline_generator",
+ 'docker',
+ 'run',
+ ...runFlags,
+ ...prependWith(
+ Object.entries(job.arguments).map(([key, val]) => `"${key}"="${val}"`),
+ '-e',
+ ),
+ '-v',
+ `${getSrcDirectoryForCiJob(job)}/${pipelineGeneratorPath}:/pipeline_generator`,
+ image,
+ '/pipeline_generator',
];
export interface CiWorkflow {
- workflow: string;
+ workflow: string;
}
export const isCiWorkflow = (t: unknown): t is CiWorkflow =>
- isObject(t) &&
- "workflow" in t &&
- typeof t.workflow === "string" &&
- !t.workflow.includes("..");
-const CI_WORKFLOW_FILE = ".ci/ci.json";
+ isObject(t) && 'workflow' in t && typeof t.workflow === 'string' && !t.workflow.includes('..');
+const CI_WORKFLOW_FILE = '.ci/ci.json';
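For context, a hedged invocation sketch: the script takes its inputs from environment variables (see getRequiredEnvVars(['remote', 'refname', 'rev']) above) rather than argv. The caller and values below are placeholders, and it is assumed the worker's setup lets the node shebang run the .ts entry point directly:

// Sketch only: a hypothetical caller (e.g. a hook runner) launching checkout_ci.ts with the
// three required environment variables. None of these values come from the commit above.
import { execFile } from 'child_process';
import { promisify } from 'util';

const exec = promisify(execFile);
await exec('./worker/scripts/checkout_ci.ts', [], {
    env: {
        ...process.env,
        remote: 'https://git.example.invalid/ci.git',    // placeholder remote URL
        refname: 'refs/heads/main',                      // placeholder ref name
        rev: 'd4791f3d357634daf506fb8f91cc5332a794c421', // placeholder revision
    },
});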