#!/usr/bin/env node
import {
    type Command,
    Either,
    LogTraceable,
    getRequiredEnvVars,
    isObject,
    LogMetricTraceable,
    Metric,
    prependWith,
    TraceUtil,
    getStdoutMany,
} from '@emprespresso/pengueno';
import { mkdir, readFile, rm } from 'fs/promises';
import { join } from 'path';
import { type CheckoutCiJob, type FetchCodeJob, PipelineImpl } from '@emprespresso/ci_model';
import { executeJob, executePipeline } from '@emprespresso/ci_worker';

interface CiWorkflow {
    workflow: string;
}
function isCiWorkflow(t: unknown): t is CiWorkflow {
    // Reject ".." segments: the workflow path is later interpolated into a
    // container path, so we refuse anything that could escape /src.
    return isObject(t) && 'workflow' in t && typeof t.workflow === 'string' && !t.workflow.includes('..');
}

const CI_WORKFLOW_FILE = '.ci/ci.json';
const OCI_REGISTRY = 'oci.liz.coffee';
const PIPELINE_IMAGE = `${OCI_REGISTRY}/emprespresso/ci_worker:release`;
const READONLY_CREDENTIALS = { username: 'readonly', password: 'readonly' };

// Use a different directory per job run. Even though the Laminar run is
// unique per job, the "children" we spawn may themselves be checkout_ci jobs,
// and we don't want them conflicting with whatever the "parent" was doing,
// so we create a unique directory for each, e.g.:
//   Laminar Run: 57, CWD=/var/lib/laminar/run/57
//     ci_pipeline (1000.uuidA)
//       -> checkout_ci
//       -> [...children]
//            -> checkout_ci (1000.uuidB)
const run = `${Date.now()}.${crypto.randomUUID().replaceAll('-', '')}`;

const eitherJob = getRequiredEnvVars(['remote', 'refname', 'rev', 'executorLaminarPath']).mapRight(
    (baseArgs) => ({
        type: 'checkout_ci.js',
        arguments: {
            ...baseArgs,
            run,
            returnPath: process.cwd(),
        },
    }),
);

// Cleanup of the per-run working directory, wrapped in a thunk so the rm only
// happens once the pipeline has finished, not at module load time.
const afterJob = () =>
    eitherJob.flatMapAsync((job) =>
        Either.fromFailableAsync(() => rm(getWorkingDirectoryForCiJob(job), { recursive: true })),
    );

const logTraceableJob = LogTraceable.of(eitherJob).flatMap(TraceUtil.withTrace(`checkout_ci.run.${run}`));
const ciRunMetric = Metric.fromName('checkout_ci.run');

await LogMetricTraceable.ofLogTraceable(logTraceableJob)
    .flatMap(TraceUtil.withMetricTrace(ciRunMetric))
    .map((tEitherJob) =>
        tEitherJob.get().flatMapAsync((ciJob) => {
            const wd = getWorkingDirectoryForCiJob(ciJob);
            const fetchPackageJob: FetchCodeJob = {
                type: 'fetch_code',
                arguments: {
                    remoteUrl: ciJob.arguments.remote,
                    checkout: ciJob.arguments.rev,
                    path: getSrcDirectoryForCiJob(ciJob),
                },
            };
            return Either.fromFailableAsync(() =>
                mkdir(wd, { recursive: true })
                    .then(() => process.chdir(wd))
                    .then(() => tEitherJob.move(fetchPackageJob).map(executeJob).get())
                    .then((e) =>
                        e.fold(
                            (err) => {
                                throw err;
                            },
                            () => ciJob,
                        ),
                    ),
            );
        }),
    )
    .map(async (tEitherCiJob) => {
        const eitherCiJob = await tEitherCiJob.get();
        const repoCiFileContents = await eitherCiJob.flatMapAsync((ciJob) =>
            Either.fromFailableAsync(() =>
                readFile(join(getSrcDirectoryForCiJob(ciJob), CI_WORKFLOW_FILE), 'utf-8'),
            ),
        );
        return repoCiFileContents
            .flatMap((fileText) => Either.fromFailable(() => JSON.parse(fileText)).filter(isCiWorkflow))
            .joinRight(eitherCiJob, (job: CheckoutCiJob, { workflow }) => ({
                job,
                commands: getPipelineGenerationCommand(job, workflow),
            }));
    })
    .map(async (tEitherPipelineGenerationCommand) => {
        const eitherJobCommand = await tEitherPipelineGenerationCommand.get();
        const pipelineSerialized = await eitherJobCommand.flatMapAsync(({ commands }) =>
            getStdoutMany(tEitherPipelineGenerationCommand.move(commands)),
        );
        return pipelineSerialized
            .flatMap((results) => {
                // The serialized pipeline is the stdout of the last command;
                // the earlier results are just the docker login output.
                const pipeline = results.at(-1)!;
                return PipelineImpl.from(pipeline);
            })
            .joinRight(eitherJobCommand, ({ job }, pipeline) => ({ job, pipeline }));
    })
    .peek(
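        // peek: observe the traced value (log the freshly built pipeline)
        // without changing what flows down the chain; TraceUtil.promiseify
        // presumably adapts the callback to this promise-valued Traceable.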
        TraceUtil.promiseify((tEitherPipeline) =>
            tEitherPipeline
                .get()
                .mapRight((val) => val.pipeline.serialize())
                .mapRight((pipeline) => `built the pipeline~ (◕ᴗ◕✿) let's make something amazing! ${pipeline}`)
                .mapRight((msg) => tEitherPipeline.trace.trace(msg)),
        ),
    )
    .map(async (tEitherPipeline) => {
        const eitherPipeline = await tEitherPipeline.get();
        return eitherPipeline.flatMapAsync(({ pipeline, job }) =>
            tEitherPipeline
                .move(pipeline)
                .map((p) =>
                    executePipeline(p, {
                        HOME: getWorkingDirectoryForCiJob(job),
                    }),
                )
                .get(),
        );
    })
    .map(async (tCompletePipeline) => {
        const completePipeline = await tCompletePipeline.get();
        return completePipeline.fold(
            (e) => Promise.reject(e),
            () => afterJob(),
        );
    })
    .get();

function getWorkingDirectoryForCiJob(job: CheckoutCiJob) {
    return `${job.arguments.returnPath}/${job.arguments.run}`;
}

function getSrcDirectoryForCiJob(job: CheckoutCiJob) {
    return `${getWorkingDirectoryForCiJob(job)}/src`;
}

function getSrcDirectoryForChildContainer(job: CheckoutCiJob) {
    // The container which runs the pipeline synthesizer (A) might be spawned
    // by another container (B) (the one running this job) by talking to the
    // host's Docker daemon (mounting /var/run/docker.sock) and executing the
    // {@link getPipelineGenerationCommand}.
    //
    // Mounting {@link getSrcDirectoryForCiJob} therefore has no meaning, as
    // that path doesn't exist on the host; here we replace the path as seen
    // in (B) with the actual volume source on the host, where the src exists.
    return getSrcDirectoryForCiJob(job).replace('/var/lib/laminar', job.arguments.executorLaminarPath);
}

function getPipelineGenerationCommand(
    job: CheckoutCiJob,
    pipelineGeneratorPath: string,
    credentials = READONLY_CREDENTIALS,
    registry = OCI_REGISTRY,
    image = PIPELINE_IMAGE,
    runFlags = '--rm --network none --cap-drop ALL --security-opt no-new-privileges'.split(' '),
): Array<Command> {
    return [
        `docker login --username ${credentials.username} --password ${credentials.password} ${registry}`.split(' '),
    ].concat([
        [
            'docker',
            'run',
            ...runFlags,
            ...prependWith(
                // These are argv tokens, not shell text, so no quoting here:
                // literal quotes would end up inside the env variable name.
                Object.entries(job.arguments).map(([key, val]) => `${key}=${val}`),
                '-e',
            ),
            '-v',
            `${getSrcDirectoryForChildContainer(job)}:/src`,
            image,
            `/src/${pipelineGeneratorPath}`,
        ],
    ]);
}
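
// ---------------------------------------------------------------------------
// Illustrative sketch only (not executed): how the host-path remapping and the
// generated commands fit together for a hypothetical job. Every value below
// (remote, rev, run id, returnPath, executorLaminarPath, workflow path) is
// made up, and the expansion of prependWith assumes it interleaves the prefix
// before each element.
//
//   const job = {
//       type: 'checkout_ci.js',
//       arguments: {
//           remote: 'https://git.example/repo.git',
//           refname: 'main',
//           rev: 'abc123',
//           run: '1700000000000.deadbeef',
//           returnPath: '/var/lib/laminar/run/ci/57',
//           executorLaminarPath: '/mnt/laminar', // Laminar state dir on the host
//       },
//   } as CheckoutCiJob;
//
//   getWorkingDirectoryForCiJob(job);
//   // -> '/var/lib/laminar/run/ci/57/1700000000000.deadbeef'
//   getSrcDirectoryForCiJob(job);
//   // -> '/var/lib/laminar/run/ci/57/1700000000000.deadbeef/src'
//   getSrcDirectoryForChildContainer(job);
//   // -> '/mnt/laminar/run/ci/57/1700000000000.deadbeef/src'
//
//   getPipelineGenerationCommand(job, '.ci/synth.js');
//   // -> [
//   //      ['docker', 'login', '--username', 'readonly', '--password', 'readonly', 'oci.liz.coffee'],
//   //      ['docker', 'run', '--rm', '--network', 'none', '--cap-drop', 'ALL',
//   //       '--security-opt', 'no-new-privileges',
//   //       '-e', 'remote=https://git.example/repo.git', /* ...one -e per argument... */
//   //       '-v', '/mnt/laminar/run/ci/57/1700000000000.deadbeef/src:/src',
//   //       'oci.liz.coffee/emprespresso/ci_worker:release', '/src/.ci/synth.js'],
//   //    ]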