#!/usr/bin/env node
import {
    type Command,
    Either,
    LogTraceable,
    getRequiredEnvVars,
    isObject,
    LogMetricTraceable,
    Metric,
    prependWith,
    TraceUtil,
    getStdoutMany,
    IEither,
} from '@emprespresso/pengueno';
import { mkdir, readFile, rm } from 'fs/promises';
import path, { join } from 'path';
import { type CheckoutCiJob, type FetchCodeJob, Pipeline, PipelineImpl } from '@emprespresso/ci_model';
import { executeJob, executePipeline, getPathOnHost } from '@emprespresso/ci_worker';

// Shape of the repository's CI manifest (`.ci/ci.json`).
interface CiWorkflow {
    workflow: string;
}

// Runtime guard for the untrusted JSON read out of a checked-out repository.
// Rejects any workflow path containing '..' so a hostile manifest cannot point
// the pipeline generator at a file outside its /src mount.
function isCiWorkflow(t: unknown): t is CiWorkflow {
    if (!isObject(t) || !('workflow' in t)) {
        return false;
    }
    return typeof t.workflow === 'string' && !t.workflow.includes('..');
}

const CI_WORKFLOW_FILE = '.ci/ci.json';
const OCI_REGISTRY = 'oci.liz.coffee';
const PIPELINE_IMAGE = `${OCI_REGISTRY}/emprespresso/ci_worker:release`;
const READONLY_CREDENTIALS = { username: 'readonly', password: 'readonly' };

// use a different directory per job run
// even though the Laminar run is unique per job, there's potential for "children" we spawn
// to be a checkout_ci job as well. in which case, we don't want any conflicts with whatever
// the "parent" was doing, so we create a unique directory for each.
// i.e.
// Laminar Run: 57, CWD=/var/lib/laminar/run/57 // ci_pipeline (1000.uuidA) // -> checkout_ci // -> [...children] // -> checkout_ci (1000.uuidB) const run = `${Date.now()}.${crypto.randomUUID().replaceAll('-', '')}`; const eitherJob = getRequiredEnvVars(['remote', 'refname', 'rev']).mapRight( (baseArgs) => { type: 'checkout_ci.js', arguments: { ...baseArgs, run, returnPath: process.cwd(), }, }, ); const afterJob = eitherJob.flatMapAsync((job) => Either.fromFailableAsync(() => rm(getWorkingDirectoryForCiJob(job), { recursive: true })), ); const logTraceableJob = LogTraceable.of(eitherJob).flatMap(TraceUtil.withTrace(`checkout_ci.run.${run}`)); const ciRunMetric = Metric.fromName('checkout_ci.run'); await LogMetricTraceable.ofLogTraceable(logTraceableJob) .flatMap(TraceUtil.withMetricTrace(ciRunMetric)) .map((tEitherJob) => tEitherJob.get().flatMapAsync((ciJob) => { const wd = getWorkingDirectoryForCiJob(ciJob); const fetchPackageJob = { type: 'fetch_code', arguments: { remoteUrl: ciJob.arguments.remote, checkout: ciJob.arguments.rev, path: getSrcDirectoryForCiJob(ciJob), }, }; return Either.fromFailableAsync(() => mkdir(wd, { recursive: true }) .then(() => process.chdir(wd)) .then(() => tEitherJob.move(fetchPackageJob).map(executeJob).get()) .then(() => ciJob), ); }), ) .map(async (tEitherCiJob) => { const eitherCiJob = await tEitherCiJob.get(); const jobSrcOnHost = await eitherCiJob.flatMapAsync((job) => getPathOnHost(getSrcDirectoryForCiJob(job))); const repoCiFileContents = await eitherCiJob.flatMapAsync((ciJob) => Either.fromFailableAsync(() => readFile(join(getSrcDirectoryForCiJob(ciJob), CI_WORKFLOW_FILE), 'utf-8'), ), ); return repoCiFileContents .flatMap((fileText) => Either.fromFailable(() => JSON.parse(fileText)).filter((json) => isCiWorkflow(json)), ) .joinRight(eitherCiJob, (job, { workflow }) => ({ job, workflow, })) .joinRight(jobSrcOnHost, (src, { job, workflow }) => ({ job, commands: getPipelineGenerationCommand(job, workflow, src), })); }) .map( 
async ( tEitherPipelineGenerationCommand, ): Promise> => { const eitherJobCommand = await tEitherPipelineGenerationCommand.get(); const pipelineSerialized = await eitherJobCommand.flatMapAsync(({ commands }) => getStdoutMany( tEitherPipelineGenerationCommand.move(commands).peek((t) => t.trace.trace(JSON.stringify(t.get()))), ), ); return pipelineSerialized .flatMap((results) => { const pipeline = results.at(-1)!; return PipelineImpl.from(pipeline); }) .joinRight(eitherJobCommand, ({ job }, pipeline) => ({ job, pipeline })); }, ) .peek( TraceUtil.promiseify((tEitherPipeline) => tEitherPipeline .get() .mapRight((val) => val.pipeline.serialize()) .mapRight((pipeline) => `built the pipeline~ (◕ᴗ◕✿) let's make something amazing! ${pipeline}`) .mapRight((msg) => tEitherPipeline.trace.trace(msg)), ), ) .map(async (tEitherPipeline) => { const eitherPipeline = await tEitherPipeline.get(); return eitherPipeline.flatMapAsync(({ pipeline, job }) => tEitherPipeline .move(pipeline) .map((p) => executePipeline(p, { HOME: getWorkingDirectoryForCiJob(job), }), ) .get(), ); }) .map(async (tCompletePipeline) => { const completePipeline = await tCompletePipeline.get(); return completePipeline.fold( (e) => Promise.reject(e), () => afterJob, ); }) .get(); function getWorkingDirectoryForCiJob(job: CheckoutCiJob) { return `${job.arguments.returnPath}/${job.arguments.run}`; } function getSrcDirectoryForCiJob(job: CheckoutCiJob) { return `${getWorkingDirectoryForCiJob(job)}/src`; } function getPipelineGenerationCommand( job: CheckoutCiJob, pipelineGeneratorPath: string, srcMount: string, credentials = READONLY_CREDENTIALS, registry = OCI_REGISTRY, image = PIPELINE_IMAGE, runFlags = '--rm --network none --cap-drop ALL --security-opt no-new-privileges'.split(' '), ): Array { return [ `docker login --username ${credentials.username} --password ${credentials.password} ${registry}`.split(' '), ].concat([ [ 'docker', 'run', ...runFlags, ...prependWith( Object.entries(job.arguments).map(([key, 
val]) => `"${key}"="${val}"`), '-e', ), '-v', `${srcMount}:/src`, image, `/src/${pipelineGeneratorPath}`, ], ]); }