Diffstat (limited to 'worker/scripts/run_pipeline')
-rwxr-xr-x  worker/scripts/run_pipeline | 58 ----------------------------------------
1 file changed, 0 insertions(+), 58 deletions(-)
diff --git a/worker/scripts/run_pipeline b/worker/scripts/run_pipeline
deleted file mode 100755
index 9991001..0000000
--- a/worker/scripts/run_pipeline
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env -S deno run --allow-env --allow-net --allow-run --allow-read --allow-write
-
-import { type Job, PipelineImpl } from "@liz-ci/model";
-import {
- getRequiredEnv,
- getStdout,
- loggerWithPrefix,
- validateIdentifier,
-} from "@liz-ci/utils";
-
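-// Path to the pipeline definition file, supplied via the required "pipeline" env var.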
-const pipelinePath = getRequiredEnv("pipeline");
-const logger = loggerWithPrefix(() =>
- `[${new Date().toISOString()}] [run_pipeline.${pipelinePath}]`
-);
-
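-// A job may run only if every argument key and value passes validateIdentifier;
-// any offending entry is logged and the job is rejected.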
-const jobValidForExecution = (job: Job) => {
-  return Object
-    .entries(job.arguments)
-    .every((e) => {
-      if (e.every(validateIdentifier)) return true;
-      logger.error(`job of type ${job.type} has invalid args ${e}`);
-      return false;
-    });
-};
-
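-// Stages execute sequentially; the jobs within a stage run in parallel, each
-// invoked via getStdout with its arguments passed through the environment.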
-const run = async () => {
- logger.log("starting pipeline execution");
-
-  const stages = await Deno.readTextFile(pipelinePath)
-    .then(PipelineImpl.from)
-    .then((pipeline) => pipeline.getStages());
-
- for (const stage of stages) {
- logger.log("executing stage", stage);
-
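-    // Run every job in this stage concurrently; if any job fails, no further stages run.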
- await Promise.all(
- stage.parallelJobs.map(async (job, jobIdx) => {
- logger.log(`executing job ${jobIdx}`, job);
- if (!jobValidForExecution(job)) throw new Error("invalid job");
-
- const result = await getStdout(job.type, { env: job.arguments });
- logger.log(jobIdx, "outputs", { result });
- }),
- );
- }
-
- logger.log("ok! yay!");
-};
-
-if (import.meta.main) {
- try {
- await run();
- } catch (e) {
- logger.error("womp womp D:", e);
- throw e;
- }
-}