Diffstat (limited to 'worker/scripts')
-rwxr-xr-x  worker/scripts/ansible_playbook.ts     193
-rwxr-xr-x  worker/scripts/build_docker_image.ts   259
-rwxr-xr-x  worker/scripts/checkout_ci.ts          294
3 files changed, 336 insertions, 410 deletions
diff --git a/worker/scripts/ansible_playbook.ts b/worker/scripts/ansible_playbook.ts
index 0879dc5..c6d8f2c 100755
--- a/worker/scripts/ansible_playbook.ts
+++ b/worker/scripts/ansible_playbook.ts
@@ -1,113 +1,100 @@
-#!/usr/bin/env -S deno run --allow-env --allow-net --allow-run --allow-read --allow-write
+#!/usr/bin/env node
import {
- Either,
- getRequiredEnvVars,
- getStdout,
- type IEither,
- LogTraceable,
- LogMetricTraceable,
- Metric,
- prependWith,
- TraceUtil,
-} from "@emprespresso/pengueno";
-import type { AnsiblePlaybookJob } from "@emprespresso/ci_model";
-import { Bitwarden, type SecureNote } from "@emprespresso/ci_worker";
+ Either,
+ getRequiredEnvVars,
+ getStdout,
+ type IEither,
+ LogTraceable,
+ LogMetricTraceable,
+ Metric,
+ prependWith,
+ TraceUtil,
+} from '@emprespresso/pengueno';
+import type { AnsiblePlaybookJob } from '@emprespresso/ci_model';
+import { Bitwarden, type SecureNote } from '@emprespresso/ci_worker';
+import { writeFile, mkdtemp } from 'fs/promises';
+import { join } from 'path';
+import { tmpdir } from 'os';
-const eitherJob = getRequiredEnvVars([
- "path",
- "playbooks",
-])
- .mapRight((baseArgs) => (
- <AnsiblePlaybookJob> {
- type: "ansible_playbook.ts",
- arguments: baseArgs,
- }
- ));
+const eitherJob = getRequiredEnvVars(['path', 'playbooks']).mapRight(
+ (baseArgs) =>
+ <AnsiblePlaybookJob>{
+ type: 'ansible_playbook.ts',
+ arguments: baseArgs,
+ },
+);
-const eitherVault = Bitwarden.getConfigFromEnvironment()
- .mapRight((config) => new Bitwarden(config));
+const eitherVault = Bitwarden.getConfigFromEnvironment().mapRight((config) => new Bitwarden(config));
-const playbookMetric = Metric.fromName("ansiblePlaybook.playbook");
-const _logJob = LogTraceable.of(eitherJob).bimap(TraceUtil.withTrace("ansible_playbook"));
-await LogMetricTraceable.ofLogTraceable(_logJob).bimap(TraceUtil.withMetricTrace(playbookMetric))
- .peek((tEitherJob) =>
- tEitherJob.trace.trace("starting ansible playbook job! (⑅˘꒳˘)")
- )
- .map((tEitherJob) =>
- tEitherJob.get().flatMapAsync((job) =>
- eitherVault.flatMapAsync(async (vault) => {
- const eitherKey = await vault.unlock(tEitherJob);
- return eitherKey.mapRight((key) => ({ job, key, vault }));
- })
+const playbookMetric = Metric.fromName('ansiblePlaybook.playbook');
+const _logJob = LogTraceable.of(eitherJob).bimap(TraceUtil.withTrace('ansible_playbook'));
+await LogMetricTraceable.ofLogTraceable(_logJob)
+ .bimap(TraceUtil.withMetricTrace(playbookMetric))
+ .peek((tEitherJob) => tEitherJob.trace.trace('starting ansible playbook job! (⑅˘꒳˘)'))
+ .map((tEitherJob) =>
+ tEitherJob.get().flatMapAsync((job) =>
+ eitherVault.flatMapAsync(async (vault) => {
+ const eitherKey = await vault.unlock(tEitherJob);
+ return eitherKey.mapRight((key) => ({ job, key, vault }));
+ }),
+ ),
)
- )
- .map(async (tEitherJobVault) => {
- tEitherJobVault.trace.trace(
- "getting ansible secwets uwu~",
- );
- const eitherJobVault = await tEitherJobVault.get();
-
- const eitherSshKey = await eitherJobVault
- .flatMapAsync(({ key, vault }) =>
- vault.fetchSecret<SecureNote>(tEitherJobVault, key, "ssh_key")
- );
- const eitherSshKeyFile = await eitherSshKey.mapRight(({ notes }) => notes)
- .flatMapAsync(saveToTempFile);
- const eitherAnsibleSecrets = await eitherJobVault
- .flatMapAsync(({ key, vault }) =>
- vault.fetchSecret<SecureNote>(tEitherJobVault, key, "ansible_playbooks")
- );
- const eitherAnsibleSecretsFile = await eitherAnsibleSecrets.mapRight((
- { notes },
- ) => notes).flatMapAsync(saveToTempFile);
+ .map(async (tEitherJobVault) => {
+ tEitherJobVault.trace.trace('getting ansible secwets uwu~');
+ const eitherJobVault = await tEitherJobVault.get();
- return eitherJobVault.flatMapAsync(async ({ job, vault, key }) => {
- const eitherLocked = await vault.lock(tEitherJobVault, key);
- return eitherLocked.flatMap((_locked) =>
- eitherSshKeyFile.flatMap((sshKeyFile) =>
- eitherAnsibleSecretsFile.mapRight((secretsFile) => ({
- job,
- sshKeyFile,
- secretsFile,
- }))
- )
- );
- });
- })
- .map(async (tEitherJobAndSecrets) => {
- const eitherJobAndSecrets = await tEitherJobAndSecrets.get();
- return eitherJobAndSecrets.flatMapAsync(
- ({ job, sshKeyFile, secretsFile }) => {
- const volumes = [
- `${job.arguments.path}:/ansible`,
- `${sshKeyFile}:/root/id_rsa`,
- `${secretsFile}:/ansible/secrets.yml`,
- ];
- const playbookCmd =
- `ansible-playbook -e @secrets.yml ${job.arguments.playbooks}`;
- const deployCmd = [
- "docker",
- "run",
- ...prependWith(volumes, "-v"),
- "willhallonline/ansible:latest",
- ...playbookCmd.split(" "),
- ];
- tEitherJobAndSecrets.trace.trace(
- `running ansible magic~ (◕ᴗ◕✿) ${deployCmd}`,
+ const eitherSshKey = await eitherJobVault.flatMapAsync(({ key, vault }) =>
+ vault.fetchSecret<SecureNote>(tEitherJobVault, key, 'ssh_key'),
);
- return tEitherJobAndSecrets.move(deployCmd).map(getStdout).get();
- },
- );
- })
- .get();
+ const eitherSshKeyFile = await eitherSshKey.mapRight(({ notes }) => notes).flatMapAsync(saveToTempFile);
+ const eitherAnsibleSecrets = await eitherJobVault.flatMapAsync(({ key, vault }) =>
+ vault.fetchSecret<SecureNote>(tEitherJobVault, key, 'ansible_playbooks'),
+ );
+ const eitherAnsibleSecretsFile = await eitherAnsibleSecrets
+ .mapRight(({ notes }) => notes)
+ .flatMapAsync(saveToTempFile);
+
+ return eitherJobVault.flatMapAsync(async ({ job, vault, key }) => {
+ const eitherLocked = await vault.lock(tEitherJobVault, key);
+ return eitherLocked.flatMap((_locked) =>
+ eitherSshKeyFile.flatMap((sshKeyFile) =>
+ eitherAnsibleSecretsFile.mapRight((secretsFile) => ({
+ job,
+ sshKeyFile,
+ secretsFile,
+ })),
+ ),
+ );
+ });
+ })
+ .map(async (tEitherJobAndSecrets) => {
+ const eitherJobAndSecrets = await tEitherJobAndSecrets.get();
+ return eitherJobAndSecrets.flatMapAsync(({ job, sshKeyFile, secretsFile }) => {
+ const volumes = [
+ `${job.arguments.path}:/ansible`,
+ `${sshKeyFile}:/root/id_rsa`,
+ `${secretsFile}:/ansible/secrets.yml`,
+ ];
+ const playbookCmd = `ansible-playbook -e @secrets.yml ${job.arguments.playbooks}`;
+ const deployCmd = [
+ 'docker',
+ 'run',
+ ...prependWith(volumes, '-v'),
+ 'willhallonline/ansible:latest',
+ ...playbookCmd.split(' '),
+ ];
+ tEitherJobAndSecrets.trace.trace(`running ansible magic~ (◕ᴗ◕✿) ${deployCmd}`);
+ return tEitherJobAndSecrets.move(deployCmd).map(getStdout).get();
+ });
+ })
+ .get();
const saveToTempFile = (text: string): Promise<IEither<Error, string>> =>
- Either.fromFailableAsync(
- () => Deno.makeTempDir({ dir: Deno.cwd() })
- .then((dir) => Deno.makeTempFile({ dir }))
- .then(async (f) => {
- await Deno.writeTextFile(f, text);
- return f;
- }),
- );
+ Either.fromFailableAsync(() =>
+ mkdtemp(join(tmpdir(), 'ci-')).then(async (dir) => {
+ const filePath = join(dir, 'temp-file');
+ await writeFile(filePath, text);
+ return filePath;
+ }),
+ );
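The rewritten saveToTempFile above swaps Deno.makeTempDir/Deno.makeTempFile/Deno.writeTextFile for their fs/promises equivalents. A minimal standalone sketch of the same pattern, assuming only the Node standard library; the 'ci-' prefix and 'temp-file' name mirror the diff, while the 0o600 file mode is an extra assumption for key material, not something the diff sets:

import { mkdtemp, writeFile } from 'fs/promises';
import { join } from 'path';
import { tmpdir } from 'os';

// Create an isolated temp directory, write the secret text into it,
// and return the file path so it can be bind-mounted into the container.
const saveSecretToTempFile = async (text: string): Promise<string> => {
    const dir = await mkdtemp(join(tmpdir(), 'ci-')); // e.g. /tmp/ci-aB12Cd
    const filePath = join(dir, 'temp-file');
    await writeFile(filePath, text, { mode: 0o600 }); // assumption: restrict permissions on key material
    return filePath;
};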
diff --git a/worker/scripts/build_docker_image.ts b/worker/scripts/build_docker_image.ts
index 49abe41..228dfcc 100755
--- a/worker/scripts/build_docker_image.ts
+++ b/worker/scripts/build_docker_image.ts
@@ -1,162 +1,131 @@
-#!/usr/bin/env -S deno run --allow-env --allow-net --allow-run
+#!/usr/bin/env node
import {
- getRequiredEnvVars,
- getStdout,
- LogLevel,
- LogTraceable,
- LogMetricTraceable,
- Metric,
- TraceUtil,
-} from "@emprespresso/pengueno";
-import type {
- BuildDockerImageJob,
- BuildDockerImageJobProps,
-} from "@emprespresso/ci_model";
-import { Bitwarden, type LoginItem } from "@emprespresso/ci_worker";
+ getRequiredEnvVars,
+ getStdout,
+ LogLevel,
+ LogTraceable,
+ LogMetricTraceable,
+ Metric,
+ TraceUtil,
+} from '@emprespresso/pengueno';
+import type { BuildDockerImageJob, BuildDockerImageJobProps } from '@emprespresso/ci_model';
+import { Bitwarden, type LoginItem } from '@emprespresso/ci_worker';
const eitherJob = getRequiredEnvVars([
- "registry",
- "namespace",
- "repository",
- "imageTag",
- "context",
- "dockerfile",
- "buildTarget",
-])
- .mapRight((baseArgs) => (
- <BuildDockerImageJob> {
- type: "build_docker_image.ts",
- arguments: baseArgs,
- }
- ));
-const eitherVault = Bitwarden.getConfigFromEnvironment()
- .mapRight((config) => new Bitwarden(config));
+ 'registry',
+ 'namespace',
+ 'repository',
+ 'imageTag',
+ 'context',
+ 'dockerfile',
+ 'buildTarget',
+]).mapRight(
+ (baseArgs) =>
+ <BuildDockerImageJob>{
+ type: 'build_docker_image.ts',
+ arguments: baseArgs,
+ },
+);
+const eitherVault = Bitwarden.getConfigFromEnvironment().mapRight((config) => new Bitwarden(config));
-const buildImageMetric = Metric.fromName("dockerImage.build");
-const loginMetric = Metric.fromName("dockerRegistry.login");
-const _logJob = LogTraceable.of(eitherJob).bimap(
- (tEitherJob) => {
- const trace = "build_docker_image." +
- tEitherJob.get().fold(({ isRight, value }) => isRight ? value.arguments.buildTarget : "");
- return [tEitherJob.get(), trace];
- },
- );
+const buildImageMetric = Metric.fromName('dockerImage.build');
+const loginMetric = Metric.fromName('dockerRegistry.login');
+const _logJob = LogTraceable.of(eitherJob).bimap((tEitherJob) => {
+ const trace =
+ 'build_docker_image.' +
+ tEitherJob.get().fold(({ isRight, value }) => (isRight ? value.arguments.buildTarget : ''));
+ return [tEitherJob.get(), trace];
+});
await LogMetricTraceable.ofLogTraceable(_logJob)
- .bimap(TraceUtil.withMetricTrace(buildImageMetric))
- .bimap(TraceUtil.withMetricTrace(loginMetric))
- .peek((tEitherJob) =>
- tEitherJob.trace.trace("starting docker image build job! (⑅˘꒳˘)")
- )
- .map((tEitherJob) =>
- tEitherJob.get()
- .flatMapAsync((job) =>
- eitherVault.flatMapAsync(async (vault) => {
- const eitherKey = await vault.unlock(tEitherJob);
- return eitherKey.mapRight((key) => ({ job, key, vault }));
- })
- )
- )
- .map(async (tEitherJobVault) => {
- tEitherJobVault.trace.trace("logging into the wegistwy uwu~");
- const eitherJobVault = await tEitherJobVault.get();
- const eitherDockerRegistryLoginItem = await eitherJobVault.flatMapAsync((
- { job, key, vault },
- ) =>
- vault.fetchSecret<LoginItem>(tEitherJobVault, key, job.arguments.registry)
- .finally(() => vault.lock(tEitherJobVault, key))
- );
- return eitherDockerRegistryLoginItem.flatMapAsync(({ login }) =>
- eitherJobVault.flatMapAsync(async ({ job }) => {
- const loginCommand = getDockerLoginCommand(
- login.username,
- job.arguments.registry,
+ .bimap(TraceUtil.withMetricTrace(buildImageMetric))
+ .bimap(TraceUtil.withMetricTrace(loginMetric))
+ .peek((tEitherJob) => tEitherJob.trace.trace('starting docker image build job! (⑅˘꒳˘)'))
+ .map((tEitherJob) =>
+ tEitherJob.get().flatMapAsync((job) =>
+ eitherVault.flatMapAsync(async (vault) => {
+ const eitherKey = await vault.unlock(tEitherJob);
+ return eitherKey.mapRight((key) => ({ job, key, vault }));
+ }),
+ ),
+ )
+ .map(async (tEitherJobVault) => {
+ tEitherJobVault.trace.trace('logging into the wegistwy uwu~');
+ const eitherJobVault = await tEitherJobVault.get();
+ const eitherDockerRegistryLoginItem = await eitherJobVault.flatMapAsync(({ job, key, vault }) =>
+ vault
+ .fetchSecret<LoginItem>(tEitherJobVault, key, job.arguments.registry)
+ .finally(() => vault.lock(tEitherJobVault, key)),
);
- const eitherLoggedIn = await tEitherJobVault.move(loginCommand).map((
- tLoginCmd,
- ) =>
- getStdout(tLoginCmd, { env: { REGISTRY_PASSWORD: login.password } })
- ).get();
- return eitherLoggedIn.moveRight(job);
- })
- );
- })
- .peek(async (tEitherWithAuthdRegistry) => {
- const eitherWithAuthdRegistry = await tEitherWithAuthdRegistry.get();
- return tEitherWithAuthdRegistry.trace.trace(
- eitherWithAuthdRegistry.fold(({ isLeft}) =>
- loginMetric[isLeft ? "failure" : "success"]
- ),
- );
- })
- .map(async (tEitherWithAuthdRegistryBuildJob) => {
- const eitherWithAuthdRegistryBuildJob =
- await tEitherWithAuthdRegistryBuildJob.get();
- tEitherWithAuthdRegistryBuildJob.trace.trace(
- "finally building the image~ (◕ᴗ◕✿)",
- );
- const eitherBuiltImage = await eitherWithAuthdRegistryBuildJob.flatMapAsync(
- (job) =>
- tEitherWithAuthdRegistryBuildJob
- .move(getBuildCommand(job.arguments))
- .map((tBuildCmd) =>
- getStdout(tBuildCmd, {
- env: {},
- clearEnv: true,
- })
- )
- .get(),
- );
- return eitherBuiltImage.flatMap((buildOutput) =>
- eitherWithAuthdRegistryBuildJob.mapRight((job) => ({ buildOutput, job }))
- );
- })
- .peek(async (tEitherWithBuiltImage) => {
- const eitherWithBuiltImage = await tEitherWithBuiltImage.get();
- eitherWithBuiltImage.fold(({ isLeft, value}) => {
- tEitherWithBuiltImage.trace.trace(
- buildImageMetric[isLeft ? "failure" : "success"],
- );
- if (isLeft) {
- tEitherWithBuiltImage.trace.addTrace(LogLevel.ERROR).trace(
- `oh nyoo we couldn't buiwd the img :(( ${value}`,
+ return eitherDockerRegistryLoginItem.flatMapAsync(({ login }) =>
+ eitherJobVault.flatMapAsync(async ({ job }) => {
+ const loginCommand = getDockerLoginCommand(login.username, job.arguments.registry);
+ const eitherLoggedIn = await tEitherJobVault
+ .move(loginCommand)
+ .map((tLoginCmd) => getStdout(tLoginCmd, { env: { REGISTRY_PASSWORD: login.password } }))
+ .get();
+ return eitherLoggedIn.moveRight(job);
+ }),
);
- return;
- }
- tEitherWithBuiltImage.trace.addTrace("buildOutput").trace(value.buildOutput);
- });
- })
- .map(async (tEitherWithBuiltImage) => {
- const eitherWithBuiltImage = await tEitherWithBuiltImage.get();
- return eitherWithBuiltImage
- .mapRight(({ job }) =>
- tEitherWithBuiltImage.move(getPushCommand(job.arguments.imageTag))
- )
- .flatMapAsync((tPushCommand) => getStdout(tPushCommand));
- })
- .get();
+ })
+ .peek(async (tEitherWithAuthdRegistry) => {
+ const eitherWithAuthdRegistry = await tEitherWithAuthdRegistry.get();
+ return tEitherWithAuthdRegistry.trace.trace(
+ eitherWithAuthdRegistry.fold(({ isLeft }) => loginMetric[isLeft ? 'failure' : 'success']),
+ );
+ })
+ .map(async (tEitherWithAuthdRegistryBuildJob) => {
+ const eitherWithAuthdRegistryBuildJob = await tEitherWithAuthdRegistryBuildJob.get();
+ tEitherWithAuthdRegistryBuildJob.trace.trace('finally building the image~ (◕ᴗ◕✿)');
+ const eitherBuiltImage = await eitherWithAuthdRegistryBuildJob.flatMapAsync((job) =>
+ tEitherWithAuthdRegistryBuildJob
+ .move(getBuildCommand(job.arguments))
+ .map((tBuildCmd) =>
+ getStdout(tBuildCmd, {
+ env: {},
+ clearEnv: true,
+ }),
+ )
+ .get(),
+ );
+ return eitherBuiltImage.flatMap((buildOutput) =>
+ eitherWithAuthdRegistryBuildJob.mapRight((job) => ({ buildOutput, job })),
+ );
+ })
+ .peek(async (tEitherWithBuiltImage) => {
+ const eitherWithBuiltImage = await tEitherWithBuiltImage.get();
+ eitherWithBuiltImage.fold(({ isLeft, value }) => {
+ tEitherWithBuiltImage.trace.trace(buildImageMetric[isLeft ? 'failure' : 'success']);
+ if (isLeft) {
+ tEitherWithBuiltImage.trace
+ .addTrace(LogLevel.ERROR)
+ .trace(`oh nyoo we couldn't buiwd the img :(( ${value}`);
+ return;
+ }
+ tEitherWithBuiltImage.trace.addTrace('buildOutput').trace(value.buildOutput);
+ });
+ })
+ .map(async (tEitherWithBuiltImage) => {
+ const eitherWithBuiltImage = await tEitherWithBuiltImage.get();
+ return eitherWithBuiltImage
+ .mapRight(({ job }) => tEitherWithBuiltImage.move(getPushCommand(job.arguments.imageTag)))
+ .flatMapAsync((tPushCommand) => getStdout(tPushCommand));
+ })
+ .get();
const getDockerLoginCommand = (username: string, registry: string) =>
- `docker login --username ${username} --password $REGISTRY_PASSWORD ${registry}`
- .split(" ");
+ `docker login --username ${username} --password $REGISTRY_PASSWORD ${registry}`.split(' ');
-const getBuildCommand = (
- {
+const getBuildCommand = ({ buildTarget, imageTag, dockerfile, context }: BuildDockerImageJobProps) => [
+ 'docker',
+ 'build',
+ '--target',
buildTarget,
+ '-t',
imageTag,
+ '-f',
dockerfile,
context,
- }: BuildDockerImageJobProps,
-) => [
- "docker",
- "build",
- "--target",
- buildTarget,
- "-t",
- imageTag,
- "-f",
- dockerfile,
- context,
];
-const getPushCommand = (tag: string) => ["docker", "push", tag];
+const getPushCommand = (tag: string) => ['docker', 'push', tag];
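For reference, the argv-style arrays built by getBuildCommand and getPushCommand above expand as shown in this sketch; the property names come from the required env vars in the diff, the concrete values are invented:

// Hypothetical BuildDockerImageJobProps values, for illustration only.
getBuildCommand({
    registry: 'oci.example.test',
    namespace: 'img',
    repository: 'worker',
    imageTag: 'oci.example.test/img/worker:release',
    context: '.',
    dockerfile: 'Dockerfile',
    buildTarget: 'release',
});
// -> ['docker', 'build', '--target', 'release',
//     '-t', 'oci.example.test/img/worker:release', '-f', 'Dockerfile', '.']

getPushCommand('oci.example.test/img/worker:release');
// -> ['docker', 'push', 'oci.example.test/img/worker:release']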
diff --git a/worker/scripts/checkout_ci.ts b/worker/scripts/checkout_ci.ts
index efe74fb..8e4dcca 100755
--- a/worker/scripts/checkout_ci.ts
+++ b/worker/scripts/checkout_ci.ts
@@ -1,182 +1,152 @@
-#!/usr/bin/env -S deno run --allow-all
+#!/usr/bin/env node
import {
- type Command,
- Either,
- LogTraceable,
- getRequiredEnvVars,
- getStdout,
- isObject,
- LogMetricTraceable,
- Metric,
- prependWith,
- TraceUtil,
-} from "@emprespresso/pengueno";
-import {
- type CheckoutCiJob,
- type FetchCodeJob,
- PipelineImpl,
-} from "@emprespresso/ci_model";
-import { executeJob, executePipeline } from "@emprespresso/ci_worker";
+ type Command,
+ Either,
+ LogTraceable,
+ getRequiredEnvVars,
+ getStdout,
+ isObject,
+ LogMetricTraceable,
+ Metric,
+ prependWith,
+ TraceUtil,
+} from '@emprespresso/pengueno';
+import { mkdir, readFile, rm } from 'fs/promises';
+import { join } from 'path';
+import { type CheckoutCiJob, type FetchCodeJob, PipelineImpl } from '@emprespresso/ci_model';
+import { executeJob, executePipeline } from '@emprespresso/ci_worker';
const run = Date.now().toString();
-const eitherJob = getRequiredEnvVars(["remote", "refname", "rev"]).mapRight(
- (baseArgs) =>
- <CheckoutCiJob>{
- type: "checkout_ci.ts",
- arguments: {
- ...baseArgs,
- run,
- returnPath: Deno.cwd(),
- },
- },
+const eitherJob = getRequiredEnvVars(['remote', 'refname', 'rev']).mapRight(
+ (baseArgs) =>
+ <CheckoutCiJob>{
+ type: 'checkout_ci.ts',
+ arguments: {
+ ...baseArgs,
+ run,
+ returnPath: process.cwd(),
+ },
+ },
);
-const ciRunMetric = Metric.fromName("checkout_ci.run");
-const _logJob = LogTraceable.of(eitherJob).bimap(
- TraceUtil.withTrace(`checkout_ci.${run}`),
-);
+const ciRunMetric = Metric.fromName('checkout_ci.run');
+const _logJob = LogTraceable.of(eitherJob).bimap(TraceUtil.withTrace(`checkout_ci.${run}`));
await LogMetricTraceable.ofLogTraceable(_logJob)
- .bimap(TraceUtil.withMetricTrace(ciRunMetric))
- .map((tEitherJob) =>
- tEitherJob.get().flatMapAsync((ciJob) => {
- const wd = getWorkingDirectoryForCiJob(ciJob);
- const fetchPackageJob = <FetchCodeJob>{
- type: "fetch_code.ts",
- arguments: {
- remoteUrl: ciJob.arguments.remote,
- checkout: ciJob.arguments.rev,
- path: getSrcDirectoryForCiJob(ciJob),
- },
- };
- return Either.fromFailableAsync<Error, CheckoutCiJob>(() =>
- Deno.mkdir(wd)
- .then(() => Deno.chdir(wd))
- .then(() => tEitherJob.move(fetchPackageJob).map(executeJob).get())
- .then(() => ciJob),
- );
- }),
- )
- .map((tEitherCiJob) =>
- tEitherCiJob.get().then((eitherCiJob) =>
- eitherCiJob.flatMapAsync<{ cmd: Command; job: CheckoutCiJob }>((ciJob) =>
- Either.fromFailableAsync<Error, string>(() =>
- Deno.readTextFile(
- `${getSrcDirectoryForCiJob(ciJob)}/${CI_WORKFLOW_FILE}`,
- ),
- ).then((eitherWorkflowJson) =>
- eitherWorkflowJson
- .flatMap((json) =>
- Either.fromFailable<Error, unknown>(JSON.parse(json)),
- )
- .flatMap((eitherWorkflowParse) => {
- if (isCiWorkflow(eitherWorkflowParse)) {
- return Either.right({
- cmd: getPipelineGenerationCommand(
- ciJob,
- eitherWorkflowParse.workflow,
- ),
- job: ciJob,
- });
- }
- return Either.left(
- new Error(
- "couldn't find any valid ci configuration (。•́︿•̀。), that's okay~",
+ .bimap(TraceUtil.withMetricTrace(ciRunMetric))
+ .map((tEitherJob) =>
+ tEitherJob.get().flatMapAsync((ciJob) => {
+ const wd = getWorkingDirectoryForCiJob(ciJob);
+ const fetchPackageJob = <FetchCodeJob>{
+ type: 'fetch_code.ts',
+ arguments: {
+ remoteUrl: ciJob.arguments.remote,
+ checkout: ciJob.arguments.rev,
+ path: getSrcDirectoryForCiJob(ciJob),
+ },
+ };
+ return Either.fromFailableAsync<Error, CheckoutCiJob>(() =>
+ mkdir(wd, { recursive: true })
+ .then(() => process.chdir(wd))
+ .then(() => tEitherJob.move(fetchPackageJob).map(executeJob).get())
+ .then(() => ciJob),
+ );
+ }),
+ )
+ .map((tEitherCiJob) =>
+ tEitherCiJob.get().then((eitherCiJob) =>
+ eitherCiJob.flatMapAsync<{ cmd: Command; job: CheckoutCiJob }>((ciJob) =>
+ Either.fromFailableAsync<Error, string>(() =>
+ readFile(join(getSrcDirectoryForCiJob(ciJob), CI_WORKFLOW_FILE), 'utf-8'),
+ ).then((eitherWorkflowJson) =>
+ eitherWorkflowJson
+ .flatMap((json) => Either.fromFailable<Error, unknown>(JSON.parse(json)))
+ .flatMap((eitherWorkflowParse) => {
+ if (isCiWorkflow(eitherWorkflowParse)) {
+ return Either.right({
+ cmd: getPipelineGenerationCommand(ciJob, eitherWorkflowParse.workflow),
+ job: ciJob,
+ });
+ }
+ return Either.left(
+ new Error("couldn't find any valid ci configuration (。•́︿•̀。), that's okay~"),
+ );
+ }),
),
- );
- }),
+ ),
),
- ),
- ),
- )
- .map(async (tEitherPipelineGenerationCommand) => {
- const eitherJobCommand = await tEitherPipelineGenerationCommand.get();
- const eitherPipeline = await eitherJobCommand.flatMapAsync((jobCommand) =>
- tEitherPipelineGenerationCommand
- .move(jobCommand.cmd)
- .map(getStdout)
- .get(),
- );
- return eitherPipeline
- .flatMap(PipelineImpl.from)
- .flatMap((pipeline) =>
- eitherJobCommand.mapRight(({ job }) => ({ job, pipeline })),
- );
- })
- .peek(
- TraceUtil.promiseify((tEitherPipeline) =>
- tEitherPipeline
- .get()
- .mapRight((val) => val.pipeline.serialize())
- .mapRight(
- (pipeline) =>
- `built the pipeline~ (◕ᴗ◕✿) let's make something amazing! ${pipeline}`,
- )
- .mapRight((msg) => tEitherPipeline.trace.trace(msg)),
- ),
- )
- .map(async (tEitherPipeline) => {
- const eitherPipeline = await tEitherPipeline.get();
- return eitherPipeline.flatMapAsync(({ pipeline, job }) =>
- tEitherPipeline
- .move(pipeline)
- .map((p) =>
- executePipeline(p, {
- HOME: getWorkingDirectoryForCiJob(job),
- }),
- )
- .get(),
+ )
+ .map(async (tEitherPipelineGenerationCommand) => {
+ const eitherJobCommand = await tEitherPipelineGenerationCommand.get();
+ const eitherPipeline = await eitherJobCommand.flatMapAsync((jobCommand) =>
+ tEitherPipelineGenerationCommand.move(jobCommand.cmd).map(getStdout).get(),
+ );
+ return eitherPipeline
+ .flatMap(PipelineImpl.from)
+ .flatMap((pipeline) => eitherJobCommand.mapRight(({ job }) => ({ job, pipeline })));
+ })
+ .peek(
+ TraceUtil.promiseify((tEitherPipeline) =>
+ tEitherPipeline
+ .get()
+ .mapRight((val) => val.pipeline.serialize())
+ .mapRight((pipeline) => `built the pipeline~ (◕ᴗ◕✿) let's make something amazing! ${pipeline}`)
+ .mapRight((msg) => tEitherPipeline.trace.trace(msg)),
+ ),
+ )
+ .map(async (tEitherPipeline) => {
+ const eitherPipeline = await tEitherPipeline.get();
+ return eitherPipeline.flatMapAsync(({ pipeline, job }) =>
+ tEitherPipeline
+ .move(pipeline)
+ .map((p) =>
+ executePipeline(p, {
+ HOME: getWorkingDirectoryForCiJob(job),
+ }),
+ )
+ .get(),
+ );
+ })
+ .get()
+ .then((e) =>
+ e
+ .flatMap(() => eitherJob)
+ .fold(({ isLeft, isRight, value }) => {
+ if (isLeft || !isRight) throw value;
+ return rm(getWorkingDirectoryForCiJob(value), {
+ recursive: true,
+ });
+ }),
);
- })
- .get()
- .then((e) =>
- e
- .flatMap(() => eitherJob)
- .fold(({ isLeft, isRight, value }) => {
- if (isLeft || !isRight) throw value;
- return Deno.remove(getWorkingDirectoryForCiJob(value), {
- recursive: true,
- });
- }),
- );
-const getWorkingDirectoryForCiJob = (job: CheckoutCiJob) =>
- `${job.arguments.returnPath}/${job.arguments.run}`;
+const getWorkingDirectoryForCiJob = (job: CheckoutCiJob) => `${job.arguments.returnPath}/${job.arguments.run}`;
-const getSrcDirectoryForCiJob = (job: CheckoutCiJob) =>
- `${job.arguments.returnPath}/${job.arguments.run}/src`;
+const getSrcDirectoryForCiJob = (job: CheckoutCiJob) => `${job.arguments.returnPath}/${job.arguments.run}/src`;
-const _runFlags = (
- "--rm --network none --cap-drop ALL" + "--security-opt no-new-privileges"
-).split(" ");
-const _image = "oci.liz.coffee/img/ci-worker:release";
+const _runFlags = '--rm --network none --cap-drop ALL --security-opt no-new-privileges'.split(' ');
+const _image = 'oci.liz.coffee/img/ci-worker:release';
const getPipelineGenerationCommand = (
- job: CheckoutCiJob,
- pipelineGeneratorPath: string,
- image = _image,
- runFlags = _runFlags,
+ job: CheckoutCiJob,
+ pipelineGeneratorPath: string,
+ image = _image,
+ runFlags = _runFlags,
) => [
- "docker",
- "run",
- ...runFlags,
- ...prependWith(
- Object.entries(job.arguments).map(([key, val]) => `"${key}"="${val}"`),
- "-e",
- ),
- "-v",
- `${getSrcDirectoryForCiJob(
- job,
- )}/${pipelineGeneratorPath}:/pipeline_generator`,
- image,
- "/pipeline_generator",
+ 'docker',
+ 'run',
+ ...runFlags,
+ ...prependWith(
+ Object.entries(job.arguments).map(([key, val]) => `"${key}"="${val}"`),
+ '-e',
+ ),
+ '-v',
+ `${getSrcDirectoryForCiJob(job)}/${pipelineGeneratorPath}:/pipeline_generator`,
+ image,
+ '/pipeline_generator',
];
export interface CiWorkflow {
- workflow: string;
+ workflow: string;
}
export const isCiWorkflow = (t: unknown): t is CiWorkflow =>
- isObject(t) &&
- "workflow" in t &&
- typeof t.workflow === "string" &&
- !t.workflow.includes("..");
-const CI_WORKFLOW_FILE = ".ci/ci.json";
+ isObject(t) && 'workflow' in t && typeof t.workflow === 'string' && !t.workflow.includes('..');
+const CI_WORKFLOW_FILE = '.ci/ci.json';
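The isCiWorkflow guard above only accepts an object whose workflow field is a string containing no '..' segments. A minimal sketch of the expected .ci/ci.json contents and the guard in use; the file body here is invented for illustration:

// Hypothetical .ci/ci.json contents; only the "workflow" key is checked by the guard.
const raw = '{ "workflow": "ci/generate_pipeline.ts" }';

const parsed: unknown = JSON.parse(raw);
if (isCiWorkflow(parsed)) {
    // parsed.workflow is now typed as string and cannot escape the checked-out
    // source tree via '..', so it is safe to mount as
    // <src>/<workflow>:/pipeline_generator in getPipelineGenerationCommand.
    console.log(parsed.workflow);
}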